[ 465.339710] env[61839]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'linux_bridge' {{(pid=61839) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 465.340097] env[61839]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'noop' {{(pid=61839) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 465.340097] env[61839]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'ovs' {{(pid=61839) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 465.340481] env[61839]: INFO os_vif [-] Loaded VIF plugins: linux_bridge, noop, ovs
[ 465.435053] env[61839]: DEBUG oslo_concurrency.processutils [-] Running cmd (subprocess): grep -F node.session.scan /sbin/iscsiadm {{(pid=61839) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:390}}
[ 465.445391] env[61839]: DEBUG oslo_concurrency.processutils [-] CMD "grep -F node.session.scan /sbin/iscsiadm" returned: 0 in 0.010s {{(pid=61839) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:428}}
[ 466.045539] env[61839]: INFO nova.virt.driver [None req-c13c9fa3-e89e-4265-9d4a-43c87b485c3a None None] Loading compute driver 'vmwareapi.VMwareVCDriver'
[ 466.117987] env[61839]: DEBUG oslo_concurrency.lockutils [-] Acquiring lock "oslo_vmware_api_lock" by "oslo_vmware.api.VMwareAPISession._create_session" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 466.118231] env[61839]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" acquired by "oslo_vmware.api.VMwareAPISession._create_session" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 466.118301] env[61839]: DEBUG oslo_vmware.service [-] Creating suds client with soap_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk' and wsdl_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk/vimService.wsdl' {{(pid=61839) __init__ /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:242}}
[ 469.215737] env[61839]: DEBUG oslo_vmware.service [-] Invoking ServiceInstance.RetrieveServiceContent with opID=oslo.vmware-cb0a31ec-6f44-4a37-80e7-9ea22079d371 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 469.231863] env[61839]: DEBUG oslo_vmware.api [-] Logging into host: vc1.osci.c.eu-de-1.cloud.sap. {{(pid=61839) _create_session /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:242}}
[ 469.231989] env[61839]: DEBUG oslo_vmware.service [-] Invoking SessionManager.Login with opID=oslo.vmware-cdd81cd3-c99a-4348-897a-7c9cf552c7d9 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 469.272861] env[61839]: INFO oslo_vmware.api [-] Successfully established new session; session ID is 4d958.
[ 469.272997] env[61839]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" "released" by "oslo_vmware.api.VMwareAPISession._create_session" :: held 3.155s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 469.273575] env[61839]: INFO nova.virt.vmwareapi.driver [None req-c13c9fa3-e89e-4265-9d4a-43c87b485c3a None None] VMware vCenter version: 7.0.3
[ 469.276946] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04c78124-ac4c-4d12-bedd-23358c88e78c {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 469.297451] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27268f1a-8dbf-459d-8389-ac7284e79fcd {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 469.303090] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5831c957-e53e-405a-b716-19ce3e6e051b {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 469.309365] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e38a0960-9a8d-4f6f-b904-79e40de8d37e {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 469.321924] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1687d4ef-21a0-43ce-ab44-7ce872c10a68 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 469.327486] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-996c2156-755f-46d1-a021-7aed3f4d839a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 469.356754] env[61839]: DEBUG oslo_vmware.service [-] Invoking ExtensionManager.FindExtension with opID=oslo.vmware-35b92d6c-59a6-4f94-af7b-b0832955bd9e {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 469.362350] env[61839]: DEBUG nova.virt.vmwareapi.driver [None req-c13c9fa3-e89e-4265-9d4a-43c87b485c3a None None] Extension org.openstack.compute already exists. {{(pid=61839) _register_openstack_extension /opt/stack/nova/nova/virt/vmwareapi/driver.py:226}}
[ 469.365089] env[61839]: INFO nova.compute.provider_config [None req-c13c9fa3-e89e-4265-9d4a-43c87b485c3a None None] No provider configs found in /etc/nova/provider_config/. If files are present, ensure the Nova process has access.
[ 469.868767] env[61839]: DEBUG nova.context [None req-c13c9fa3-e89e-4265-9d4a-43c87b485c3a None None] Found 2 cells: 00000000-0000-0000-0000-000000000000(cell0),40195126-bceb-4b7a-b02f-f1cd88819adc(cell1) {{(pid=61839) load_cells /opt/stack/nova/nova/context.py:464}}
[ 469.871057] env[61839]: DEBUG oslo_concurrency.lockutils [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] Acquiring lock "00000000-0000-0000-0000-000000000000" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 469.871325] env[61839]: DEBUG oslo_concurrency.lockutils [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] Lock "00000000-0000-0000-0000-000000000000" acquired by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 469.872010] env[61839]: DEBUG oslo_concurrency.lockutils [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] Lock "00000000-0000-0000-0000-000000000000" "released" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 469.872447] env[61839]: DEBUG oslo_concurrency.lockutils [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] Acquiring lock "40195126-bceb-4b7a-b02f-f1cd88819adc" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 469.872640] env[61839]: DEBUG oslo_concurrency.lockutils [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] Lock "40195126-bceb-4b7a-b02f-f1cd88819adc" acquired by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 469.873647] env[61839]: DEBUG oslo_concurrency.lockutils [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] Lock "40195126-bceb-4b7a-b02f-f1cd88819adc" "released" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 469.893349] env[61839]: INFO dbcounter [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] Registered counter for database nova_cell0
[ 469.901299] env[61839]: INFO dbcounter [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] Registered counter for database nova_cell1
[ 469.904738] env[61839]: DEBUG oslo_db.sqlalchemy.engines [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=61839) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:342}}
[ 469.905099] env[61839]: DEBUG oslo_db.sqlalchemy.engines [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=61839) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:342}}
[ 469.910080] env[61839]: ERROR nova.db.main.api [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 469.910080] env[61839]:     result = function(*args, **kwargs)
[ 469.910080] env[61839]:   File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 469.910080] env[61839]:     return func(*args, **kwargs)
[ 469.910080] env[61839]:   File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 469.910080] env[61839]:     result = fn(*args, **kwargs)
[ 469.910080] env[61839]:   File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 469.910080] env[61839]:     return f(*args, **kwargs)
[ 469.910080] env[61839]:   File "/opt/stack/nova/nova/objects/service.py", line 554, in _db_service_get_minimum_version
[ 469.910080] env[61839]:     return db.service_get_minimum_version(context, binaries)
[ 469.910080] env[61839]:   File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 469.910080] env[61839]:     _check_db_access()
[ 469.910080] env[61839]:   File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 469.910080] env[61839]:     stacktrace = ''.join(traceback.format_stack())
[ 469.910080] env[61839]: 
[ 469.910808] env[61839]: ERROR nova.db.main.api [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 469.910808] env[61839]:     result = function(*args, **kwargs)
[ 469.910808] env[61839]:   File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 469.910808] env[61839]:     return func(*args, **kwargs)
[ 469.910808] env[61839]:   File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 469.910808] env[61839]:     result = fn(*args, **kwargs)
[ 469.910808] env[61839]:   File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 469.910808] env[61839]:     return f(*args, **kwargs)
[ 469.910808] env[61839]:   File "/opt/stack/nova/nova/objects/service.py", line 554, in _db_service_get_minimum_version
[ 469.910808] env[61839]:     return db.service_get_minimum_version(context, binaries)
[ 469.910808] env[61839]:   File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 469.910808] env[61839]:     _check_db_access()
[ 469.910808] env[61839]:   File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 469.910808] env[61839]:     stacktrace = ''.join(traceback.format_stack())
[ 469.910808] env[61839]: 
[ 469.911239] env[61839]: WARNING nova.objects.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] Failed to get minimum service version for cell 00000000-0000-0000-0000-000000000000
[ 469.911338] env[61839]: WARNING nova.objects.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] Failed to get minimum service version for cell 40195126-bceb-4b7a-b02f-f1cd88819adc
[ 469.911759] env[61839]: DEBUG oslo_concurrency.lockutils [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] Acquiring lock "singleton_lock" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 469.911921] env[61839]: DEBUG oslo_concurrency.lockutils [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] Acquired lock "singleton_lock" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 469.912186] env[61839]: DEBUG oslo_concurrency.lockutils [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] Releasing lock "singleton_lock" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 469.912529] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] Full set of CONF: {{(pid=61839) _wait_for_exit_or_signal /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/service.py:363}}
[ 469.912678] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] ******************************************************************************** {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2806}}
[ 469.912806] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] Configuration options gathered from: {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2807}}
[ 469.912940] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] command line args: ['--config-file', '/etc/nova/nova.conf', '--config-file', '/etc/nova/nova-cpu-common.conf', '--config-file', '/etc/nova/nova-cpu-1.conf'] {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2808}}
[ 469.913148] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] config files: ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2809}}
[ 469.913287] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] ================================================================================ {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2811}}
[ 469.913501] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] allow_resize_to_same_host = True {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 469.913669] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] arq_binding_timeout = 300 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 469.913799] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] backdoor_port = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 469.913924] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] backdoor_socket = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 469.914107] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] block_device_allocate_retries = 60 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 469.914280] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] block_device_allocate_retries_interval = 3 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 469.914454] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] cert = self.pem {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 469.914619] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] compute_driver = vmwareapi.VMwareVCDriver {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 469.914801] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] compute_monitors = [] {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 469.914973] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] config_dir = [] {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 469.915155] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] config_drive_format = iso9660 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 469.915289] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] config_file = ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 469.915453] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] config_source = [] {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 469.915614] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] console_host = devstack {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 469.915777] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] control_exchange = nova {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 469.915935] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] cpu_allocation_ratio = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 469.916126] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] daemon = False {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 469.916299] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] debug = True {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 469.916459] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] default_access_ip_network_name = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 469.916623] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] default_availability_zone = nova {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 469.916776] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] default_ephemeral_format = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 469.916942] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] default_green_pool_size = 1000 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 469.917194] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] default_log_levels = ['amqp=WARN', 'amqplib=WARN', 'boto=WARN', 'qpid=WARN', 'sqlalchemy=WARN', 'suds=INFO', 'oslo.messaging=INFO', 'oslo_messaging=INFO', 'iso8601=WARN', 'requests.packages.urllib3.connectionpool=WARN', 'urllib3.connectionpool=WARN', 'websocket=WARN', 'requests.packages.urllib3.util.retry=WARN', 'urllib3.util.retry=WARN', 'keystonemiddleware=WARN', 'routes.middleware=WARN', 'stevedore=WARN', 'taskflow=WARN', 'keystoneauth=WARN', 'oslo.cache=INFO', 'oslo_policy=INFO', 'dogpile.core.dogpile=INFO', 'glanceclient=WARN', 'oslo.privsep.daemon=INFO'] {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 469.917362] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] default_schedule_zone = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 469.917519] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] disk_allocation_ratio = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 469.917675] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] enable_new_services = True {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 469.917850] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] enabled_apis = ['osapi_compute'] {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 469.918018] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] enabled_ssl_apis = [] {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 469.918185] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] flat_injected = False {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 469.918340] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] force_config_drive = False {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 469.918493] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] force_raw_images = True {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 469.918661] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] graceful_shutdown_timeout = 5 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 469.918816] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] heal_instance_info_cache_interval = 60 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 469.919052] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] host = cpu-1 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 469.919235] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] initial_cpu_allocation_ratio = 4.0 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 469.919398] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] initial_disk_allocation_ratio = 1.0 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 469.919558] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] initial_ram_allocation_ratio = 1.0 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 469.919767] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] injected_network_template = /opt/stack/nova/nova/virt/interfaces.template {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 469.919929] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] instance_build_timeout = 0 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 469.920102] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] instance_delete_interval = 300 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 469.920284] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] instance_format = [instance: %(uuid)s] {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 469.920446] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] instance_name_template = instance-%08x {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 469.920605] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] instance_usage_audit = False {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 469.920771] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] instance_usage_audit_period = month {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 469.920931] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] instance_uuid_format = [instance: %(uuid)s] {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 469.921107] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] instances_path = /opt/stack/data/nova/instances {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 469.921296] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] internal_service_availability_zone = internal {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 469.921435] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] key = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 469.921593] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] live_migration_retry_count = 30 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 469.921759] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] log_color = False {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 469.921922] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] log_config_append = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 469.922098] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] log_date_format = %Y-%m-%d %H:%M:%S {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 469.922262] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] log_dir = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 469.922420] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] log_file = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 469.922554] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] log_options = True {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 469.922706] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] log_rotate_interval = 1 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 469.922873] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] log_rotate_interval_type = days {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 469.923045] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] log_rotation_type = none {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 469.923182] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] logging_context_format_string = %(color)s%(levelname)s %(name)s [%(global_request_id)s %(request_id)s %(project_name)s %(user_name)s%(color)s] %(instance)s%(color)s%(message)s {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 469.923318] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] logging_debug_format_suffix = {{(pid=%(process)d) %(funcName)s %(pathname)s:%(lineno)d}} {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 469.923482] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] logging_default_format_string = %(color)s%(levelname)s %(name)s [-%(color)s] %(instance)s%(color)s%(message)s {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 469.923644] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] logging_exception_prefix = ERROR %(name)s %(instance)s {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 469.923771] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] logging_user_identity_format = %(user)s %(project)s %(domain)s %(system_scope)s %(user_domain)s %(project_domain)s {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 469.923931] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] long_rpc_timeout = 1800 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 469.924102] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] max_concurrent_builds = 10 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 469.924261] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] max_concurrent_live_migrations = 1 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 469.924422] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] max_concurrent_snapshots = 5 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 469.924578] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] max_local_block_devices = 3 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 469.924734] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] max_logfile_count = 30 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 469.924890] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] max_logfile_size_mb = 200 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 469.925057] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] maximum_instance_delete_attempts = 5 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 469.925226] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] metadata_listen = 0.0.0.0 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 469.925391] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] metadata_listen_port = 8775 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 469.925553] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] metadata_workers = 2 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 469.925710] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] migrate_max_retries = -1 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 469.925875] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] mkisofs_cmd = genisoimage {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 469.926087] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] my_block_storage_ip = 10.180.1.21 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 469.926223] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] my_ip = 10.180.1.21 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 469.926384] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] network_allocate_retries = 0 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 469.926559] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] non_inheritable_image_properties = ['cache_in_nova', 'bittorrent'] {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 469.926724] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] osapi_compute_listen = 0.0.0.0 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 469.926883] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] osapi_compute_listen_port = 8774 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 469.927055] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] osapi_compute_unique_server_name_scope = {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 469.927223] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] osapi_compute_workers = 2 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 469.927389] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] password_length = 12 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 469.927546] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] periodic_enable = True {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 469.927703] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] periodic_fuzzy_delay = 60 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 469.927865] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] pointer_model = usbtablet {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 469.928037] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] preallocate_images = none {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 469.928199] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] publish_errors = False {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 469.928328] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] pybasedir = /opt/stack/nova {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 469.928481] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] ram_allocation_ratio = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 469.928638] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] rate_limit_burst = 0 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 469.928801] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] rate_limit_except_level = CRITICAL {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 469.928958] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] rate_limit_interval = 0 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 469.929128] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] reboot_timeout = 0 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 469.929287] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] reclaim_instance_interval = 0 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 469.929440] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] record = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 469.929602] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] reimage_timeout_per_gb = 60 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 469.929765] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] report_interval = 120 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 469.929922] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] rescue_timeout = 0 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 469.930091] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] reserved_host_cpus = 0 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 469.930257] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] reserved_host_disk_mb = 0 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 469.930410] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] reserved_host_memory_mb = 512 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 469.930565] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] reserved_huge_pages = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 469.930726] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] resize_confirm_window = 0 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 469.930879] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] resize_fs_using_block_device = False {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 469.931044] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] resume_guests_state_on_host_boot = False {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 469.931214] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] rootwrap_config = /etc/nova/rootwrap.conf {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 469.931375] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] rpc_response_timeout = 60 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 469.931530] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] run_external_periodic_tasks = True {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 469.931694] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] running_deleted_instance_action = reap {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 469.931850] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] running_deleted_instance_poll_interval = 1800 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 469.932014] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] running_deleted_instance_timeout = 0 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 469.932173] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] scheduler_instance_sync_interval = 120 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 469.932337] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] service_down_time = 720 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 469.932516] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] servicegroup_driver = db {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 469.932661] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] shell_completion = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 469.932815] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] shelved_offload_time = 0 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 469.932971] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] shelved_poll_interval = 3600 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 469.933147] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] shutdown_timeout = 0 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 469.933306] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] source_is_ipv6 = False {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 469.933487] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] ssl_only = False {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 469.933700] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] state_path = /opt/stack/data/n-cpu-1 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 469.933864] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] sync_power_state_interval = 600 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 469.934033] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] sync_power_state_pool_size = 1000 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 469.934204] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] syslog_log_facility = LOG_USER {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 469.934361] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] tempdir = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 469.934519] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] timeout_nbd = 10 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 469.934684] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] transport_url = **** {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 469.934842] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] update_resources_interval = 0 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 469.935000] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] use_cow_images = True {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 469.935171] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] use_eventlog = False {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 469.935329] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] use_journal = False {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 469.935486] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] use_json = False {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 469.935642] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] use_rootwrap_daemon = False {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 469.935813] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] use_stderr = False {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 469.935984] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] use_syslog = False {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 469.936168] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] vcpu_pin_set = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 469.936346] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] vif_plugging_is_fatal = True {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 469.936527] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] vif_plugging_timeout = 300 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 469.936704] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] virt_mkfs = [] {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 469.936877] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] volume_usage_poll_interval = 0 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 469.937058] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] watch_log_file = False {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 469.937242] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] web = /usr/share/spice-html5 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 469.937438] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] os_brick.lock_path = /opt/stack/data/n-cpu-1 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.937614] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] os_brick.wait_mpath_device_attempts = 4 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.937791] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] os_brick.wait_mpath_device_interval = 1 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.937982] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] oslo_concurrency.disable_process_locking = False {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.938573] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] oslo_concurrency.lock_path = /opt/stack/data/n-cpu-1 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.938767] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] oslo_messaging_metrics.metrics_buffer_size = 1000 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.938951] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] oslo_messaging_metrics.metrics_enabled = False {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.939142] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] oslo_messaging_metrics.metrics_process_name = {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.939319] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] oslo_messaging_metrics.metrics_socket_file = /var/tmp/metrics_collector.sock {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.939488] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] oslo_messaging_metrics.metrics_thread_stop_timeout = 10 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.939671] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] api.auth_strategy = keystone {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.939840] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] api.compute_link_prefix = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.940019] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] api.config_drive_skip_versions = 1.0 2007-01-19 2007-03-01 2007-08-29 2007-10-10 2007-12-15 2008-02-01 2008-09-01 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.940207] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] api.dhcp_domain = novalocal {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.940365] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] api.enable_instance_password = True {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.940532] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] api.glance_link_prefix = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.940697] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] api.instance_list_cells_batch_fixed_size = 100 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.940869] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] api.instance_list_cells_batch_strategy = distributed {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.941039] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] api.instance_list_per_project_cells = False {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.941206] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] api.list_records_by_skipping_down_cells = True {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.941371] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] api.local_metadata_per_cell = False {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.941535] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] api.max_limit = 1000 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.941703] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] api.metadata_cache_expiration = 15 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.941874] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] api.neutron_default_tenant_id = default {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.942055] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] api.response_validation = warn {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.942230] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] api.use_neutron_default_nets = False {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.942401] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] api.vendordata_dynamic_connect_timeout = 5 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.942572] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] api.vendordata_dynamic_failure_fatal = False {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.942732] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] api.vendordata_dynamic_read_timeout = 5 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.942901] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] api.vendordata_dynamic_ssl_certfile = {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.943082] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] api.vendordata_dynamic_targets = [] {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.943256] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] api.vendordata_jsonfile_path = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.943444] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] api.vendordata_providers = ['StaticJSON'] {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.943640] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] cache.backend = dogpile.cache.memcached {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.943858] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] cache.backend_argument = **** {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.944048] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] cache.config_prefix = cache.oslo {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.944224] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] cache.dead_timeout = 60.0 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.944390] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] cache.debug_cache_backend = False {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.944550] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] cache.enable_retry_client = False {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.944723] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] cache.enable_socket_keepalive = False {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.944873] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] cache.enabled = True {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.945043] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] cache.enforce_fips_mode = False {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.945211] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] cache.expiration_time = 600 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.945378] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] cache.hashclient_retry_attempts = 2 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.945543] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] cache.hashclient_retry_delay = 1.0 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.945704] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] cache.memcache_dead_retry = 300 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.945862] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] cache.memcache_password = **** {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.946031] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] cache.memcache_pool_connection_get_timeout = 10 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.946199] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] cache.memcache_pool_flush_on_reconnect = False {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.946362] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] cache.memcache_pool_maxsize = 10 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.946523] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] cache.memcache_pool_unused_timeout = 60 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.946684] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] cache.memcache_sasl_enabled = False {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.946864] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] cache.memcache_servers = ['localhost:11211'] {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.947040] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] cache.memcache_socket_timeout = 1.0 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.947208] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] cache.memcache_username = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.947376] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] cache.proxies = [] {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.947541] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] cache.redis_db = 0 {{(pid=61839) log_opt_values
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.947701] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] cache.redis_password = **** {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.947871] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] cache.redis_sentinel_service_name = mymaster {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.948059] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] cache.redis_sentinels = ['localhost:26379'] {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.948231] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] cache.redis_server = localhost:6379 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.948396] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] cache.redis_socket_timeout = 1.0 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.948556] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] cache.redis_username = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.948716] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] cache.retry_attempts = 2 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.948879] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] cache.retry_delay = 0.0 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.949053] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] cache.socket_keepalive_count = 1 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.949218] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] cache.socket_keepalive_idle = 1 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.949377] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] cache.socket_keepalive_interval = 1 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.949534] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] cache.tls_allowed_ciphers = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.949690] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] cache.tls_cafile = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.949844] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] cache.tls_certfile = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 
469.950008] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] cache.tls_enabled = False {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.950181] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] cache.tls_keyfile = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.950341] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] cinder.auth_section = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.950515] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] cinder.auth_type = password {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.950676] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] cinder.cafile = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.950850] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] cinder.catalog_info = volumev3::publicURL {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.951014] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] cinder.certfile = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.951189] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] cinder.collect_timing = False {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.951354] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] cinder.cross_az_attach = True {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.951513] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] cinder.debug = False {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.951672] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] cinder.endpoint_template = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.951833] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] cinder.http_retries = 3 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.951995] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] cinder.insecure = False {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.952169] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] cinder.keyfile = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.952340] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] cinder.os_region_name = RegionOne 
{{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.952507] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] cinder.split_loggers = False {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.952664] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] cinder.timeout = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.952832] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] compute.consecutive_build_service_disable_threshold = 10 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.952989] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] compute.cpu_dedicated_set = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.953157] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] compute.cpu_shared_set = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.953322] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] compute.image_type_exclude_list = [] {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.953484] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] compute.live_migration_wait_for_vif_plug = True {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.953684] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] compute.max_concurrent_disk_ops = 0 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.953800] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] compute.max_disk_devices_to_attach = -1 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.953960] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] compute.packing_host_numa_cells_allocation_strategy = False {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.954138] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] compute.provider_config_location = /etc/nova/provider_config/ {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.954301] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] compute.resource_provider_association_refresh = 300 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.954458] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] compute.sharing_providers_max_uuids_per_request = 200 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.954614] env[61839]: DEBUG oslo_service.service [None 
req-77a759a3-f235-4c73-be5e-e55473fd902a None None] compute.shutdown_retry_interval = 10 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.954792] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] compute.vmdk_allowed_types = ['streamOptimized', 'monolithicSparse'] {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.954967] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] conductor.workers = 2 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.955155] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] console.allowed_origins = [] {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.955317] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] console.ssl_ciphers = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.955484] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] console.ssl_minimum_version = default {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.955651] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] consoleauth.enforce_session_timeout = False {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.955817] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] consoleauth.token_ttl = 600 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.955982] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] cyborg.cafile = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.956153] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] cyborg.certfile = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.956314] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] cyborg.collect_timing = False {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.956474] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] cyborg.connect_retries = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.956628] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] cyborg.connect_retry_delay = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.956783] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] cyborg.endpoint_override = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.956943] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] 
cyborg.insecure = False {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.957109] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] cyborg.keyfile = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.957267] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] cyborg.max_version = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.957427] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] cyborg.min_version = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.957580] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] cyborg.region_name = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.957736] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] cyborg.retriable_status_codes = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.957889] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] cyborg.service_name = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.958066] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] cyborg.service_type = accelerator {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.958238] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] cyborg.split_loggers = False {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.958412] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] cyborg.status_code_retries = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.958564] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] cyborg.status_code_retry_delay = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.958719] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] cyborg.timeout = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.958897] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] cyborg.valid_interfaces = ['internal', 'public'] {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.959065] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] cyborg.version = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.959247] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] database.backend = sqlalchemy {{(pid=61839) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.959417] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] database.connection = **** {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.959583] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] database.connection_debug = 0 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.959750] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] database.connection_parameters = {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.959911] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] database.connection_recycle_time = 3600 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.960081] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] database.connection_trace = False {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.960247] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] database.db_inc_retry_interval = True {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.960408] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] database.db_max_retries = 20 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.960568] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] database.db_max_retry_interval = 10 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.960727] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] database.db_retry_interval = 1 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.960883] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] database.max_overflow = 50 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.961051] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] database.max_pool_size = 5 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.961214] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] database.max_retries = 10 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.961378] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] database.mysql_sql_mode = TRADITIONAL {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.961533] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] database.mysql_wsrep_sync_wait = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} 
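For context on how records like the ones in this dump are produced (the dump itself continues after the two sketches below): oslo.config's ConfigOpts.log_opt_values() walks every registered option group and logs one "group.option = value" line per option at the requested level, masking any option registered with secret=True, which is why database.connection, cache.memcache_password, vault.root_token_id, and similar appear as ****. A minimal sketch assuming only stock oslo.config and the standard library; the group and options registered here are illustrative, not Nova's full option definitions:

```python
# Minimal sketch (not Nova's code) of the mechanism behind the dump above:
# ConfigOpts.log_opt_values() emits one "group.option = value" line per
# registered option, masking options registered with secret=True as '****'.
import logging

from oslo_config import cfg

logging.basicConfig(level=logging.DEBUG)
LOG = logging.getLogger(__name__)

CONF = cfg.ConfigOpts()
database_group = cfg.OptGroup('database')
CONF.register_group(database_group)
CONF.register_opts(
    [
        # secret=True is why the real log shows "database.connection = ****".
        cfg.StrOpt('connection', secret=True),
        cfg.IntOpt('max_pool_size', default=5),
        cfg.IntOpt('max_retries', default=10),
    ],
    group=database_group,
)

CONF(args=[], project='example')

# Emits records equivalent in content to the log_opt_values lines above.
CONF.log_opt_values(LOG, logging.DEBUG)
```

Conversely, when auditing a deployment from a log like this one, the records can be parsed back into a "group.option -> value" map. A hedged sketch under the assumption that the record layout matches the lines above exactly; parse_opt_records and its regular expression are illustrative helpers, not part of any OpenStack tool:

```python
# Illustrative post-processing sketch: extract option/value pairs from
# "log_opt_values" records with the layout seen in this log. Adjust the
# pattern if a deployment's log format differs.
import re

RECORD = re.compile(
    r"\[\s*(?P<ts>\d+\.\d+)\]\s+env\[\d+\]:\s+DEBUG\s+oslo_service\.service\s+"
    r"\[.*?\]\s+(?P<opt>[A-Za-z0-9_.]+)\s*=\s*(?P<val>.*?)\s*"
    r"\{\{\(pid=\d+\)\s+log_opt_values"
)

def parse_opt_records(text: str) -> dict[str, str]:
    """Map 'group.option' -> logged value ('' for unset, '****' for secrets)."""
    return {m.group("opt"): m.group("val") for m in RECORD.finditer(text)}

if __name__ == "__main__":
    sample = (
        "[ 469.961051] env[61839]: DEBUG oslo_service.service "
        "[None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] "
        "database.max_pool_size = 5 {{(pid=61839) log_opt_values "
        "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}"
    )
    print(parse_opt_records(sample))  # {'database.max_pool_size': '5'}
```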
[ 469.961688] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] database.pool_timeout = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.961845] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] database.retry_interval = 10 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.962009] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] database.slave_connection = **** {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.962173] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] database.sqlite_synchronous = True {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.962332] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] database.use_db_reconnect = False {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.962508] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] api_database.backend = sqlalchemy {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.962673] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] api_database.connection = **** {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.962835] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] api_database.connection_debug = 0 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.963009] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] api_database.connection_parameters = {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.963178] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] api_database.connection_recycle_time = 3600 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.963340] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] api_database.connection_trace = False {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.963497] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] api_database.db_inc_retry_interval = True {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.963655] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] api_database.db_max_retries = 20 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.963812] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] api_database.db_max_retry_interval = 10 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.963969] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] api_database.db_retry_interval = 1 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.964139] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] api_database.max_overflow = 50 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.964300] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] api_database.max_pool_size = 5 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.964459] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] api_database.max_retries = 10 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.964625] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] api_database.mysql_sql_mode = TRADITIONAL {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.964781] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] api_database.mysql_wsrep_sync_wait = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.965184] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] api_database.pool_timeout = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.965184] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] api_database.retry_interval = 10 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.965264] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] api_database.slave_connection = **** {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.965402] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] api_database.sqlite_synchronous = True {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.965576] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] devices.enabled_mdev_types = [] {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.965749] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] ephemeral_storage_encryption.cipher = aes-xts-plain64 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.965917] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] ephemeral_storage_encryption.default_format = luks {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.966093] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] ephemeral_storage_encryption.enabled = False {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.966258] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] ephemeral_storage_encryption.key_size = 512 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.966425] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] glance.api_servers = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.966585] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] glance.cafile = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.966744] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] glance.certfile = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.966905] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] glance.collect_timing = False {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.967070] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] glance.connect_retries = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.967233] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] glance.connect_retry_delay = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.967393] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] glance.debug = False {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.967556] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] glance.default_trusted_certificate_ids = [] {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.967716] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] glance.enable_certificate_validation = False {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.967877] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] glance.enable_rbd_download = False {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.968044] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] glance.endpoint_override = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.968214] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] glance.insecure = False {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.968376] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] glance.keyfile = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.968532] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] glance.max_version = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.968688] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] glance.min_version = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.968847] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] glance.num_retries = 3 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.969022] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] glance.rbd_ceph_conf = {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.969193] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] glance.rbd_connect_timeout = 5 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.969357] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] glance.rbd_pool = {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.969521] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] glance.rbd_user = {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.969681] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] glance.region_name = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.969840] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] glance.retriable_status_codes = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.969997] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] glance.service_name = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.970225] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] glance.service_type = image {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.970344] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] glance.split_loggers = False {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.970507] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] glance.status_code_retries = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.970665] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] glance.status_code_retry_delay = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.970821] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] glance.timeout = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.970998] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] glance.valid_interfaces = ['internal', 'public'] {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.971177] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] glance.verify_glance_signatures = False {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.971342] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] glance.version = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.971529] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] guestfs.debug = False {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.971698] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] mks.enabled = False {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.972068] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] mks.mksproxy_base_url = http://127.0.0.1:6090/ {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.972264] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] image_cache.manager_interval = 2400 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.972433] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] image_cache.precache_concurrency = 1 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.972602] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] image_cache.remove_unused_base_images = True {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.972767] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] image_cache.remove_unused_original_minimum_age_seconds = 86400 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.972931] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] image_cache.remove_unused_resized_minimum_age_seconds = 3600 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.973116] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] image_cache.subdirectory_name = _base {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.973294] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] ironic.api_max_retries = 60 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.973458] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] ironic.api_retry_interval = 2 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.973614] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] ironic.auth_section = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.973772] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] ironic.auth_type = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.973928] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] ironic.cafile = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.974092] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] ironic.certfile = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.974255] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] ironic.collect_timing = False {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.974416] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] ironic.conductor_group = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.974572] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] ironic.connect_retries = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.974728] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] ironic.connect_retry_delay = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.974882] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] ironic.endpoint_override = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.975055] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] ironic.insecure = False {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.975215] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] ironic.keyfile = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.975371] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] ironic.max_version = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.975525] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] ironic.min_version = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.975688] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] ironic.peer_list = [] {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.975842] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] ironic.region_name = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.975998] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] ironic.retriable_status_codes = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.976172] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] ironic.serial_console_state_timeout = 10 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.976328] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] ironic.service_name = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.976494] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] ironic.service_type = baremetal {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.976649] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] ironic.shard = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.976807] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] ironic.split_loggers = False {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.976959] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] ironic.status_code_retries = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.977126] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] ironic.status_code_retry_delay = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.977284] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] ironic.timeout = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.977459] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] ironic.valid_interfaces = ['internal', 'public'] {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.977616] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] ironic.version = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.977796] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] key_manager.backend = nova.keymgr.conf_key_mgr.ConfKeyManager {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.977967] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] key_manager.fixed_key = **** {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.978165] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] barbican.auth_endpoint = http://localhost/identity/v3 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.978326] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] barbican.barbican_api_version = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.978486] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] barbican.barbican_endpoint = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.978651] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] barbican.barbican_endpoint_type = public {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.978807] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] barbican.barbican_region_name = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.978963] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] barbican.cafile = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.979131] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] barbican.certfile = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.979292] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] barbican.collect_timing = False {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.979451] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] barbican.insecure = False {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.979605] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] barbican.keyfile = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.979762] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] barbican.number_of_retries = 60 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.979920] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] barbican.retry_delay = 1 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.980089] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] barbican.send_service_user_token = False {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.980254] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] barbican.split_loggers = False {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.980411] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] barbican.timeout = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.980567] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] barbican.verify_ssl = True {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.980720] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] barbican.verify_ssl_path = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.980884] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] barbican_service_user.auth_section = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.981055] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] barbican_service_user.auth_type = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.981217] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] barbican_service_user.cafile = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.981377] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] barbican_service_user.certfile = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.981538] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] barbican_service_user.collect_timing = False {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.981695] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] barbican_service_user.insecure = False {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.981851] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] barbican_service_user.keyfile = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.982016] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] barbican_service_user.split_loggers = False {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.982176] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] barbican_service_user.timeout = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.982346] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] vault.approle_role_id = **** {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.982502] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] vault.approle_secret_id = **** {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.982671] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] vault.kv_mountpoint = secret {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.982827] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] vault.kv_path = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.982987] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] vault.kv_version = 2 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.983157] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] vault.namespace = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.983315] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] vault.root_token_id = **** {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.983500] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] vault.ssl_ca_crt_file = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.983634] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] vault.timeout = 60.0 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.983790] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] vault.use_ssl = False {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.983967] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] vault.vault_url = http://127.0.0.1:8200 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.984128] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] keystone.auth_section = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.984289] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] keystone.auth_type = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.984447] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] keystone.cafile = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.984600] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] keystone.certfile = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.984757] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] keystone.collect_timing = False {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.984912] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] keystone.connect_retries = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.985077] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] keystone.connect_retry_delay = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.985238] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] keystone.endpoint_override = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.985398] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] keystone.insecure = False {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.985552] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] keystone.keyfile = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.985706] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] keystone.max_version = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.985859] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] keystone.min_version = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.986024] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] keystone.region_name = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.986187] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] keystone.retriable_status_codes = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.986346] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] keystone.service_name = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.986512] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] keystone.service_type = identity {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.986671] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] keystone.split_loggers = False {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.986827] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] keystone.status_code_retries = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.986982] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] keystone.status_code_retry_delay = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.987148] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] keystone.timeout = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 469.987325] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] keystone.valid_interfaces = ['internal', 'public'] {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 
469.987485] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] keystone.version = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.987682] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] libvirt.connection_uri = {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.987839] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] libvirt.cpu_mode = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.988013] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] libvirt.cpu_model_extra_flags = [] {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.988186] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] libvirt.cpu_models = [] {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.988355] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] libvirt.cpu_power_governor_high = performance {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.988520] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] libvirt.cpu_power_governor_low = powersave {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.988681] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] libvirt.cpu_power_management = False {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.988848] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] libvirt.cpu_power_management_strategy = cpu_state {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.989026] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] libvirt.device_detach_attempts = 8 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.989180] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] libvirt.device_detach_timeout = 20 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.989345] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] libvirt.disk_cachemodes = [] {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.989502] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] libvirt.disk_prefix = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.989663] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] libvirt.enabled_perf_events = [] {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.989820] env[61839]: DEBUG oslo_service.service [None 
req-77a759a3-f235-4c73-be5e-e55473fd902a None None] libvirt.file_backed_memory = 0 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.989983] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] libvirt.gid_maps = [] {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.990153] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] libvirt.hw_disk_discard = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.990315] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] libvirt.hw_machine_type = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.990485] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] libvirt.images_rbd_ceph_conf = {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.990651] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] libvirt.images_rbd_glance_copy_poll_interval = 15 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.990815] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] libvirt.images_rbd_glance_copy_timeout = 600 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.990980] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] libvirt.images_rbd_glance_store_name = {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.991160] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] libvirt.images_rbd_pool = rbd {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.991329] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] libvirt.images_type = default {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.991489] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] libvirt.images_volume_group = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.991651] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] libvirt.inject_key = False {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.991810] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] libvirt.inject_partition = -2 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.991968] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] libvirt.inject_password = False {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.992143] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] 
libvirt.iscsi_iface = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.992303] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] libvirt.iser_use_multipath = False {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.992465] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] libvirt.live_migration_bandwidth = 0 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.992625] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] libvirt.live_migration_completion_timeout = 800 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.992785] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] libvirt.live_migration_downtime = 500 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.992945] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] libvirt.live_migration_downtime_delay = 75 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.993117] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] libvirt.live_migration_downtime_steps = 10 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.993278] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] libvirt.live_migration_inbound_addr = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.993441] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] libvirt.live_migration_permit_auto_converge = False {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.993607] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] libvirt.live_migration_permit_post_copy = False {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.993768] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] libvirt.live_migration_scheme = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.993941] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] libvirt.live_migration_timeout_action = abort {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.994115] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] libvirt.live_migration_tunnelled = False {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.994277] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] libvirt.live_migration_uri = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.994437] env[61839]: DEBUG oslo_service.service [None 
req-77a759a3-f235-4c73-be5e-e55473fd902a None None] libvirt.live_migration_with_native_tls = False {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.994592] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] libvirt.max_queues = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.994751] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] libvirt.mem_stats_period_seconds = 10 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.994980] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] libvirt.migration_inbound_addr = 10.180.1.21 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.995156] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] libvirt.nfs_mount_options = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.995461] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] libvirt.nfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.995636] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] libvirt.num_aoe_discover_tries = 3 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.995801] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] libvirt.num_iser_scan_tries = 5 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.995960] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] libvirt.num_memory_encrypted_guests = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.996134] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] libvirt.num_nvme_discover_tries = 5 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.996296] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] libvirt.num_pcie_ports = 0 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.996462] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] libvirt.num_volume_scan_tries = 5 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.996627] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] libvirt.pmem_namespaces = [] {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.996786] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] libvirt.quobyte_client_cfg = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.997078] env[61839]: DEBUG oslo_service.service [None 
req-77a759a3-f235-4c73-be5e-e55473fd902a None None] libvirt.quobyte_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.997255] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] libvirt.rbd_connect_timeout = 5 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.997421] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] libvirt.rbd_destroy_volume_retries = 12 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.997585] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] libvirt.rbd_destroy_volume_retry_interval = 5 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.997747] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] libvirt.rbd_secret_uuid = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.997907] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] libvirt.rbd_user = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.998084] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] libvirt.realtime_scheduler_priority = 1 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.998259] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] libvirt.remote_filesystem_transport = ssh {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.998422] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] libvirt.rescue_image_id = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.998579] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] libvirt.rescue_kernel_id = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.998735] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] libvirt.rescue_ramdisk_id = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.998901] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] libvirt.rng_dev_path = /dev/urandom {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.999069] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] libvirt.rx_queue_size = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.999238] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] libvirt.smbfs_mount_options = {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.999520] env[61839]: DEBUG oslo_service.service [None 
req-77a759a3-f235-4c73-be5e-e55473fd902a None None] libvirt.smbfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.999692] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] libvirt.snapshot_compression = False {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 469.999854] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] libvirt.snapshot_image_format = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.000078] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] libvirt.snapshots_directory = /opt/stack/data/nova/instances/snapshots {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.000256] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] libvirt.sparse_logical_volumes = False {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.000415] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] libvirt.swtpm_enabled = False {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.000578] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] libvirt.swtpm_group = tss {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.000741] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] libvirt.swtpm_user = tss {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.000904] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] libvirt.sysinfo_serial = unique {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.001072] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] libvirt.tb_cache_size = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.001232] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] libvirt.tx_queue_size = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.001395] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] libvirt.uid_maps = [] {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.001553] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] libvirt.use_virtio_for_bridges = True {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.001718] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] libvirt.virt_type = kvm {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.001883] env[61839]: DEBUG oslo_service.service [None 
req-77a759a3-f235-4c73-be5e-e55473fd902a None None] libvirt.volume_clear = zero {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.002052] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] libvirt.volume_clear_size = 0 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.002221] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] libvirt.volume_use_multipath = False {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.002379] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] libvirt.vzstorage_cache_path = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.002544] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] libvirt.vzstorage_log_path = /var/log/vstorage/%(cluster_name)s/nova.log.gz {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.002709] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] libvirt.vzstorage_mount_group = qemu {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.002870] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] libvirt.vzstorage_mount_opts = [] {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.003042] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] libvirt.vzstorage_mount_perms = 0770 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.003322] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] libvirt.vzstorage_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.003502] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] libvirt.vzstorage_mount_user = stack {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.003692] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] libvirt.wait_soft_reboot_seconds = 120 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.003838] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] neutron.auth_section = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.004015] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] neutron.auth_type = password {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.004186] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] neutron.cafile = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.004347] env[61839]: DEBUG oslo_service.service 
[None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] neutron.certfile = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.004508] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] neutron.collect_timing = False {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.004665] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] neutron.connect_retries = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.004821] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] neutron.connect_retry_delay = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.004990] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] neutron.default_floating_pool = public {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.005163] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] neutron.endpoint_override = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.005327] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] neutron.extension_sync_interval = 600 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.005489] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] neutron.http_retries = 3 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.005648] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] neutron.insecure = False {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.005804] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] neutron.keyfile = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.005959] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] neutron.max_version = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.006139] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] neutron.metadata_proxy_shared_secret = **** {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.006299] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] neutron.min_version = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.006465] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] neutron.ovs_bridge = br-int {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.006627] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] neutron.physnets = [] {{(pid=61839) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.006795] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] neutron.region_name = RegionOne {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.006953] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] neutron.retriable_status_codes = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.007129] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] neutron.service_metadata_proxy = True {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.007288] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] neutron.service_name = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.007455] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] neutron.service_type = network {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.007614] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] neutron.split_loggers = False {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.007768] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] neutron.status_code_retries = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.007924] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] neutron.status_code_retry_delay = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.008100] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] neutron.timeout = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.008268] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] neutron.valid_interfaces = ['internal', 'public'] {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.008426] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] neutron.version = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.008593] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] notifications.bdms_in_notifications = False {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.008763] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] notifications.default_level = INFO {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.008936] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] notifications.notification_format = unversioned {{(pid=61839) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.009117] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] notifications.notify_on_state_change = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.009296] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] notifications.versioned_notifications_topics = ['versioned_notifications'] {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.009472] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] pci.alias = [] {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.009639] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] pci.device_spec = [] {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.009799] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] pci.report_in_placement = False {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.009969] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] placement.auth_section = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.010153] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] placement.auth_type = password {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.010323] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] placement.auth_url = http://10.180.1.21/identity {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.010480] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] placement.cafile = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.010635] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] placement.certfile = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.010792] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] placement.collect_timing = False {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.010948] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] placement.connect_retries = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.011115] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] placement.connect_retry_delay = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.011275] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] placement.default_domain_id = None {{(pid=61839) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.011429] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] placement.default_domain_name = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.011584] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] placement.domain_id = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.011739] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] placement.domain_name = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.011892] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] placement.endpoint_override = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.012057] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] placement.insecure = False {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.012216] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] placement.keyfile = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.012370] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] placement.max_version = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.012521] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] placement.min_version = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.012682] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] placement.password = **** {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.012836] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] placement.project_domain_id = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.012999] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] placement.project_domain_name = Default {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.013179] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] placement.project_id = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.013351] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] placement.project_name = service {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.013517] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] placement.region_name = RegionOne {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.013677] 
env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] placement.retriable_status_codes = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.013836] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] placement.service_name = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.014009] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] placement.service_type = placement {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.014178] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] placement.split_loggers = False {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.014340] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] placement.status_code_retries = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.014494] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] placement.status_code_retry_delay = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.014649] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] placement.system_scope = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.014802] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] placement.timeout = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.014955] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] placement.trust_id = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.015123] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] placement.user_domain_id = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.015289] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] placement.user_domain_name = Default {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.015446] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] placement.user_id = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.015616] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] placement.username = nova {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.015792] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] placement.valid_interfaces = ['internal', 'public'] {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.015948] env[61839]: DEBUG oslo_service.service [None 
req-77a759a3-f235-4c73-be5e-e55473fd902a None None] placement.version = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.016137] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] quota.cores = 20 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.016300] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] quota.count_usage_from_placement = False {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.016469] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] quota.driver = nova.quota.DbQuotaDriver {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.016640] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] quota.injected_file_content_bytes = 10240 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.016804] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] quota.injected_file_path_length = 255 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.016969] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] quota.injected_files = 5 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.017146] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] quota.instances = 10 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.017313] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] quota.key_pairs = 100 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.017477] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] quota.metadata_items = 128 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.017638] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] quota.ram = 51200 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.017797] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] quota.recheck_quota = True {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.017969] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] quota.server_group_members = 10 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.018151] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] quota.server_groups = 10 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.018326] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] scheduler.discover_hosts_in_cells_interval = -1 {{(pid=61839) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.018489] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] scheduler.enable_isolated_aggregate_filtering = False {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.018647] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] scheduler.image_metadata_prefilter = False {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.018807] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] scheduler.limit_tenants_to_placement_aggregate = False {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.018968] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] scheduler.max_attempts = 3 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.019139] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] scheduler.max_placement_results = 1000 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.019303] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] scheduler.placement_aggregate_required_for_tenants = False {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.019464] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] scheduler.query_placement_for_image_type_support = False {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.019622] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] scheduler.query_placement_for_routed_network_aggregates = False {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.019791] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] scheduler.workers = 2 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.019959] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] filter_scheduler.aggregate_image_properties_isolation_namespace = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.020139] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] filter_scheduler.aggregate_image_properties_isolation_separator = . 
{{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.020320] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] filter_scheduler.available_filters = ['nova.scheduler.filters.all_filters'] {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.020488] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] filter_scheduler.build_failure_weight_multiplier = 1000000.0 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.020649] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] filter_scheduler.cpu_weight_multiplier = 1.0 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.020809] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] filter_scheduler.cross_cell_move_weight_multiplier = 1000000.0 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.020969] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] filter_scheduler.disk_weight_multiplier = 1.0 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.021172] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] filter_scheduler.enabled_filters = ['ComputeFilter', 'ComputeCapabilitiesFilter', 'ImagePropertiesFilter', 'ServerGroupAntiAffinityFilter', 'ServerGroupAffinityFilter', 'SameHostFilter', 'DifferentHostFilter'] {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.021340] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] filter_scheduler.host_subset_size = 1 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.021506] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] filter_scheduler.hypervisor_version_weight_multiplier = 1.0 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.021664] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] filter_scheduler.image_properties_default_architecture = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.021826] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] filter_scheduler.io_ops_weight_multiplier = -1.0 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.021989] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] filter_scheduler.isolated_hosts = [] {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.022166] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] filter_scheduler.isolated_images = [] {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.022328] env[61839]: DEBUG oslo_service.service [None 
req-77a759a3-f235-4c73-be5e-e55473fd902a None None] filter_scheduler.max_instances_per_host = 50 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.022491] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] filter_scheduler.max_io_ops_per_host = 8 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.022654] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] filter_scheduler.num_instances_weight_multiplier = 0.0 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.022815] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] filter_scheduler.pci_in_placement = False {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.022974] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] filter_scheduler.pci_weight_multiplier = 1.0 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.023146] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] filter_scheduler.ram_weight_multiplier = 1.0 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.023307] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] filter_scheduler.restrict_isolated_hosts_to_isolated_images = True {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.023467] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] filter_scheduler.shuffle_best_same_weighed_hosts = False {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.023627] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] filter_scheduler.soft_affinity_weight_multiplier = 1.0 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.023785] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] filter_scheduler.soft_anti_affinity_weight_multiplier = 1.0 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.023943] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] filter_scheduler.track_instance_changes = True {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.024132] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] filter_scheduler.weight_classes = ['nova.scheduler.weights.all_weighers'] {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.024303] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] metrics.required = True {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.024468] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] metrics.weight_multiplier = 1.0 
{{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.024629] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] metrics.weight_of_unavailable = -10000.0 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.024792] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] metrics.weight_setting = [] {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.025119] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] serial_console.base_url = ws://127.0.0.1:6083/ {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.025298] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] serial_console.enabled = False {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.025474] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] serial_console.port_range = 10000:20000 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.025644] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] serial_console.proxyclient_address = 127.0.0.1 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.025811] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] serial_console.serialproxy_host = 0.0.0.0 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.025977] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] serial_console.serialproxy_port = 6083 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.026157] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] service_user.auth_section = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.026329] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] service_user.auth_type = password {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.026491] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] service_user.cafile = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.026648] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] service_user.certfile = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.026809] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] service_user.collect_timing = False {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.026967] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] service_user.insecure = False {{(pid=61839) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.027137] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] service_user.keyfile = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.027306] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] service_user.send_service_user_token = True {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.027471] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] service_user.split_loggers = False {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.027641] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] service_user.timeout = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.027809] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] spice.agent_enabled = True {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.027970] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] spice.enabled = False {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.028292] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] spice.html5proxy_base_url = http://127.0.0.1:6082/spice_auto.html {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.028489] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] spice.html5proxy_host = 0.0.0.0 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.028659] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] spice.html5proxy_port = 6082 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.028819] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] spice.image_compression = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.028974] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] spice.jpeg_compression = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.029145] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] spice.playback_compression = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.029306] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] spice.require_secure = False {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.029476] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] spice.server_listen = 127.0.0.1 {{(pid=61839) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.029643] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] spice.server_proxyclient_address = 127.0.0.1 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.029803] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] spice.streaming_mode = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.029959] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] spice.zlib_compression = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.030139] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] upgrade_levels.baseapi = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.030313] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] upgrade_levels.compute = auto {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.030476] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] upgrade_levels.conductor = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.030633] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] upgrade_levels.scheduler = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.030797] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] vendordata_dynamic_auth.auth_section = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.030956] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] vendordata_dynamic_auth.auth_type = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.031124] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] vendordata_dynamic_auth.cafile = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.031283] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] vendordata_dynamic_auth.certfile = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.031442] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] vendordata_dynamic_auth.collect_timing = False {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.031601] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] vendordata_dynamic_auth.insecure = False {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.031755] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] vendordata_dynamic_auth.keyfile = None {{(pid=61839) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.031913] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] vendordata_dynamic_auth.split_loggers = False {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.032077] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] vendordata_dynamic_auth.timeout = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.032252] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] vmware.api_retry_count = 10 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.032414] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] vmware.ca_file = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.032581] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] vmware.cache_prefix = devstack-image-cache {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.032745] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] vmware.cluster_name = testcl1 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.032908] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] vmware.connection_pool_size = 10 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.033077] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] vmware.console_delay_seconds = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.033315] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] vmware.datastore_regex = ^datastore.* {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.033446] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] vmware.host_ip = vc1.osci.c.eu-de-1.cloud.sap {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.033619] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] vmware.host_password = **** {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.033784] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] vmware.host_port = 443 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.033995] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] vmware.host_username = administrator@vsphere.local {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.034127] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] vmware.insecure = True {{(pid=61839) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.034289] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] vmware.integration_bridge = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.034459] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] vmware.maximum_objects = 100 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.034611] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] vmware.pbm_default_policy = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.034769] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] vmware.pbm_enabled = False {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.034925] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] vmware.pbm_wsdl_location = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.035101] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] vmware.serial_log_dir = /opt/vmware/vspc {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.035264] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] vmware.serial_port_proxy_uri = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.035423] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] vmware.serial_port_service_uri = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.035587] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] vmware.task_poll_interval = 0.5 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.035757] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] vmware.use_linked_clone = False {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.035925] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] vmware.vnc_keymap = en-us {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.036096] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] vmware.vnc_port = 5900 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.036258] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] vmware.vnc_port_total = 10000 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.036441] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] vnc.auth_schemes = ['none'] {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.036614] 
env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] vnc.enabled = False {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.036895] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] vnc.novncproxy_base_url = http://127.0.0.1:6080/vnc_auto.html {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.037089] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] vnc.novncproxy_host = 0.0.0.0 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.037262] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] vnc.novncproxy_port = 6080 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.037438] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] vnc.server_listen = 127.0.0.1 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.037608] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] vnc.server_proxyclient_address = 127.0.0.1 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.037768] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] vnc.vencrypt_ca_certs = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.037927] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] vnc.vencrypt_client_cert = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.038093] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] vnc.vencrypt_client_key = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.038273] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] workarounds.disable_compute_service_check_for_ffu = False {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.038435] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] workarounds.disable_deep_image_inspection = False {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.038593] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] workarounds.disable_fallback_pcpu_query = False {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.038751] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] workarounds.disable_group_policy_check_upcall = False {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.038909] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] workarounds.disable_libvirt_livesnapshot = False {{(pid=61839) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.039079] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] workarounds.disable_rootwrap = False {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.039242] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] workarounds.enable_numa_live_migration = False {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.039402] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] workarounds.enable_qemu_monitor_announce_self = False {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.039561] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] workarounds.ensure_libvirt_rbd_instance_dir_cleanup = False {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.039718] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] workarounds.handle_virt_lifecycle_events = True {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.039875] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] workarounds.libvirt_disable_apic = False {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.040041] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] workarounds.never_download_image_if_on_rbd = False {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.040216] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] workarounds.qemu_monitor_announce_self_count = 3 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.040375] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] workarounds.qemu_monitor_announce_self_interval = 1 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.040534] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] workarounds.reserve_disk_resource_for_image_cache = False {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.040693] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] workarounds.skip_cpu_compare_at_startup = False {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.040852] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] workarounds.skip_cpu_compare_on_dest = False {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.041018] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] workarounds.skip_hypervisor_version_check_on_lm = False {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.041179] 
env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] workarounds.skip_reserve_in_use_ironic_nodes = False {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.041334] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] workarounds.unified_limits_count_pcpu_as_vcpu = False {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.041494] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] workarounds.wait_for_vif_plugged_event_during_hard_reboot = [] {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.041675] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] wsgi.api_paste_config = /etc/nova/api-paste.ini {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.041840] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] wsgi.client_socket_timeout = 900 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.042010] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] wsgi.default_pool_size = 1000 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.042183] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] wsgi.keep_alive = True {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.042349] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] wsgi.max_header_line = 16384 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.042512] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] wsgi.secure_proxy_ssl_header = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.042669] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] wsgi.ssl_ca_file = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.042829] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] wsgi.ssl_cert_file = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.042987] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] wsgi.ssl_key_file = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.043162] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] wsgi.tcp_keepidle = 600 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.043341] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] wsgi.wsgi_log_format = %(client_ip)s "%(request_line)s" status: %(status_code)s len: %(body_length)s time: %(wall_seconds).7f {{(pid=61839) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.043508] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] zvm.ca_file = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.043666] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] zvm.cloud_connector_url = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.043952] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] zvm.image_tmp_path = /opt/stack/data/n-cpu-1/images {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.044136] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] zvm.reachable_timeout = 300 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.044319] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] oslo_policy.enforce_new_defaults = True {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.044712] env[61839]: WARNING oslo_config.cfg [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] Deprecated: Option "enforce_scope" from group "oslo_policy" is deprecated for removal (This configuration was added temporarily to facilitate a smooth transition to the new RBAC. OpenStack will always enforce scope checks. This configuration option is deprecated and will be removed in the 2025.2 cycle.). Its value may be silently ignored in the future. 
[ 470.044892] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] oslo_policy.enforce_scope = True {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.045080] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] oslo_policy.policy_default_rule = default {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.045275] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] oslo_policy.policy_dirs = ['policy.d'] {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.045457] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] oslo_policy.policy_file = policy.yaml {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.045636] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] oslo_policy.remote_content_type = application/x-www-form-urlencoded {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.045796] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] oslo_policy.remote_ssl_ca_crt_file = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.045956] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] oslo_policy.remote_ssl_client_crt_file = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.046126] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] oslo_policy.remote_ssl_client_key_file = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.046289] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] oslo_policy.remote_ssl_verify_server_crt = False {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.046458] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] oslo_versionedobjects.fatal_exception_format_errors = False {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.046632] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] oslo_middleware.http_basic_auth_user_file = /etc/htpasswd {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.046805] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] profiler.connection_string = messaging:// {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.046970] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] profiler.enabled = False {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.047151] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] profiler.es_doc_type = notification 
{{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.047316] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] profiler.es_scroll_size = 10000 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.047482] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] profiler.es_scroll_time = 2m {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.047644] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] profiler.filter_error_trace = False {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.047810] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] profiler.hmac_keys = **** {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.047975] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] profiler.sentinel_service_name = mymaster {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.048153] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] profiler.socket_timeout = 0.1 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.048315] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] profiler.trace_requests = False {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.048475] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] profiler.trace_sqlalchemy = False {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.048648] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] profiler_jaeger.process_tags = {} {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.048804] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] profiler_jaeger.service_name_prefix = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.048967] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] profiler_otlp.service_name_prefix = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.049141] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] remote_debug.host = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.049303] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] remote_debug.port = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.049481] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] oslo_messaging_rabbit.amqp_auto_delete = False {{(pid=61839) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.049644] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] oslo_messaging_rabbit.amqp_durable_queues = False {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.049805] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] oslo_messaging_rabbit.conn_pool_min_size = 2 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.049966] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] oslo_messaging_rabbit.conn_pool_ttl = 1200 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.050140] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] oslo_messaging_rabbit.direct_mandatory_flag = True {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.050303] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] oslo_messaging_rabbit.enable_cancel_on_failover = False {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.050463] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] oslo_messaging_rabbit.heartbeat_in_pthread = False {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.050624] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] oslo_messaging_rabbit.heartbeat_rate = 3 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.050783] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] oslo_messaging_rabbit.heartbeat_timeout_threshold = 60 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.050953] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] oslo_messaging_rabbit.hostname = devstack {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.051124] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] oslo_messaging_rabbit.kombu_compression = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.051296] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] oslo_messaging_rabbit.kombu_failover_strategy = round-robin {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.051463] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] oslo_messaging_rabbit.kombu_missing_consumer_retry_timeout = 60 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.051632] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] oslo_messaging_rabbit.kombu_reconnect_delay = 1.0 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.051801] 
env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] oslo_messaging_rabbit.processname = nova-compute {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.051964] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] oslo_messaging_rabbit.rabbit_ha_queues = False {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.052136] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] oslo_messaging_rabbit.rabbit_interval_max = 30 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.052309] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] oslo_messaging_rabbit.rabbit_login_method = AMQPLAIN {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.052470] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] oslo_messaging_rabbit.rabbit_qos_prefetch_count = 0 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.052628] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] oslo_messaging_rabbit.rabbit_quorum_delivery_limit = 0 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.052790] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_bytes = 0 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.052951] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_length = 0 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.053121] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] oslo_messaging_rabbit.rabbit_quorum_queue = False {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.053289] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] oslo_messaging_rabbit.rabbit_retry_backoff = 2 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.053447] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] oslo_messaging_rabbit.rabbit_retry_interval = 1 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.053603] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] oslo_messaging_rabbit.rabbit_stream_fanout = False {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.053760] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] oslo_messaging_rabbit.rabbit_transient_queues_ttl = 1800 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.053917] env[61839]: DEBUG oslo_service.service [None 
req-77a759a3-f235-4c73-be5e-e55473fd902a None None] oslo_messaging_rabbit.rabbit_transient_quorum_queue = False {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.054093] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] oslo_messaging_rabbit.rpc_conn_pool_size = 30 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.054262] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] oslo_messaging_rabbit.ssl = False {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.054433] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] oslo_messaging_rabbit.ssl_ca_file = {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.054600] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] oslo_messaging_rabbit.ssl_cert_file = {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.054758] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] oslo_messaging_rabbit.ssl_enforce_fips_mode = False {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.054923] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] oslo_messaging_rabbit.ssl_key_file = {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.055098] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] oslo_messaging_rabbit.ssl_version = {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.055261] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] oslo_messaging_rabbit.use_queue_manager = False {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.055445] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] oslo_messaging_notifications.driver = ['messagingv2'] {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.055606] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] oslo_messaging_notifications.retry = -1 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.055785] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] oslo_messaging_notifications.topics = ['notifications'] {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.055958] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] oslo_messaging_notifications.transport_url = **** {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.056140] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] oslo_limit.auth_section = None {{(pid=61839) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.056304] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] oslo_limit.auth_type = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.056465] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] oslo_limit.cafile = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.056623] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] oslo_limit.certfile = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.056783] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] oslo_limit.collect_timing = False {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.056940] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] oslo_limit.connect_retries = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.057108] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] oslo_limit.connect_retry_delay = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.057267] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] oslo_limit.endpoint_id = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.057423] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] oslo_limit.endpoint_override = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.057581] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] oslo_limit.insecure = False {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.057733] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] oslo_limit.keyfile = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.057885] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] oslo_limit.max_version = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.058048] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] oslo_limit.min_version = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.058205] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] oslo_limit.region_name = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.058373] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] oslo_limit.retriable_status_codes = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.058538] 
env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] oslo_limit.service_name = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.058694] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] oslo_limit.service_type = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.058855] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] oslo_limit.split_loggers = False {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.059016] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] oslo_limit.status_code_retries = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.059183] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] oslo_limit.status_code_retry_delay = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.059338] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] oslo_limit.timeout = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.059492] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] oslo_limit.valid_interfaces = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.059643] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] oslo_limit.version = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.059804] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] oslo_reports.file_event_handler = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.059964] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] oslo_reports.file_event_handler_interval = 1 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.060133] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] oslo_reports.log_dir = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.060304] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] vif_plug_linux_bridge_privileged.capabilities = [12] {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.060461] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] vif_plug_linux_bridge_privileged.group = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.060617] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] vif_plug_linux_bridge_privileged.helper_command = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.060780] 
env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] vif_plug_linux_bridge_privileged.logger_name = oslo_privsep.daemon {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.060939] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] vif_plug_linux_bridge_privileged.thread_pool_size = 8 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.061107] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] vif_plug_linux_bridge_privileged.user = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.061276] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] vif_plug_ovs_privileged.capabilities = [12, 1] {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.061433] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] vif_plug_ovs_privileged.group = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.061589] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] vif_plug_ovs_privileged.helper_command = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.061749] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] vif_plug_ovs_privileged.logger_name = oslo_privsep.daemon {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.061905] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] vif_plug_ovs_privileged.thread_pool_size = 8 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.062070] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] vif_plug_ovs_privileged.user = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.062242] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] os_vif_linux_bridge.flat_interface = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.062420] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] os_vif_linux_bridge.forward_bridge_interface = ['all'] {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.062591] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] os_vif_linux_bridge.iptables_bottom_regex = {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.062760] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] os_vif_linux_bridge.iptables_drop_action = DROP {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.062929] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] 
os_vif_linux_bridge.iptables_top_regex = {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.063106] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] os_vif_linux_bridge.network_device_mtu = 1500 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.063273] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] os_vif_linux_bridge.use_ipv6 = False {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.063437] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] os_vif_linux_bridge.vlan_interface = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.063613] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] os_vif_ovs.default_qos_type = linux-noop {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.063782] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] os_vif_ovs.isolate_vif = False {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.063948] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] os_vif_ovs.network_device_mtu = 1500 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.064124] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] os_vif_ovs.ovs_vsctl_timeout = 120 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.064297] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] os_vif_ovs.ovsdb_connection = tcp:127.0.0.1:6640 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.064473] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] os_vif_ovs.ovsdb_interface = native {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.064635] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] os_vif_ovs.per_port_bridge = False {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.064824] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] privsep_osbrick.capabilities = [21] {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.064953] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] privsep_osbrick.group = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.065119] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] privsep_osbrick.helper_command = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.065282] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None 
None] privsep_osbrick.logger_name = os_brick.privileged {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.065442] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] privsep_osbrick.thread_pool_size = 8 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.065596] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] privsep_osbrick.user = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.065764] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] nova_sys_admin.capabilities = [0, 1, 2, 3, 12, 21] {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.065920] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] nova_sys_admin.group = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.066085] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] nova_sys_admin.helper_command = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.066252] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] nova_sys_admin.logger_name = oslo_privsep.daemon {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.066414] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] nova_sys_admin.thread_pool_size = 8 {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.066567] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] nova_sys_admin.user = None {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.066692] env[61839]: DEBUG oslo_service.service [None req-77a759a3-f235-4c73-be5e-e55473fd902a None None] ******************************************************************************** {{(pid=61839) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2830}}
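The block that ends at the row of asterisks above is oslo.config's standard startup dump: nova-compute logs every registered option, group by group, before doing any real work. A minimal sketch of the mechanism, using one option group from the dump; the explicit registration here is illustrative (nova registers these options internally):

```python
# Sketch (not nova's actual startup code) of how the option dump above is
# produced. oslo.config's ConfigOpts.log_opt_values() walks every registered
# group and option and logs one "group.option = value" line per option,
# bracketed by rows of asterisks -- exactly the block ending above.
import logging

from oslo_config import cfg

CONF = cfg.CONF
# One representative option from the dump; nova registers hundreds of these.
CONF.register_opts(
    [cfg.IntOpt('thread_pool_size', default=8)], group='nova_sys_admin')

logging.basicConfig(level=logging.DEBUG)
LOG = logging.getLogger(__name__)

CONF([])  # parse an empty command line so the config object is usable
CONF.log_opt_values(LOG, logging.DEBUG)
```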
[ 470.067190] env[61839]: INFO nova.service [-] Starting compute node (version 0.0.1) [ 470.570817] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-e3fc9926-085b-4592-8a98-7b00960707ae None None] Getting list of instances from cluster (obj){ [ 470.570817] env[61839]: value = "domain-c8" [ 470.570817] env[61839]: _type = "ClusterComputeResource" [ 470.570817] env[61839]: } {{(pid=61839) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 470.572040] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e9e33ce-547b-48af-a6a8-aa7583ef788e {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 470.580513] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-e3fc9926-085b-4592-8a98-7b00960707ae None None] Got total of 0 instances {{(pid=61839) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 470.581036] env[61839]: WARNING nova.virt.vmwareapi.driver [None req-e3fc9926-085b-4592-8a98-7b00960707ae None None]
The vmwareapi driver is not tested by the OpenStack project nor does it have clear maintainer(s) and thus its quality can not be ensured. It should be considered experimental and may be removed in a future release. If you are using the driver in production please let us know via the openstack-discuss mailing list. [ 470.581549] env[61839]: INFO nova.virt.node [None req-e3fc9926-085b-4592-8a98-7b00960707ae None None] Generated node identity cef329e6-1ccd-42a8-bbc4-109a06d1c908 [ 470.581774] env[61839]: INFO nova.virt.node [None req-e3fc9926-085b-4592-8a98-7b00960707ae None None] Wrote node identity cef329e6-1ccd-42a8-bbc4-109a06d1c908 to /opt/stack/data/n-cpu-1/compute_id [ 471.087065] env[61839]: WARNING nova.compute.manager [None req-e3fc9926-085b-4592-8a98-7b00960707ae None None] Compute nodes ['cef329e6-1ccd-42a8-bbc4-109a06d1c908'] for host cpu-1 were not found in the database. If this is the first time this service is starting on this host, then you can ignore this warning. [ 472.093063] env[61839]: INFO nova.compute.manager [None req-e3fc9926-085b-4592-8a98-7b00960707ae None None] Looking for unclaimed instances stuck in BUILDING status for nodes managed by this host [ 473.103284] env[61839]: WARNING nova.compute.manager [None req-e3fc9926-085b-4592-8a98-7b00960707ae None None] No compute node record found for host cpu-1. If this is the first time this service is starting on this host, then you can ignore this warning.: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host cpu-1 could not be found. [ 473.103633] env[61839]: DEBUG oslo_concurrency.lockutils [None req-e3fc9926-085b-4592-8a98-7b00960707ae None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 473.103835] env[61839]: DEBUG oslo_concurrency.lockutils [None req-e3fc9926-085b-4592-8a98-7b00960707ae None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 473.103987] env[61839]: DEBUG oslo_concurrency.lockutils [None req-e3fc9926-085b-4592-8a98-7b00960707ae None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 473.104158] env[61839]: DEBUG nova.compute.resource_tracker [None req-e3fc9926-085b-4592-8a98-7b00960707ae None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61839) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 473.105080] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c77de475-1acb-4792-8740-75b218acaeb8 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 473.113338] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1b666a8-632a-4d05-850d-1fd24ffbaeb9 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 473.127054] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with
opID=oslo.vmware-13e16ce1-91f3-4bcf-9dbc-47029ec2d07c {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 473.133212] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4902faf-389a-4b95-9ce1-197e2805a2bb {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 473.161787] env[61839]: DEBUG nova.compute.resource_tracker [None req-e3fc9926-085b-4592-8a98-7b00960707ae None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181436MB free_disk=135GB free_vcpus=48 pci_devices=None {{(pid=61839) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 473.161913] env[61839]: DEBUG oslo_concurrency.lockutils [None req-e3fc9926-085b-4592-8a98-7b00960707ae None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 473.162114] env[61839]: DEBUG oslo_concurrency.lockutils [None req-e3fc9926-085b-4592-8a98-7b00960707ae None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 473.664731] env[61839]: WARNING nova.compute.resource_tracker [None req-e3fc9926-085b-4592-8a98-7b00960707ae None None] No compute node record for cpu-1:cef329e6-1ccd-42a8-bbc4-109a06d1c908: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host cef329e6-1ccd-42a8-bbc4-109a06d1c908 could not be found. [ 474.168602] env[61839]: INFO nova.compute.resource_tracker [None req-e3fc9926-085b-4592-8a98-7b00960707ae None None] Compute node record created for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 with uuid: cef329e6-1ccd-42a8-bbc4-109a06d1c908 [ 475.676597] env[61839]: DEBUG nova.compute.resource_tracker [None req-e3fc9926-085b-4592-8a98-7b00960707ae None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=61839) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 475.676943] env[61839]: DEBUG nova.compute.resource_tracker [None req-e3fc9926-085b-4592-8a98-7b00960707ae None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=61839) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 475.826153] env[61839]: INFO nova.scheduler.client.report [None req-e3fc9926-085b-4592-8a98-7b00960707ae None None] [req-15f12b11-0695-40d3-b213-5a1853b9ae5b] Created resource provider record via placement API for resource provider with UUID cef329e6-1ccd-42a8-bbc4-109a06d1c908 and name domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28. 
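The paired "Acquiring lock … / Lock … acquired … waited / Lock … released … held" lines that recur throughout this log (lockutils.py:402/407/421) come from oslo.concurrency's lock wrapper, which times how long each caller waited for and held a named semaphore. A minimal sketch of the pattern, assuming a free-standing function in place of nova's resource-tracker methods:

```python
# Sketch of the locking pattern behind the "Acquiring lock ... acquired ...
# released" triplets in this log. nova wraps resource-tracker methods with
# oslo.concurrency's synchronized decorator; the generated wrapper is the
# "inner" function named in the log lines, and it logs wait and hold times.
from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def clean_compute_node_cache():
    # Everything here runs with the "compute_resources" semaphore held,
    # which is why claims, audits and cache cleanup never interleave.
    pass

clean_compute_node_cache()
```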
[ 475.843222] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bfb5e13-c22c-436f-8b11-ec1044032216 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 475.850834] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7d2cd25-c061-4a17-996a-678c5226c991 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 475.880012] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-164c6d9d-9338-48b3-80aa-37d2ada7f75e {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 475.886813] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-575e5533-dfa5-4a0c-a676-6da0413ea0d5 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 475.899297] env[61839]: DEBUG nova.compute.provider_tree [None req-e3fc9926-085b-4592-8a98-7b00960707ae None None] Updating inventory in ProviderTree for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 476.436496] env[61839]: DEBUG nova.scheduler.client.report [None req-e3fc9926-085b-4592-8a98-7b00960707ae None None] Updated inventory for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 with generation 0 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 476.436729] env[61839]: DEBUG nova.compute.provider_tree [None req-e3fc9926-085b-4592-8a98-7b00960707ae None None] Updating resource provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 generation from 0 to 1 during operation: update_inventory {{(pid=61839) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 476.436870] env[61839]: DEBUG nova.compute.provider_tree [None req-e3fc9926-085b-4592-8a98-7b00960707ae None None] Updating inventory in ProviderTree for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 476.486352] env[61839]: DEBUG nova.compute.provider_tree [None req-e3fc9926-085b-4592-8a98-7b00960707ae None None] Updating 
resource provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 generation from 1 to 2 during operation: update_traits {{(pid=61839) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 476.991459] env[61839]: DEBUG nova.compute.resource_tracker [None req-e3fc9926-085b-4592-8a98-7b00960707ae None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61839) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 476.991896] env[61839]: DEBUG oslo_concurrency.lockutils [None req-e3fc9926-085b-4592-8a98-7b00960707ae None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 3.830s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 476.991896] env[61839]: DEBUG nova.service [None req-e3fc9926-085b-4592-8a98-7b00960707ae None None] Creating RPC server for service compute {{(pid=61839) start /opt/stack/nova/nova/service.py:186}} [ 477.005519] env[61839]: DEBUG nova.service [None req-e3fc9926-085b-4592-8a98-7b00960707ae None None] Join ServiceGroup membership for this service compute {{(pid=61839) start /opt/stack/nova/nova/service.py:203}} [ 477.005727] env[61839]: DEBUG nova.servicegroup.drivers.db [None req-e3fc9926-085b-4592-8a98-7b00960707ae None None] DB_Driver: join new ServiceGroup member cpu-1 to the compute group, service = {{(pid=61839) join /opt/stack/nova/nova/servicegroup/drivers/db.py:44}}
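With startup complete (inventory written, RPC server up, servicegroup joined), it is worth unpacking what the inventory recorded above means to the scheduler. Placement treats (total - reserved) * allocation_ratio as the schedulable capacity of each resource class, while max_unit caps what a single allocation may request. Plain arithmetic, not placement's code:

```python
# Worked example of the inventory recorded above for provider
# cef329e6-1ccd-42a8-bbc4-109a06d1c908, using placement's capacity formula.
def capacity(total, reserved, allocation_ratio):
    return int((total - reserved) * allocation_ratio)

print(capacity(48, 0, 4.0))        # VCPU      -> 192 schedulable vCPUs
print(capacity(196590, 512, 1.0))  # MEMORY_MB -> 196078 MB
print(capacity(400, 0, 1.0))       # DISK_GB   -> 400 GB total, but
                                   # max_unit=16 / 65530 / 135 caps any one
                                   # instance's VCPU / MEMORY_MB / DISK_GB
```

So this one host can overcommit to 192 vCPUs, and the DISK_GB max_unit of 135 matches the 135 GB reported free in the hypervisor resource view earlier.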
[ 509.805730] env[61839]: DEBUG oslo_concurrency.lockutils [None req-12eb9dfa-b92e-47f2-a67a-cc399fb0a366 tempest-TenantUsagesTestJSON-1491461262 tempest-TenantUsagesTestJSON-1491461262-project-member] Acquiring lock "95685d36-7efa-42a9-8722-5b90d6edbce5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 509.805730] env[61839]: DEBUG oslo_concurrency.lockutils [None req-12eb9dfa-b92e-47f2-a67a-cc399fb0a366 tempest-TenantUsagesTestJSON-1491461262 tempest-TenantUsagesTestJSON-1491461262-project-member] Lock "95685d36-7efa-42a9-8722-5b90d6edbce5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 510.308874] env[61839]: DEBUG nova.compute.manager [None req-12eb9dfa-b92e-47f2-a67a-cc399fb0a366 tempest-TenantUsagesTestJSON-1491461262 tempest-TenantUsagesTestJSON-1491461262-project-member] [instance: 95685d36-7efa-42a9-8722-5b90d6edbce5] Starting instance...
{{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 510.855329] env[61839]: DEBUG oslo_concurrency.lockutils [None req-12eb9dfa-b92e-47f2-a67a-cc399fb0a366 tempest-TenantUsagesTestJSON-1491461262 tempest-TenantUsagesTestJSON-1491461262-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 510.855604] env[61839]: DEBUG oslo_concurrency.lockutils [None req-12eb9dfa-b92e-47f2-a67a-cc399fb0a366 tempest-TenantUsagesTestJSON-1491461262 tempest-TenantUsagesTestJSON-1491461262-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 510.859835] env[61839]: INFO nova.compute.claims [None req-12eb9dfa-b92e-47f2-a67a-cc399fb0a366 tempest-TenantUsagesTestJSON-1491461262 tempest-TenantUsagesTestJSON-1491461262-project-member] [instance: 95685d36-7efa-42a9-8722-5b90d6edbce5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 511.905050] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-accec403-d786-408d-b9ae-f390f56a01d6 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 511.915692] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5de734fa-96e2-4ebc-a4e6-282cc46565c5 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 511.949240] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b7bd5ae-7be6-4e45-b1b7-293b93a140d6 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 511.956624] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fc29edf-5773-4db1-a07a-f9a022cb6a0e {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 511.975116] env[61839]: DEBUG nova.compute.provider_tree [None req-12eb9dfa-b92e-47f2-a67a-cc399fb0a366 tempest-TenantUsagesTestJSON-1491461262 tempest-TenantUsagesTestJSON-1491461262-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 512.456179] env[61839]: DEBUG oslo_concurrency.lockutils [None req-71b74068-a47a-43e7-91f8-80e309405090 tempest-ServerDiagnosticsTest-192461370 tempest-ServerDiagnosticsTest-192461370-project-member] Acquiring lock "c258dbf2-be81-40e5-a11a-03dee332d3b6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 512.456650] env[61839]: DEBUG oslo_concurrency.lockutils [None req-71b74068-a47a-43e7-91f8-80e309405090 tempest-ServerDiagnosticsTest-192461370 tempest-ServerDiagnosticsTest-192461370-project-member] Lock "c258dbf2-be81-40e5-a11a-03dee332d3b6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61839) inner
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 512.478298] env[61839]: DEBUG nova.scheduler.client.report [None req-12eb9dfa-b92e-47f2-a67a-cc399fb0a366 tempest-TenantUsagesTestJSON-1491461262 tempest-TenantUsagesTestJSON-1491461262-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 512.958610] env[61839]: DEBUG nova.compute.manager [None req-71b74068-a47a-43e7-91f8-80e309405090 tempest-ServerDiagnosticsTest-192461370 tempest-ServerDiagnosticsTest-192461370-project-member] [instance: c258dbf2-be81-40e5-a11a-03dee332d3b6] Starting instance... {{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 512.989510] env[61839]: DEBUG oslo_concurrency.lockutils [None req-12eb9dfa-b92e-47f2-a67a-cc399fb0a366 tempest-TenantUsagesTestJSON-1491461262 tempest-TenantUsagesTestJSON-1491461262-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.134s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 512.990366] env[61839]: DEBUG nova.compute.manager [None req-12eb9dfa-b92e-47f2-a67a-cc399fb0a366 tempest-TenantUsagesTestJSON-1491461262 tempest-TenantUsagesTestJSON-1491461262-project-member] [instance: 95685d36-7efa-42a9-8722-5b90d6edbce5] Start building networks asynchronously for instance. 
{{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 513.496386] env[61839]: DEBUG oslo_concurrency.lockutils [None req-71b74068-a47a-43e7-91f8-80e309405090 tempest-ServerDiagnosticsTest-192461370 tempest-ServerDiagnosticsTest-192461370-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 513.496679] env[61839]: DEBUG oslo_concurrency.lockutils [None req-71b74068-a47a-43e7-91f8-80e309405090 tempest-ServerDiagnosticsTest-192461370 tempest-ServerDiagnosticsTest-192461370-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 513.502288] env[61839]: INFO nova.compute.claims [None req-71b74068-a47a-43e7-91f8-80e309405090 tempest-ServerDiagnosticsTest-192461370 tempest-ServerDiagnosticsTest-192461370-project-member] [instance: c258dbf2-be81-40e5-a11a-03dee332d3b6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 513.508185] env[61839]: DEBUG nova.compute.utils [None req-12eb9dfa-b92e-47f2-a67a-cc399fb0a366 tempest-TenantUsagesTestJSON-1491461262 tempest-TenantUsagesTestJSON-1491461262-project-member] Using /dev/sd instead of None {{(pid=61839) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 513.510856] env[61839]: DEBUG nova.compute.manager [None req-12eb9dfa-b92e-47f2-a67a-cc399fb0a366 tempest-TenantUsagesTestJSON-1491461262 tempest-TenantUsagesTestJSON-1491461262-project-member] [instance: 95685d36-7efa-42a9-8722-5b90d6edbce5] Allocating IP information in the background. {{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 513.513022] env[61839]: DEBUG nova.network.neutron [None req-12eb9dfa-b92e-47f2-a67a-cc399fb0a366 tempest-TenantUsagesTestJSON-1491461262 tempest-TenantUsagesTestJSON-1491461262-project-member] [instance: 95685d36-7efa-42a9-8722-5b90d6edbce5] allocate_for_instance() {{(pid=61839) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 514.027422] env[61839]: DEBUG nova.compute.manager [None req-12eb9dfa-b92e-47f2-a67a-cc399fb0a366 tempest-TenantUsagesTestJSON-1491461262 tempest-TenantUsagesTestJSON-1491461262-project-member] [instance: 95685d36-7efa-42a9-8722-5b90d6edbce5] Start building block device mappings for instance. 
{{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 514.616677] env[61839]: DEBUG nova.policy [None req-12eb9dfa-b92e-47f2-a67a-cc399fb0a366 tempest-TenantUsagesTestJSON-1491461262 tempest-TenantUsagesTestJSON-1491461262-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '00bc43bd709d4edf92dfb9171ff12d37', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '46d2db7be6d6430c9038494a9a4677a4', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61839) authorize /opt/stack/nova/nova/policy.py:201}} [ 514.626968] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-778d6dd6-936c-4bf4-9807-05e9e1e97ef4 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 514.637231] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7e1ad2e-e880-4036-8675-9f2c22961dd2 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 514.672056] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49812ff8-7ec5-4979-8e4c-189a87746d7c {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 514.679528] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82645d82-83e9-4e5f-aceb-906af4c6e1ba {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 514.694167] env[61839]: DEBUG nova.compute.provider_tree [None req-71b74068-a47a-43e7-91f8-80e309405090 tempest-ServerDiagnosticsTest-192461370 tempest-ServerDiagnosticsTest-192461370-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 514.946704] env[61839]: DEBUG oslo_concurrency.lockutils [None req-67c06703-2c83-4f84-94a3-1193ea8f10d4 tempest-ImagesNegativeTestJSON-313379827 tempest-ImagesNegativeTestJSON-313379827-project-member] Acquiring lock "be976871-706d-41e1-9423-55ff251a52e9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 514.947550] env[61839]: DEBUG oslo_concurrency.lockutils [None req-67c06703-2c83-4f84-94a3-1193ea8f10d4 tempest-ImagesNegativeTestJSON-313379827 tempest-ImagesNegativeTestJSON-313379827-project-member] Lock "be976871-706d-41e1-9423-55ff251a52e9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 515.045147] env[61839]: DEBUG nova.compute.manager [None req-12eb9dfa-b92e-47f2-a67a-cc399fb0a366 tempest-TenantUsagesTestJSON-1491461262 tempest-TenantUsagesTestJSON-1491461262-project-member] [instance: 95685d36-7efa-42a9-8722-5b90d6edbce5] Start spawning the instance on the hypervisor. 
{{(pid=61839) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 515.083671] env[61839]: DEBUG nova.virt.hardware [None req-12eb9dfa-b92e-47f2-a67a-cc399fb0a366 tempest-TenantUsagesTestJSON-1491461262 tempest-TenantUsagesTestJSON-1491461262-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-18T16:51:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-18T16:50:57Z,direct_url=,disk_format='vmdk',id=e497cc62-282a-4a70-9770-22d80d8a1013,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a98e034276e44534a9feace637762da7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-18T16:50:58Z,virtual_size=,visibility=), allow threads: False {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 515.083884] env[61839]: DEBUG nova.virt.hardware [None req-12eb9dfa-b92e-47f2-a67a-cc399fb0a366 tempest-TenantUsagesTestJSON-1491461262 tempest-TenantUsagesTestJSON-1491461262-project-member] Flavor limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 515.084056] env[61839]: DEBUG nova.virt.hardware [None req-12eb9dfa-b92e-47f2-a67a-cc399fb0a366 tempest-TenantUsagesTestJSON-1491461262 tempest-TenantUsagesTestJSON-1491461262-project-member] Image limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 515.084300] env[61839]: DEBUG nova.virt.hardware [None req-12eb9dfa-b92e-47f2-a67a-cc399fb0a366 tempest-TenantUsagesTestJSON-1491461262 tempest-TenantUsagesTestJSON-1491461262-project-member] Flavor pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 515.084392] env[61839]: DEBUG nova.virt.hardware [None req-12eb9dfa-b92e-47f2-a67a-cc399fb0a366 tempest-TenantUsagesTestJSON-1491461262 tempest-TenantUsagesTestJSON-1491461262-project-member] Image pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 515.084530] env[61839]: DEBUG nova.virt.hardware [None req-12eb9dfa-b92e-47f2-a67a-cc399fb0a366 tempest-TenantUsagesTestJSON-1491461262 tempest-TenantUsagesTestJSON-1491461262-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 515.084737] env[61839]: DEBUG nova.virt.hardware [None req-12eb9dfa-b92e-47f2-a67a-cc399fb0a366 tempest-TenantUsagesTestJSON-1491461262 tempest-TenantUsagesTestJSON-1491461262-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 515.084950] env[61839]: DEBUG nova.virt.hardware [None req-12eb9dfa-b92e-47f2-a67a-cc399fb0a366 tempest-TenantUsagesTestJSON-1491461262 tempest-TenantUsagesTestJSON-1491461262-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 515.085308] env[61839]: DEBUG nova.virt.hardware [None 
req-12eb9dfa-b92e-47f2-a67a-cc399fb0a366 tempest-TenantUsagesTestJSON-1491461262 tempest-TenantUsagesTestJSON-1491461262-project-member] Got 1 possible topologies {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 515.085489] env[61839]: DEBUG nova.virt.hardware [None req-12eb9dfa-b92e-47f2-a67a-cc399fb0a366 tempest-TenantUsagesTestJSON-1491461262 tempest-TenantUsagesTestJSON-1491461262-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 515.085865] env[61839]: DEBUG nova.virt.hardware [None req-12eb9dfa-b92e-47f2-a67a-cc399fb0a366 tempest-TenantUsagesTestJSON-1491461262 tempest-TenantUsagesTestJSON-1491461262-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
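The nova.virt.hardware lines above show the topology search for the m1.nano flavor: neither flavor nor image sets limits (0:0:0), so the 65536 defaults apply, and for a single vCPU the only factorisation is sockets=1, cores=1, threads=1. A simplified re-creation of that enumeration (not nova's exact code path):

```python
# Simplified sketch of the topology search logged above: enumerate every
# sockets*cores*threads factorisation of the vCPU count that fits the
# limits. For vcpus=1 under the 65536 defaults the only candidate is
# (1, 1, 1), matching "Got 1 possible topologies" in the log.
def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                        max_threads=65536):
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        for cores in range(1, min(vcpus, max_cores) + 1):
            for threads in range(1, min(vcpus, max_threads) + 1):
                if sockets * cores * threads == vcpus:
                    yield (sockets, cores, threads)

print(list(possible_topologies(1)))  # -> [(1, 1, 1)]
```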
[ 515.086764] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-370b38bf-e615-4bf0-a00e-f7e495401670 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 515.095512] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-392e7be1-64cb-4544-8f71-ee170dd6a751 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 515.112487] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96613f6c-bbf4-41af-9117-c426eb4f2c16 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 515.196367] env[61839]: DEBUG nova.network.neutron [None req-12eb9dfa-b92e-47f2-a67a-cc399fb0a366 tempest-TenantUsagesTestJSON-1491461262 tempest-TenantUsagesTestJSON-1491461262-project-member] [instance: 95685d36-7efa-42a9-8722-5b90d6edbce5] Successfully created port: cb0e2d64-499b-40f4-8a14-5de5a5754bc3 {{(pid=61839) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 515.199052] env[61839]: DEBUG nova.scheduler.client.report [None req-71b74068-a47a-43e7-91f8-80e309405090 tempest-ServerDiagnosticsTest-192461370 tempest-ServerDiagnosticsTest-192461370-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 515.450009] env[61839]: DEBUG nova.compute.manager [None req-67c06703-2c83-4f84-94a3-1193ea8f10d4 tempest-ImagesNegativeTestJSON-313379827 tempest-ImagesNegativeTestJSON-313379827-project-member] [instance: be976871-706d-41e1-9423-55ff251a52e9] Starting instance...
{{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 515.706982] env[61839]: DEBUG oslo_concurrency.lockutils [None req-71b74068-a47a-43e7-91f8-80e309405090 tempest-ServerDiagnosticsTest-192461370 tempest-ServerDiagnosticsTest-192461370-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.210s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 515.707567] env[61839]: DEBUG nova.compute.manager [None req-71b74068-a47a-43e7-91f8-80e309405090 tempest-ServerDiagnosticsTest-192461370 tempest-ServerDiagnosticsTest-192461370-project-member] [instance: c258dbf2-be81-40e5-a11a-03dee332d3b6] Start building networks asynchronously for instance. {{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 515.982971] env[61839]: DEBUG oslo_concurrency.lockutils [None req-67c06703-2c83-4f84-94a3-1193ea8f10d4 tempest-ImagesNegativeTestJSON-313379827 tempest-ImagesNegativeTestJSON-313379827-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 515.983789] env[61839]: DEBUG oslo_concurrency.lockutils [None req-67c06703-2c83-4f84-94a3-1193ea8f10d4 tempest-ImagesNegativeTestJSON-313379827 tempest-ImagesNegativeTestJSON-313379827-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 515.985210] env[61839]: INFO nova.compute.claims [None req-67c06703-2c83-4f84-94a3-1193ea8f10d4 tempest-ImagesNegativeTestJSON-313379827 tempest-ImagesNegativeTestJSON-313379827-project-member] [instance: be976871-706d-41e1-9423-55ff251a52e9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 516.216025] env[61839]: DEBUG nova.compute.utils [None req-71b74068-a47a-43e7-91f8-80e309405090 tempest-ServerDiagnosticsTest-192461370 tempest-ServerDiagnosticsTest-192461370-project-member] Using /dev/sd instead of None {{(pid=61839) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 516.218669] env[61839]: DEBUG nova.compute.manager [None req-71b74068-a47a-43e7-91f8-80e309405090 tempest-ServerDiagnosticsTest-192461370 tempest-ServerDiagnosticsTest-192461370-project-member] [instance: c258dbf2-be81-40e5-a11a-03dee332d3b6] Allocating IP information in the background.
{{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 516.218669] env[61839]: DEBUG nova.network.neutron [None req-71b74068-a47a-43e7-91f8-80e309405090 tempest-ServerDiagnosticsTest-192461370 tempest-ServerDiagnosticsTest-192461370-project-member] [instance: c258dbf2-be81-40e5-a11a-03dee332d3b6] allocate_for_instance() {{(pid=61839) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 516.320736] env[61839]: DEBUG nova.policy [None req-71b74068-a47a-43e7-91f8-80e309405090 tempest-ServerDiagnosticsTest-192461370 tempest-ServerDiagnosticsTest-192461370-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'cbb9a816460742fba39b715a094eb206', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0e457ac9501e40b2b3097deae63be2a2', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61839) authorize /opt/stack/nova/nova/policy.py:201}} [ 516.725388] env[61839]: DEBUG nova.compute.manager [None req-71b74068-a47a-43e7-91f8-80e309405090 tempest-ServerDiagnosticsTest-192461370 tempest-ServerDiagnosticsTest-192461370-project-member] [instance: c258dbf2-be81-40e5-a11a-03dee332d3b6] Start building block device mappings for instance. {{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 516.907061] env[61839]: DEBUG nova.network.neutron [None req-71b74068-a47a-43e7-91f8-80e309405090 tempest-ServerDiagnosticsTest-192461370 tempest-ServerDiagnosticsTest-192461370-project-member] [instance: c258dbf2-be81-40e5-a11a-03dee332d3b6] Successfully created port: 50fc3cdf-49e3-4a6a-8a5c-8068eb6cd193 {{(pid=61839) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 517.104748] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68e3ccb5-8b3a-4054-b348-3e61decf478c {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 517.113232] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-461e36cc-a695-441d-9723-aa4622e78263 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 517.153974] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-469ccb55-5e58-4b84-bd99-5cd0b6f67f69 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 517.162557] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce9c2d2c-84f6-4766-beba-0558dee0edfa {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 517.181784] env[61839]: DEBUG nova.compute.provider_tree [None req-67c06703-2c83-4f84-94a3-1193ea8f10d4 tempest-ImagesNegativeTestJSON-313379827 tempest-ImagesNegativeTestJSON-313379827-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 517.199753] env[61839]: DEBUG oslo_concurrency.lockutils [None req-4249fffb-3702-4570-9f57-2cbc28b164c9 tempest-ServersAdmin275Test-1612766627 
tempest-ServersAdmin275Test-1612766627-project-member] Acquiring lock "211e8267-3c33-42c8-852f-1c20d7987453" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 517.199753] env[61839]: DEBUG oslo_concurrency.lockutils [None req-4249fffb-3702-4570-9f57-2cbc28b164c9 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Lock "211e8267-3c33-42c8-852f-1c20d7987453" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 517.414778] env[61839]: DEBUG oslo_concurrency.lockutils [None req-93209ce3-de5a-4c91-b8b0-efd4bd89a6cc tempest-ServerDiagnosticsNegativeTest-1581315091 tempest-ServerDiagnosticsNegativeTest-1581315091-project-member] Acquiring lock "570fb8fe-391a-4f1b-be51-17979e9fb049" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 517.415047] env[61839]: DEBUG oslo_concurrency.lockutils [None req-93209ce3-de5a-4c91-b8b0-efd4bd89a6cc tempest-ServerDiagnosticsNegativeTest-1581315091 tempest-ServerDiagnosticsNegativeTest-1581315091-project-member] Lock "570fb8fe-391a-4f1b-be51-17979e9fb049" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 517.684063] env[61839]: DEBUG nova.scheduler.client.report [None req-67c06703-2c83-4f84-94a3-1193ea8f10d4 tempest-ImagesNegativeTestJSON-313379827 tempest-ImagesNegativeTestJSON-313379827-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 517.699641] env[61839]: DEBUG nova.compute.manager [None req-4249fffb-3702-4570-9f57-2cbc28b164c9 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] [instance: 211e8267-3c33-42c8-852f-1c20d7987453] Starting instance... {{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 517.736919] env[61839]: DEBUG nova.compute.manager [None req-71b74068-a47a-43e7-91f8-80e309405090 tempest-ServerDiagnosticsTest-192461370 tempest-ServerDiagnosticsTest-192461370-project-member] [instance: c258dbf2-be81-40e5-a11a-03dee332d3b6] Start spawning the instance on the hypervisor. 
{{(pid=61839) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 517.773936] env[61839]: DEBUG nova.virt.hardware [None req-71b74068-a47a-43e7-91f8-80e309405090 tempest-ServerDiagnosticsTest-192461370 tempest-ServerDiagnosticsTest-192461370-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-18T16:51:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-18T16:50:57Z,direct_url=,disk_format='vmdk',id=e497cc62-282a-4a70-9770-22d80d8a1013,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a98e034276e44534a9feace637762da7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-18T16:50:58Z,virtual_size=,visibility=), allow threads: False {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 517.774123] env[61839]: DEBUG nova.virt.hardware [None req-71b74068-a47a-43e7-91f8-80e309405090 tempest-ServerDiagnosticsTest-192461370 tempest-ServerDiagnosticsTest-192461370-project-member] Flavor limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 517.774292] env[61839]: DEBUG nova.virt.hardware [None req-71b74068-a47a-43e7-91f8-80e309405090 tempest-ServerDiagnosticsTest-192461370 tempest-ServerDiagnosticsTest-192461370-project-member] Image limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 517.774423] env[61839]: DEBUG nova.virt.hardware [None req-71b74068-a47a-43e7-91f8-80e309405090 tempest-ServerDiagnosticsTest-192461370 tempest-ServerDiagnosticsTest-192461370-project-member] Flavor pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 517.774635] env[61839]: DEBUG nova.virt.hardware [None req-71b74068-a47a-43e7-91f8-80e309405090 tempest-ServerDiagnosticsTest-192461370 tempest-ServerDiagnosticsTest-192461370-project-member] Image pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 517.774703] env[61839]: DEBUG nova.virt.hardware [None req-71b74068-a47a-43e7-91f8-80e309405090 tempest-ServerDiagnosticsTest-192461370 tempest-ServerDiagnosticsTest-192461370-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 517.774902] env[61839]: DEBUG nova.virt.hardware [None req-71b74068-a47a-43e7-91f8-80e309405090 tempest-ServerDiagnosticsTest-192461370 tempest-ServerDiagnosticsTest-192461370-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 517.776029] env[61839]: DEBUG nova.virt.hardware [None req-71b74068-a47a-43e7-91f8-80e309405090 tempest-ServerDiagnosticsTest-192461370 tempest-ServerDiagnosticsTest-192461370-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 517.776029] env[61839]: DEBUG nova.virt.hardware [None 
req-71b74068-a47a-43e7-91f8-80e309405090 tempest-ServerDiagnosticsTest-192461370 tempest-ServerDiagnosticsTest-192461370-project-member] Got 1 possible topologies {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 517.776392] env[61839]: DEBUG nova.virt.hardware [None req-71b74068-a47a-43e7-91f8-80e309405090 tempest-ServerDiagnosticsTest-192461370 tempest-ServerDiagnosticsTest-192461370-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 517.777231] env[61839]: DEBUG nova.virt.hardware [None req-71b74068-a47a-43e7-91f8-80e309405090 tempest-ServerDiagnosticsTest-192461370 tempest-ServerDiagnosticsTest-192461370-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 517.778367] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b59cb448-555c-4b87-9c56-6c17ce1ea080 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 517.787157] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-754f5b69-305f-46c3-b9da-09860bd15f42 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 517.918296] env[61839]: DEBUG nova.compute.manager [None req-93209ce3-de5a-4c91-b8b0-efd4bd89a6cc tempest-ServerDiagnosticsNegativeTest-1581315091 tempest-ServerDiagnosticsNegativeTest-1581315091-project-member] [instance: 570fb8fe-391a-4f1b-be51-17979e9fb049] Starting instance... {{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 518.035776] env[61839]: ERROR nova.compute.manager [None req-12eb9dfa-b92e-47f2-a67a-cc399fb0a366 tempest-TenantUsagesTestJSON-1491461262 tempest-TenantUsagesTestJSON-1491461262-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port cb0e2d64-499b-40f4-8a14-5de5a5754bc3, please check neutron logs for more information. 
[ 518.035776] env[61839]: ERROR nova.compute.manager Traceback (most recent call last): [ 518.035776] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 518.035776] env[61839]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 518.035776] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 518.035776] env[61839]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 518.035776] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 518.035776] env[61839]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 518.035776] env[61839]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 518.035776] env[61839]: ERROR nova.compute.manager self.force_reraise() [ 518.035776] env[61839]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 518.035776] env[61839]: ERROR nova.compute.manager raise self.value [ 518.035776] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 518.035776] env[61839]: ERROR nova.compute.manager updated_port = self._update_port( [ 518.035776] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 518.035776] env[61839]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 518.038991] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 518.038991] env[61839]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 518.038991] env[61839]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port cb0e2d64-499b-40f4-8a14-5de5a5754bc3, please check neutron logs for more information. 
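(The traceback above walks Nova's port-allocation chain: allocate_for_instance() fans out to _update_ports_for_instance(), which wraps each _update_port() call in oslo.utils' save_and_reraise_exception() so cleanup can run before the original error propagates, and _ensure_no_port_binding_failure() converts Neutron's in-band failure marker into a typed exception. A minimal sketch of that final check, assuming, as Neutron's ML2 plugin does, that a port which could not be bound comes back with 'binding:vif_type' set to 'binding_failed'; this is an illustrative reconstruction, not the Nova source:

    class PortBindingFailed(Exception):
        """Typed wrapper so callers can react to a failed port binding."""
        def __init__(self, port_id):
            super().__init__(
                "Binding failed for port %s, please check neutron logs "
                "for more information." % port_id)

    VIF_TYPE_BINDING_FAILED = 'binding_failed'  # Neutron's in-band sentinel

    def _ensure_no_port_binding_failure(port):
        # Neutron answers HTTP 200 even when no mechanism driver could bind
        # the port, so the failure must be detected on the port body itself.
        if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
            raise PortBindingFailed(port_id=port['id'])

The duplicate traceback that follows is the same failure surfacing again from the eventlet greenthread that ran _allocate_network_async in the background.)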
[ 518.038991] env[61839]: ERROR nova.compute.manager [ 518.038991] env[61839]: Traceback (most recent call last): [ 518.038991] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 518.038991] env[61839]: listener.cb(fileno) [ 518.038991] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 518.038991] env[61839]: result = function(*args, **kwargs) [ 518.038991] env[61839]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 518.038991] env[61839]: return func(*args, **kwargs) [ 518.038991] env[61839]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 518.038991] env[61839]: raise e [ 518.038991] env[61839]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 518.038991] env[61839]: nwinfo = self.network_api.allocate_for_instance( [ 518.038991] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 518.038991] env[61839]: created_port_ids = self._update_ports_for_instance( [ 518.038991] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 518.038991] env[61839]: with excutils.save_and_reraise_exception(): [ 518.038991] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 518.038991] env[61839]: self.force_reraise() [ 518.038991] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 518.038991] env[61839]: raise self.value [ 518.038991] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 518.038991] env[61839]: updated_port = self._update_port( [ 518.038991] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 518.038991] env[61839]: _ensure_no_port_binding_failure(port) [ 518.038991] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 518.038991] env[61839]: raise exception.PortBindingFailed(port_id=port['id']) [ 518.041108] env[61839]: nova.exception.PortBindingFailed: Binding failed for port cb0e2d64-499b-40f4-8a14-5de5a5754bc3, please check neutron logs for more information. [ 518.041108] env[61839]: Removing descriptor: 15 [ 518.041108] env[61839]: ERROR nova.compute.manager [None req-12eb9dfa-b92e-47f2-a67a-cc399fb0a366 tempest-TenantUsagesTestJSON-1491461262 tempest-TenantUsagesTestJSON-1491461262-project-member] [instance: 95685d36-7efa-42a9-8722-5b90d6edbce5] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port cb0e2d64-499b-40f4-8a14-5de5a5754bc3, please check neutron logs for more information. 
[ 518.041108] env[61839]: ERROR nova.compute.manager [instance: 95685d36-7efa-42a9-8722-5b90d6edbce5] Traceback (most recent call last): [ 518.041108] env[61839]: ERROR nova.compute.manager [instance: 95685d36-7efa-42a9-8722-5b90d6edbce5] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 518.041108] env[61839]: ERROR nova.compute.manager [instance: 95685d36-7efa-42a9-8722-5b90d6edbce5] yield resources [ 518.041108] env[61839]: ERROR nova.compute.manager [instance: 95685d36-7efa-42a9-8722-5b90d6edbce5] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 518.041108] env[61839]: ERROR nova.compute.manager [instance: 95685d36-7efa-42a9-8722-5b90d6edbce5] self.driver.spawn(context, instance, image_meta, [ 518.041108] env[61839]: ERROR nova.compute.manager [instance: 95685d36-7efa-42a9-8722-5b90d6edbce5] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 518.041108] env[61839]: ERROR nova.compute.manager [instance: 95685d36-7efa-42a9-8722-5b90d6edbce5] self._vmops.spawn(context, instance, image_meta, injected_files, [ 518.041108] env[61839]: ERROR nova.compute.manager [instance: 95685d36-7efa-42a9-8722-5b90d6edbce5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 518.041108] env[61839]: ERROR nova.compute.manager [instance: 95685d36-7efa-42a9-8722-5b90d6edbce5] vm_ref = self.build_virtual_machine(instance, [ 518.041516] env[61839]: ERROR nova.compute.manager [instance: 95685d36-7efa-42a9-8722-5b90d6edbce5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 518.041516] env[61839]: ERROR nova.compute.manager [instance: 95685d36-7efa-42a9-8722-5b90d6edbce5] vif_infos = vmwarevif.get_vif_info(self._session, [ 518.041516] env[61839]: ERROR nova.compute.manager [instance: 95685d36-7efa-42a9-8722-5b90d6edbce5] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 518.041516] env[61839]: ERROR nova.compute.manager [instance: 95685d36-7efa-42a9-8722-5b90d6edbce5] for vif in network_info: [ 518.041516] env[61839]: ERROR nova.compute.manager [instance: 95685d36-7efa-42a9-8722-5b90d6edbce5] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 518.041516] env[61839]: ERROR nova.compute.manager [instance: 95685d36-7efa-42a9-8722-5b90d6edbce5] return self._sync_wrapper(fn, *args, **kwargs) [ 518.041516] env[61839]: ERROR nova.compute.manager [instance: 95685d36-7efa-42a9-8722-5b90d6edbce5] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 518.041516] env[61839]: ERROR nova.compute.manager [instance: 95685d36-7efa-42a9-8722-5b90d6edbce5] self.wait() [ 518.041516] env[61839]: ERROR nova.compute.manager [instance: 95685d36-7efa-42a9-8722-5b90d6edbce5] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 518.041516] env[61839]: ERROR nova.compute.manager [instance: 95685d36-7efa-42a9-8722-5b90d6edbce5] self[:] = self._gt.wait() [ 518.041516] env[61839]: ERROR nova.compute.manager [instance: 95685d36-7efa-42a9-8722-5b90d6edbce5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 518.041516] env[61839]: ERROR nova.compute.manager [instance: 95685d36-7efa-42a9-8722-5b90d6edbce5] return self._exit_event.wait() [ 518.041516] env[61839]: ERROR nova.compute.manager [instance: 95685d36-7efa-42a9-8722-5b90d6edbce5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 518.042015] env[61839]: ERROR 
nova.compute.manager [instance: 95685d36-7efa-42a9-8722-5b90d6edbce5] result = hub.switch() [ 518.042015] env[61839]: ERROR nova.compute.manager [instance: 95685d36-7efa-42a9-8722-5b90d6edbce5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 518.042015] env[61839]: ERROR nova.compute.manager [instance: 95685d36-7efa-42a9-8722-5b90d6edbce5] return self.greenlet.switch() [ 518.042015] env[61839]: ERROR nova.compute.manager [instance: 95685d36-7efa-42a9-8722-5b90d6edbce5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 518.042015] env[61839]: ERROR nova.compute.manager [instance: 95685d36-7efa-42a9-8722-5b90d6edbce5] result = function(*args, **kwargs) [ 518.042015] env[61839]: ERROR nova.compute.manager [instance: 95685d36-7efa-42a9-8722-5b90d6edbce5] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 518.042015] env[61839]: ERROR nova.compute.manager [instance: 95685d36-7efa-42a9-8722-5b90d6edbce5] return func(*args, **kwargs) [ 518.042015] env[61839]: ERROR nova.compute.manager [instance: 95685d36-7efa-42a9-8722-5b90d6edbce5] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 518.042015] env[61839]: ERROR nova.compute.manager [instance: 95685d36-7efa-42a9-8722-5b90d6edbce5] raise e [ 518.042015] env[61839]: ERROR nova.compute.manager [instance: 95685d36-7efa-42a9-8722-5b90d6edbce5] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 518.042015] env[61839]: ERROR nova.compute.manager [instance: 95685d36-7efa-42a9-8722-5b90d6edbce5] nwinfo = self.network_api.allocate_for_instance( [ 518.042015] env[61839]: ERROR nova.compute.manager [instance: 95685d36-7efa-42a9-8722-5b90d6edbce5] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 518.042015] env[61839]: ERROR nova.compute.manager [instance: 95685d36-7efa-42a9-8722-5b90d6edbce5] created_port_ids = self._update_ports_for_instance( [ 518.042361] env[61839]: ERROR nova.compute.manager [instance: 95685d36-7efa-42a9-8722-5b90d6edbce5] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 518.042361] env[61839]: ERROR nova.compute.manager [instance: 95685d36-7efa-42a9-8722-5b90d6edbce5] with excutils.save_and_reraise_exception(): [ 518.042361] env[61839]: ERROR nova.compute.manager [instance: 95685d36-7efa-42a9-8722-5b90d6edbce5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 518.042361] env[61839]: ERROR nova.compute.manager [instance: 95685d36-7efa-42a9-8722-5b90d6edbce5] self.force_reraise() [ 518.042361] env[61839]: ERROR nova.compute.manager [instance: 95685d36-7efa-42a9-8722-5b90d6edbce5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 518.042361] env[61839]: ERROR nova.compute.manager [instance: 95685d36-7efa-42a9-8722-5b90d6edbce5] raise self.value [ 518.042361] env[61839]: ERROR nova.compute.manager [instance: 95685d36-7efa-42a9-8722-5b90d6edbce5] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 518.042361] env[61839]: ERROR nova.compute.manager [instance: 95685d36-7efa-42a9-8722-5b90d6edbce5] updated_port = self._update_port( [ 518.042361] env[61839]: ERROR nova.compute.manager [instance: 95685d36-7efa-42a9-8722-5b90d6edbce5] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 518.042361] 
env[61839]: ERROR nova.compute.manager [instance: 95685d36-7efa-42a9-8722-5b90d6edbce5] _ensure_no_port_binding_failure(port) [ 518.042361] env[61839]: ERROR nova.compute.manager [instance: 95685d36-7efa-42a9-8722-5b90d6edbce5] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 518.042361] env[61839]: ERROR nova.compute.manager [instance: 95685d36-7efa-42a9-8722-5b90d6edbce5] raise exception.PortBindingFailed(port_id=port['id']) [ 518.042666] env[61839]: ERROR nova.compute.manager [instance: 95685d36-7efa-42a9-8722-5b90d6edbce5] nova.exception.PortBindingFailed: Binding failed for port cb0e2d64-499b-40f4-8a14-5de5a5754bc3, please check neutron logs for more information. [ 518.042666] env[61839]: ERROR nova.compute.manager [instance: 95685d36-7efa-42a9-8722-5b90d6edbce5] [ 518.042666] env[61839]: INFO nova.compute.manager [None req-12eb9dfa-b92e-47f2-a67a-cc399fb0a366 tempest-TenantUsagesTestJSON-1491461262 tempest-TenantUsagesTestJSON-1491461262-project-member] [instance: 95685d36-7efa-42a9-8722-5b90d6edbce5] Terminating instance [ 518.044850] env[61839]: DEBUG oslo_concurrency.lockutils [None req-12eb9dfa-b92e-47f2-a67a-cc399fb0a366 tempest-TenantUsagesTestJSON-1491461262 tempest-TenantUsagesTestJSON-1491461262-project-member] Acquiring lock "refresh_cache-95685d36-7efa-42a9-8722-5b90d6edbce5" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 518.045503] env[61839]: DEBUG oslo_concurrency.lockutils [None req-12eb9dfa-b92e-47f2-a67a-cc399fb0a366 tempest-TenantUsagesTestJSON-1491461262 tempest-TenantUsagesTestJSON-1491461262-project-member] Acquired lock "refresh_cache-95685d36-7efa-42a9-8722-5b90d6edbce5" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 518.045503] env[61839]: DEBUG nova.network.neutron [None req-12eb9dfa-b92e-47f2-a67a-cc399fb0a366 tempest-TenantUsagesTestJSON-1491461262 tempest-TenantUsagesTestJSON-1491461262-project-member] [instance: 95685d36-7efa-42a9-8722-5b90d6edbce5] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 518.194885] env[61839]: DEBUG oslo_concurrency.lockutils [None req-67c06703-2c83-4f84-94a3-1193ea8f10d4 tempest-ImagesNegativeTestJSON-313379827 tempest-ImagesNegativeTestJSON-313379827-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.211s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 518.198754] env[61839]: DEBUG nova.compute.manager [None req-67c06703-2c83-4f84-94a3-1193ea8f10d4 tempest-ImagesNegativeTestJSON-313379827 tempest-ImagesNegativeTestJSON-313379827-project-member] [instance: be976871-706d-41e1-9423-55ff251a52e9] Start building networks asynchronously for instance. 
{{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 518.233956] env[61839]: DEBUG oslo_concurrency.lockutils [None req-4249fffb-3702-4570-9f57-2cbc28b164c9 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 518.234222] env[61839]: DEBUG oslo_concurrency.lockutils [None req-4249fffb-3702-4570-9f57-2cbc28b164c9 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 518.236761] env[61839]: INFO nova.compute.claims [None req-4249fffb-3702-4570-9f57-2cbc28b164c9 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] [instance: 211e8267-3c33-42c8-852f-1c20d7987453] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 518.448808] env[61839]: DEBUG oslo_concurrency.lockutils [None req-93209ce3-de5a-4c91-b8b0-efd4bd89a6cc tempest-ServerDiagnosticsNegativeTest-1581315091 tempest-ServerDiagnosticsNegativeTest-1581315091-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 518.583097] env[61839]: DEBUG nova.network.neutron [None req-12eb9dfa-b92e-47f2-a67a-cc399fb0a366 tempest-TenantUsagesTestJSON-1491461262 tempest-TenantUsagesTestJSON-1491461262-project-member] [instance: 95685d36-7efa-42a9-8722-5b90d6edbce5] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 518.701729] env[61839]: DEBUG nova.compute.utils [None req-67c06703-2c83-4f84-94a3-1193ea8f10d4 tempest-ImagesNegativeTestJSON-313379827 tempest-ImagesNegativeTestJSON-313379827-project-member] Using /dev/sd instead of None {{(pid=61839) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 518.703217] env[61839]: DEBUG nova.compute.manager [None req-67c06703-2c83-4f84-94a3-1193ea8f10d4 tempest-ImagesNegativeTestJSON-313379827 tempest-ImagesNegativeTestJSON-313379827-project-member] [instance: be976871-706d-41e1-9423-55ff251a52e9] Allocating IP information in the background. 
{{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 518.703398] env[61839]: DEBUG nova.network.neutron [None req-67c06703-2c83-4f84-94a3-1193ea8f10d4 tempest-ImagesNegativeTestJSON-313379827 tempest-ImagesNegativeTestJSON-313379827-project-member] [instance: be976871-706d-41e1-9423-55ff251a52e9] allocate_for_instance() {{(pid=61839) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 518.712235] env[61839]: DEBUG nova.network.neutron [None req-12eb9dfa-b92e-47f2-a67a-cc399fb0a366 tempest-TenantUsagesTestJSON-1491461262 tempest-TenantUsagesTestJSON-1491461262-project-member] [instance: 95685d36-7efa-42a9-8722-5b90d6edbce5] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 518.812219] env[61839]: ERROR nova.compute.manager [None req-71b74068-a47a-43e7-91f8-80e309405090 tempest-ServerDiagnosticsTest-192461370 tempest-ServerDiagnosticsTest-192461370-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 50fc3cdf-49e3-4a6a-8a5c-8068eb6cd193, please check neutron logs for more information. [ 518.812219] env[61839]: ERROR nova.compute.manager Traceback (most recent call last): [ 518.812219] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 518.812219] env[61839]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 518.812219] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 518.812219] env[61839]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 518.812219] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 518.812219] env[61839]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 518.812219] env[61839]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 518.812219] env[61839]: ERROR nova.compute.manager self.force_reraise() [ 518.812219] env[61839]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 518.812219] env[61839]: ERROR nova.compute.manager raise self.value [ 518.812219] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 518.812219] env[61839]: ERROR nova.compute.manager updated_port = self._update_port( [ 518.812219] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 518.812219] env[61839]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 518.812718] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 518.812718] env[61839]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 518.812718] env[61839]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 50fc3cdf-49e3-4a6a-8a5c-8068eb6cd193, please check neutron logs for more information. 
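(The "Acquiring lock" / "acquired ... waited Ns" / "released ... held Ns" records that bracket this section come from oslo.concurrency's lockutils, which logs wait and hold times around every named lock, e.g. "compute_resources" in the resource tracker and the per-instance "refresh_cache-<uuid>" locks. A minimal sketch of both forms seen in this log, with a hypothetical helper standing in for the real critical section:

    from oslo_concurrency import lockutils

    def _claim_resources(instance):
        # Hypothetical stand-in for the resource tracker's claim logic.
        return {'claimed': instance}

    @lockutils.synchronized('compute_resources')
    def instance_claim(instance):
        # Greenthreads calling this serialize on the lock name; lockutils
        # emits the "acquired ... waited" / "released ... held" DEBUG lines
        # seen throughout this log.
        return _claim_resources(instance)

    def refresh_cache(instance_uuid):
        # Context-manager form, matching the per-instance
        # "refresh_cache-<uuid>" Acquiring/Acquired/Releasing lines above.
        with lockutils.lock('refresh_cache-%s' % instance_uuid):
            pass  # rebuild the network info cache here
)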
[ 518.812718] env[61839]: ERROR nova.compute.manager [ 518.812718] env[61839]: Traceback (most recent call last): [ 518.812718] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 518.812718] env[61839]: listener.cb(fileno) [ 518.812718] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 518.812718] env[61839]: result = function(*args, **kwargs) [ 518.812718] env[61839]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 518.812718] env[61839]: return func(*args, **kwargs) [ 518.812718] env[61839]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 518.812718] env[61839]: raise e [ 518.812718] env[61839]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 518.812718] env[61839]: nwinfo = self.network_api.allocate_for_instance( [ 518.812718] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 518.812718] env[61839]: created_port_ids = self._update_ports_for_instance( [ 518.812718] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 518.812718] env[61839]: with excutils.save_and_reraise_exception(): [ 518.812718] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 518.812718] env[61839]: self.force_reraise() [ 518.812718] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 518.812718] env[61839]: raise self.value [ 518.812718] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 518.812718] env[61839]: updated_port = self._update_port( [ 518.812718] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 518.812718] env[61839]: _ensure_no_port_binding_failure(port) [ 518.812718] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 518.812718] env[61839]: raise exception.PortBindingFailed(port_id=port['id']) [ 518.813604] env[61839]: nova.exception.PortBindingFailed: Binding failed for port 50fc3cdf-49e3-4a6a-8a5c-8068eb6cd193, please check neutron logs for more information. [ 518.813604] env[61839]: Removing descriptor: 16 [ 518.813604] env[61839]: ERROR nova.compute.manager [None req-71b74068-a47a-43e7-91f8-80e309405090 tempest-ServerDiagnosticsTest-192461370 tempest-ServerDiagnosticsTest-192461370-project-member] [instance: c258dbf2-be81-40e5-a11a-03dee332d3b6] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 50fc3cdf-49e3-4a6a-8a5c-8068eb6cd193, please check neutron logs for more information. 
[ 518.813604] env[61839]: ERROR nova.compute.manager [instance: c258dbf2-be81-40e5-a11a-03dee332d3b6] Traceback (most recent call last): [ 518.813604] env[61839]: ERROR nova.compute.manager [instance: c258dbf2-be81-40e5-a11a-03dee332d3b6] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 518.813604] env[61839]: ERROR nova.compute.manager [instance: c258dbf2-be81-40e5-a11a-03dee332d3b6] yield resources [ 518.813604] env[61839]: ERROR nova.compute.manager [instance: c258dbf2-be81-40e5-a11a-03dee332d3b6] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 518.813604] env[61839]: ERROR nova.compute.manager [instance: c258dbf2-be81-40e5-a11a-03dee332d3b6] self.driver.spawn(context, instance, image_meta, [ 518.813604] env[61839]: ERROR nova.compute.manager [instance: c258dbf2-be81-40e5-a11a-03dee332d3b6] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 518.813604] env[61839]: ERROR nova.compute.manager [instance: c258dbf2-be81-40e5-a11a-03dee332d3b6] self._vmops.spawn(context, instance, image_meta, injected_files, [ 518.813604] env[61839]: ERROR nova.compute.manager [instance: c258dbf2-be81-40e5-a11a-03dee332d3b6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 518.813604] env[61839]: ERROR nova.compute.manager [instance: c258dbf2-be81-40e5-a11a-03dee332d3b6] vm_ref = self.build_virtual_machine(instance, [ 518.814046] env[61839]: ERROR nova.compute.manager [instance: c258dbf2-be81-40e5-a11a-03dee332d3b6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 518.814046] env[61839]: ERROR nova.compute.manager [instance: c258dbf2-be81-40e5-a11a-03dee332d3b6] vif_infos = vmwarevif.get_vif_info(self._session, [ 518.814046] env[61839]: ERROR nova.compute.manager [instance: c258dbf2-be81-40e5-a11a-03dee332d3b6] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 518.814046] env[61839]: ERROR nova.compute.manager [instance: c258dbf2-be81-40e5-a11a-03dee332d3b6] for vif in network_info: [ 518.814046] env[61839]: ERROR nova.compute.manager [instance: c258dbf2-be81-40e5-a11a-03dee332d3b6] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 518.814046] env[61839]: ERROR nova.compute.manager [instance: c258dbf2-be81-40e5-a11a-03dee332d3b6] return self._sync_wrapper(fn, *args, **kwargs) [ 518.814046] env[61839]: ERROR nova.compute.manager [instance: c258dbf2-be81-40e5-a11a-03dee332d3b6] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 518.814046] env[61839]: ERROR nova.compute.manager [instance: c258dbf2-be81-40e5-a11a-03dee332d3b6] self.wait() [ 518.814046] env[61839]: ERROR nova.compute.manager [instance: c258dbf2-be81-40e5-a11a-03dee332d3b6] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 518.814046] env[61839]: ERROR nova.compute.manager [instance: c258dbf2-be81-40e5-a11a-03dee332d3b6] self[:] = self._gt.wait() [ 518.814046] env[61839]: ERROR nova.compute.manager [instance: c258dbf2-be81-40e5-a11a-03dee332d3b6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 518.814046] env[61839]: ERROR nova.compute.manager [instance: c258dbf2-be81-40e5-a11a-03dee332d3b6] return self._exit_event.wait() [ 518.814046] env[61839]: ERROR nova.compute.manager [instance: c258dbf2-be81-40e5-a11a-03dee332d3b6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 518.814421] env[61839]: ERROR 
nova.compute.manager [instance: c258dbf2-be81-40e5-a11a-03dee332d3b6] result = hub.switch() [ 518.814421] env[61839]: ERROR nova.compute.manager [instance: c258dbf2-be81-40e5-a11a-03dee332d3b6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 518.814421] env[61839]: ERROR nova.compute.manager [instance: c258dbf2-be81-40e5-a11a-03dee332d3b6] return self.greenlet.switch() [ 518.814421] env[61839]: ERROR nova.compute.manager [instance: c258dbf2-be81-40e5-a11a-03dee332d3b6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 518.814421] env[61839]: ERROR nova.compute.manager [instance: c258dbf2-be81-40e5-a11a-03dee332d3b6] result = function(*args, **kwargs) [ 518.814421] env[61839]: ERROR nova.compute.manager [instance: c258dbf2-be81-40e5-a11a-03dee332d3b6] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 518.814421] env[61839]: ERROR nova.compute.manager [instance: c258dbf2-be81-40e5-a11a-03dee332d3b6] return func(*args, **kwargs) [ 518.814421] env[61839]: ERROR nova.compute.manager [instance: c258dbf2-be81-40e5-a11a-03dee332d3b6] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 518.814421] env[61839]: ERROR nova.compute.manager [instance: c258dbf2-be81-40e5-a11a-03dee332d3b6] raise e [ 518.814421] env[61839]: ERROR nova.compute.manager [instance: c258dbf2-be81-40e5-a11a-03dee332d3b6] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 518.814421] env[61839]: ERROR nova.compute.manager [instance: c258dbf2-be81-40e5-a11a-03dee332d3b6] nwinfo = self.network_api.allocate_for_instance( [ 518.814421] env[61839]: ERROR nova.compute.manager [instance: c258dbf2-be81-40e5-a11a-03dee332d3b6] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 518.814421] env[61839]: ERROR nova.compute.manager [instance: c258dbf2-be81-40e5-a11a-03dee332d3b6] created_port_ids = self._update_ports_for_instance( [ 518.814790] env[61839]: ERROR nova.compute.manager [instance: c258dbf2-be81-40e5-a11a-03dee332d3b6] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 518.814790] env[61839]: ERROR nova.compute.manager [instance: c258dbf2-be81-40e5-a11a-03dee332d3b6] with excutils.save_and_reraise_exception(): [ 518.814790] env[61839]: ERROR nova.compute.manager [instance: c258dbf2-be81-40e5-a11a-03dee332d3b6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 518.814790] env[61839]: ERROR nova.compute.manager [instance: c258dbf2-be81-40e5-a11a-03dee332d3b6] self.force_reraise() [ 518.814790] env[61839]: ERROR nova.compute.manager [instance: c258dbf2-be81-40e5-a11a-03dee332d3b6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 518.814790] env[61839]: ERROR nova.compute.manager [instance: c258dbf2-be81-40e5-a11a-03dee332d3b6] raise self.value [ 518.814790] env[61839]: ERROR nova.compute.manager [instance: c258dbf2-be81-40e5-a11a-03dee332d3b6] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 518.814790] env[61839]: ERROR nova.compute.manager [instance: c258dbf2-be81-40e5-a11a-03dee332d3b6] updated_port = self._update_port( [ 518.814790] env[61839]: ERROR nova.compute.manager [instance: c258dbf2-be81-40e5-a11a-03dee332d3b6] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 518.814790] 
env[61839]: ERROR nova.compute.manager [instance: c258dbf2-be81-40e5-a11a-03dee332d3b6] _ensure_no_port_binding_failure(port) [ 518.814790] env[61839]: ERROR nova.compute.manager [instance: c258dbf2-be81-40e5-a11a-03dee332d3b6] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 518.814790] env[61839]: ERROR nova.compute.manager [instance: c258dbf2-be81-40e5-a11a-03dee332d3b6] raise exception.PortBindingFailed(port_id=port['id']) [ 518.815148] env[61839]: ERROR nova.compute.manager [instance: c258dbf2-be81-40e5-a11a-03dee332d3b6] nova.exception.PortBindingFailed: Binding failed for port 50fc3cdf-49e3-4a6a-8a5c-8068eb6cd193, please check neutron logs for more information. [ 518.815148] env[61839]: ERROR nova.compute.manager [instance: c258dbf2-be81-40e5-a11a-03dee332d3b6] [ 518.815148] env[61839]: INFO nova.compute.manager [None req-71b74068-a47a-43e7-91f8-80e309405090 tempest-ServerDiagnosticsTest-192461370 tempest-ServerDiagnosticsTest-192461370-project-member] [instance: c258dbf2-be81-40e5-a11a-03dee332d3b6] Terminating instance [ 518.818522] env[61839]: DEBUG nova.policy [None req-67c06703-2c83-4f84-94a3-1193ea8f10d4 tempest-ImagesNegativeTestJSON-313379827 tempest-ImagesNegativeTestJSON-313379827-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a26efdfcec784fd6a0be3c9f47b60b14', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '410f61c02c224161a3229feb35c52c13', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61839) authorize /opt/stack/nova/nova/policy.py:201}} [ 518.821894] env[61839]: DEBUG oslo_concurrency.lockutils [None req-71b74068-a47a-43e7-91f8-80e309405090 tempest-ServerDiagnosticsTest-192461370 tempest-ServerDiagnosticsTest-192461370-project-member] Acquiring lock "refresh_cache-c258dbf2-be81-40e5-a11a-03dee332d3b6" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 518.822104] env[61839]: DEBUG oslo_concurrency.lockutils [None req-71b74068-a47a-43e7-91f8-80e309405090 tempest-ServerDiagnosticsTest-192461370 tempest-ServerDiagnosticsTest-192461370-project-member] Acquired lock "refresh_cache-c258dbf2-be81-40e5-a11a-03dee332d3b6" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 518.822312] env[61839]: DEBUG nova.network.neutron [None req-71b74068-a47a-43e7-91f8-80e309405090 tempest-ServerDiagnosticsTest-192461370 tempest-ServerDiagnosticsTest-192461370-project-member] [instance: c258dbf2-be81-40e5-a11a-03dee332d3b6] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 518.867517] env[61839]: DEBUG oslo_concurrency.lockutils [None req-f8fd2f30-7a32-47b5-822d-e89a7c694abc tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] Acquiring lock "41823a25-5ff2-4838-854d-5bada8e5daca" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 518.867772] env[61839]: DEBUG oslo_concurrency.lockutils [None req-f8fd2f30-7a32-47b5-822d-e89a7c694abc 
tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] Lock "41823a25-5ff2-4838-854d-5bada8e5daca" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 519.038916] env[61839]: DEBUG nova.compute.manager [req-e5534524-1406-450d-9834-206477e3557d req-a496b531-2b96-4069-b5c5-a1d316369882 service nova] [instance: 95685d36-7efa-42a9-8722-5b90d6edbce5] Received event network-changed-cb0e2d64-499b-40f4-8a14-5de5a5754bc3 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 519.039119] env[61839]: DEBUG nova.compute.manager [req-e5534524-1406-450d-9834-206477e3557d req-a496b531-2b96-4069-b5c5-a1d316369882 service nova] [instance: 95685d36-7efa-42a9-8722-5b90d6edbce5] Refreshing instance network info cache due to event network-changed-cb0e2d64-499b-40f4-8a14-5de5a5754bc3. {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 519.039311] env[61839]: DEBUG oslo_concurrency.lockutils [req-e5534524-1406-450d-9834-206477e3557d req-a496b531-2b96-4069-b5c5-a1d316369882 service nova] Acquiring lock "refresh_cache-95685d36-7efa-42a9-8722-5b90d6edbce5" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 519.114439] env[61839]: DEBUG oslo_concurrency.lockutils [None req-2194692e-8fa8-43a0-9f32-21d13bbfb66f tempest-ServersTestFqdnHostnames-1400549167 tempest-ServersTestFqdnHostnames-1400549167-project-member] Acquiring lock "6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 519.114719] env[61839]: DEBUG oslo_concurrency.lockutils [None req-2194692e-8fa8-43a0-9f32-21d13bbfb66f tempest-ServersTestFqdnHostnames-1400549167 tempest-ServersTestFqdnHostnames-1400549167-project-member] Lock "6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 519.207401] env[61839]: DEBUG nova.compute.manager [None req-67c06703-2c83-4f84-94a3-1193ea8f10d4 tempest-ImagesNegativeTestJSON-313379827 tempest-ImagesNegativeTestJSON-313379827-project-member] [instance: be976871-706d-41e1-9423-55ff251a52e9] Start building block device mappings for instance. {{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 519.219274] env[61839]: DEBUG oslo_concurrency.lockutils [None req-12eb9dfa-b92e-47f2-a67a-cc399fb0a366 tempest-TenantUsagesTestJSON-1491461262 tempest-TenantUsagesTestJSON-1491461262-project-member] Releasing lock "refresh_cache-95685d36-7efa-42a9-8722-5b90d6edbce5" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 519.219274] env[61839]: DEBUG nova.compute.manager [None req-12eb9dfa-b92e-47f2-a67a-cc399fb0a366 tempest-TenantUsagesTestJSON-1491461262 tempest-TenantUsagesTestJSON-1491461262-project-member] [instance: 95685d36-7efa-42a9-8722-5b90d6edbce5] Start destroying the instance on the hypervisor. 
{{(pid=61839) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 519.219274] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-12eb9dfa-b92e-47f2-a67a-cc399fb0a366 tempest-TenantUsagesTestJSON-1491461262 tempest-TenantUsagesTestJSON-1491461262-project-member] [instance: 95685d36-7efa-42a9-8722-5b90d6edbce5] Destroying instance {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 519.219274] env[61839]: DEBUG oslo_concurrency.lockutils [req-e5534524-1406-450d-9834-206477e3557d req-a496b531-2b96-4069-b5c5-a1d316369882 service nova] Acquired lock "refresh_cache-95685d36-7efa-42a9-8722-5b90d6edbce5" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 519.219274] env[61839]: DEBUG nova.network.neutron [req-e5534524-1406-450d-9834-206477e3557d req-a496b531-2b96-4069-b5c5-a1d316369882 service nova] [instance: 95685d36-7efa-42a9-8722-5b90d6edbce5] Refreshing network info cache for port cb0e2d64-499b-40f4-8a14-5de5a5754bc3 {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 519.220373] env[61839]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-02b2448f-b925-472f-add3-57189c9fbc17 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 519.238434] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf654188-930b-48a3-8aff-9669c04393a5 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 519.256958] env[61839]: DEBUG nova.network.neutron [None req-67c06703-2c83-4f84-94a3-1193ea8f10d4 tempest-ImagesNegativeTestJSON-313379827 tempest-ImagesNegativeTestJSON-313379827-project-member] [instance: be976871-706d-41e1-9423-55ff251a52e9] Successfully created port: 93010f8d-86ce-4203-ac82-ff57a4f2d76b {{(pid=61839) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 519.281174] env[61839]: WARNING nova.virt.vmwareapi.vmops [None req-12eb9dfa-b92e-47f2-a67a-cc399fb0a366 tempest-TenantUsagesTestJSON-1491461262 tempest-TenantUsagesTestJSON-1491461262-project-member] [instance: 95685d36-7efa-42a9-8722-5b90d6edbce5] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 95685d36-7efa-42a9-8722-5b90d6edbce5 could not be found. [ 519.281174] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-12eb9dfa-b92e-47f2-a67a-cc399fb0a366 tempest-TenantUsagesTestJSON-1491461262 tempest-TenantUsagesTestJSON-1491461262-project-member] [instance: 95685d36-7efa-42a9-8722-5b90d6edbce5] Instance destroyed {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 519.281174] env[61839]: INFO nova.compute.manager [None req-12eb9dfa-b92e-47f2-a67a-cc399fb0a366 tempest-TenantUsagesTestJSON-1491461262 tempest-TenantUsagesTestJSON-1491461262-project-member] [instance: 95685d36-7efa-42a9-8722-5b90d6edbce5] Took 0.06 seconds to destroy the instance on the hypervisor. [ 519.281461] env[61839]: DEBUG oslo.service.loopingcall [None req-12eb9dfa-b92e-47f2-a67a-cc399fb0a366 tempest-TenantUsagesTestJSON-1491461262 tempest-TenantUsagesTestJSON-1491461262-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 519.281964] env[61839]: DEBUG nova.compute.manager [-] [instance: 95685d36-7efa-42a9-8722-5b90d6edbce5] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 519.282607] env[61839]: DEBUG nova.network.neutron [-] [instance: 95685d36-7efa-42a9-8722-5b90d6edbce5] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 519.316575] env[61839]: DEBUG nova.network.neutron [-] [instance: 95685d36-7efa-42a9-8722-5b90d6edbce5] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 519.363284] env[61839]: DEBUG nova.network.neutron [None req-71b74068-a47a-43e7-91f8-80e309405090 tempest-ServerDiagnosticsTest-192461370 tempest-ServerDiagnosticsTest-192461370-project-member] [instance: c258dbf2-be81-40e5-a11a-03dee332d3b6] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 519.377103] env[61839]: DEBUG nova.compute.manager [None req-f8fd2f30-7a32-47b5-822d-e89a7c694abc tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] [instance: 41823a25-5ff2-4838-854d-5bada8e5daca] Starting instance... {{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 519.386053] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04357345-384c-4994-87cb-78be2fbdfcd9 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 519.393200] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f541636-a59b-4fcc-82a9-06dfe227c7e2 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 519.426042] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-261e16e8-ea38-4787-b134-d00607967517 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 519.429935] env[61839]: DEBUG nova.network.neutron [None req-71b74068-a47a-43e7-91f8-80e309405090 tempest-ServerDiagnosticsTest-192461370 tempest-ServerDiagnosticsTest-192461370-project-member] [instance: c258dbf2-be81-40e5-a11a-03dee332d3b6] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 519.436751] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1313c1f9-eb58-43d6-b68a-69541a0c4432 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 519.452313] env[61839]: DEBUG nova.compute.provider_tree [None req-4249fffb-3702-4570-9f57-2cbc28b164c9 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 519.553088] env[61839]: DEBUG nova.compute.manager [req-4f9006f3-74d6-46d5-8ed7-c44e9f456375 req-0c4e469b-7362-4654-ae52-e0ff3e293c78 service nova] [instance: 
c258dbf2-be81-40e5-a11a-03dee332d3b6] Received event network-changed-50fc3cdf-49e3-4a6a-8a5c-8068eb6cd193 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 519.553287] env[61839]: DEBUG nova.compute.manager [req-4f9006f3-74d6-46d5-8ed7-c44e9f456375 req-0c4e469b-7362-4654-ae52-e0ff3e293c78 service nova] [instance: c258dbf2-be81-40e5-a11a-03dee332d3b6] Refreshing instance network info cache due to event network-changed-50fc3cdf-49e3-4a6a-8a5c-8068eb6cd193. {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 519.553477] env[61839]: DEBUG oslo_concurrency.lockutils [req-4f9006f3-74d6-46d5-8ed7-c44e9f456375 req-0c4e469b-7362-4654-ae52-e0ff3e293c78 service nova] Acquiring lock "refresh_cache-c258dbf2-be81-40e5-a11a-03dee332d3b6" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 519.618089] env[61839]: DEBUG nova.compute.manager [None req-2194692e-8fa8-43a0-9f32-21d13bbfb66f tempest-ServersTestFqdnHostnames-1400549167 tempest-ServersTestFqdnHostnames-1400549167-project-member] [instance: 6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5] Starting instance... {{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 519.749283] env[61839]: DEBUG nova.network.neutron [req-e5534524-1406-450d-9834-206477e3557d req-a496b531-2b96-4069-b5c5-a1d316369882 service nova] [instance: 95685d36-7efa-42a9-8722-5b90d6edbce5] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 519.813478] env[61839]: DEBUG nova.network.neutron [req-e5534524-1406-450d-9834-206477e3557d req-a496b531-2b96-4069-b5c5-a1d316369882 service nova] [instance: 95685d36-7efa-42a9-8722-5b90d6edbce5] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 519.821803] env[61839]: DEBUG nova.network.neutron [-] [instance: 95685d36-7efa-42a9-8722-5b90d6edbce5] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 519.906033] env[61839]: DEBUG oslo_concurrency.lockutils [None req-f8fd2f30-7a32-47b5-822d-e89a7c694abc tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 519.934216] env[61839]: DEBUG oslo_concurrency.lockutils [None req-71b74068-a47a-43e7-91f8-80e309405090 tempest-ServerDiagnosticsTest-192461370 tempest-ServerDiagnosticsTest-192461370-project-member] Releasing lock "refresh_cache-c258dbf2-be81-40e5-a11a-03dee332d3b6" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 519.935034] env[61839]: DEBUG nova.compute.manager [None req-71b74068-a47a-43e7-91f8-80e309405090 tempest-ServerDiagnosticsTest-192461370 tempest-ServerDiagnosticsTest-192461370-project-member] [instance: c258dbf2-be81-40e5-a11a-03dee332d3b6] Start destroying the instance on the hypervisor. 
{{(pid=61839) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 519.935034] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-71b74068-a47a-43e7-91f8-80e309405090 tempest-ServerDiagnosticsTest-192461370 tempest-ServerDiagnosticsTest-192461370-project-member] [instance: c258dbf2-be81-40e5-a11a-03dee332d3b6] Destroying instance {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 519.935209] env[61839]: DEBUG oslo_concurrency.lockutils [req-4f9006f3-74d6-46d5-8ed7-c44e9f456375 req-0c4e469b-7362-4654-ae52-e0ff3e293c78 service nova] Acquired lock "refresh_cache-c258dbf2-be81-40e5-a11a-03dee332d3b6" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 519.935374] env[61839]: DEBUG nova.network.neutron [req-4f9006f3-74d6-46d5-8ed7-c44e9f456375 req-0c4e469b-7362-4654-ae52-e0ff3e293c78 service nova] [instance: c258dbf2-be81-40e5-a11a-03dee332d3b6] Refreshing network info cache for port 50fc3cdf-49e3-4a6a-8a5c-8068eb6cd193 {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 519.936764] env[61839]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0a348625-f465-4178-a231-d244c11ef760 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 519.947772] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3804dc8-9c54-4125-8e5b-143ca4ab892b {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 519.960537] env[61839]: DEBUG nova.scheduler.client.report [None req-4249fffb-3702-4570-9f57-2cbc28b164c9 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 519.976547] env[61839]: WARNING nova.virt.vmwareapi.vmops [None req-71b74068-a47a-43e7-91f8-80e309405090 tempest-ServerDiagnosticsTest-192461370 tempest-ServerDiagnosticsTest-192461370-project-member] [instance: c258dbf2-be81-40e5-a11a-03dee332d3b6] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance c258dbf2-be81-40e5-a11a-03dee332d3b6 could not be found. [ 519.976547] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-71b74068-a47a-43e7-91f8-80e309405090 tempest-ServerDiagnosticsTest-192461370 tempest-ServerDiagnosticsTest-192461370-project-member] [instance: c258dbf2-be81-40e5-a11a-03dee332d3b6] Instance destroyed {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 519.976684] env[61839]: INFO nova.compute.manager [None req-71b74068-a47a-43e7-91f8-80e309405090 tempest-ServerDiagnosticsTest-192461370 tempest-ServerDiagnosticsTest-192461370-project-member] [instance: c258dbf2-be81-40e5-a11a-03dee332d3b6] Took 0.04 seconds to destroy the instance on the hypervisor. 
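(The inventory dictionary in the "Inventory has not changed" record above is what the resource tracker reports to Placement for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908. Schedulable capacity per resource class works out to (total - reserved) * allocation_ratio, with max_unit capping any single allocation; recomputing from the logged values:

    # Values copied from the scheduler report record above.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'max_unit': 16,
                      'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'max_unit': 65530,
                      'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'max_unit': 135,
                      'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        # Placement's effective-capacity arithmetic.
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print('%s: capacity %g, max single allocation %d'
              % (rc, capacity, inv['max_unit']))

    # VCPU: capacity 192, max single allocation 16
    # MEMORY_MB: capacity 196078, max single allocation 65530
    # DISK_GB: capacity 400, max single allocation 135
)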
[ 519.977262] env[61839]: DEBUG oslo.service.loopingcall [None req-71b74068-a47a-43e7-91f8-80e309405090 tempest-ServerDiagnosticsTest-192461370 tempest-ServerDiagnosticsTest-192461370-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 519.977540] env[61839]: DEBUG nova.compute.manager [-] [instance: c258dbf2-be81-40e5-a11a-03dee332d3b6] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 519.977645] env[61839]: DEBUG nova.network.neutron [-] [instance: c258dbf2-be81-40e5-a11a-03dee332d3b6] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 519.995406] env[61839]: DEBUG nova.network.neutron [-] [instance: c258dbf2-be81-40e5-a11a-03dee332d3b6] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 520.145636] env[61839]: DEBUG oslo_concurrency.lockutils [None req-2194692e-8fa8-43a0-9f32-21d13bbfb66f tempest-ServersTestFqdnHostnames-1400549167 tempest-ServersTestFqdnHostnames-1400549167-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 520.221505] env[61839]: DEBUG nova.compute.manager [None req-67c06703-2c83-4f84-94a3-1193ea8f10d4 tempest-ImagesNegativeTestJSON-313379827 tempest-ImagesNegativeTestJSON-313379827-project-member] [instance: be976871-706d-41e1-9423-55ff251a52e9] Start spawning the instance on the hypervisor. 
[ 520.254320] env[61839]: DEBUG nova.virt.hardware [None req-67c06703-2c83-4f84-94a3-1193ea8f10d4 tempest-ImagesNegativeTestJSON-313379827 tempest-ImagesNegativeTestJSON-313379827-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-18T16:51:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-18T16:50:57Z,direct_url=,disk_format='vmdk',id=e497cc62-282a-4a70-9770-22d80d8a1013,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a98e034276e44534a9feace637762da7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-18T16:50:58Z,virtual_size=,visibility=), allow threads: False {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}}
[ 520.254570] env[61839]: DEBUG nova.virt.hardware [None req-67c06703-2c83-4f84-94a3-1193ea8f10d4 tempest-ImagesNegativeTestJSON-313379827 tempest-ImagesNegativeTestJSON-313379827-project-member] Flavor limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}}
[ 520.255304] env[61839]: DEBUG nova.virt.hardware [None req-67c06703-2c83-4f84-94a3-1193ea8f10d4 tempest-ImagesNegativeTestJSON-313379827 tempest-ImagesNegativeTestJSON-313379827-project-member] Image limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 520.255516] env[61839]: DEBUG nova.virt.hardware [None req-67c06703-2c83-4f84-94a3-1193ea8f10d4 tempest-ImagesNegativeTestJSON-313379827 tempest-ImagesNegativeTestJSON-313379827-project-member] Flavor pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}}
[ 520.255953] env[61839]: DEBUG nova.virt.hardware [None req-67c06703-2c83-4f84-94a3-1193ea8f10d4 tempest-ImagesNegativeTestJSON-313379827 tempest-ImagesNegativeTestJSON-313379827-project-member] Image pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 520.256157] env[61839]: DEBUG nova.virt.hardware [None req-67c06703-2c83-4f84-94a3-1193ea8f10d4 tempest-ImagesNegativeTestJSON-313379827 tempest-ImagesNegativeTestJSON-313379827-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}}
[ 520.256376] env[61839]: DEBUG nova.virt.hardware [None req-67c06703-2c83-4f84-94a3-1193ea8f10d4 tempest-ImagesNegativeTestJSON-313379827 tempest-ImagesNegativeTestJSON-313379827-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}}
[ 520.256606] env[61839]: DEBUG nova.virt.hardware [None req-67c06703-2c83-4f84-94a3-1193ea8f10d4 tempest-ImagesNegativeTestJSON-313379827 tempest-ImagesNegativeTestJSON-313379827-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}}
[ 520.256783] env[61839]: DEBUG nova.virt.hardware [None req-67c06703-2c83-4f84-94a3-1193ea8f10d4 tempest-ImagesNegativeTestJSON-313379827 tempest-ImagesNegativeTestJSON-313379827-project-member] Got 1 possible topologies {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}}
[ 520.256965] env[61839]: DEBUG nova.virt.hardware [None req-67c06703-2c83-4f84-94a3-1193ea8f10d4 tempest-ImagesNegativeTestJSON-313379827 tempest-ImagesNegativeTestJSON-313379827-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}}
[ 520.257222] env[61839]: DEBUG nova.virt.hardware [None req-67c06703-2c83-4f84-94a3-1193ea8f10d4 tempest-ImagesNegativeTestJSON-313379827 tempest-ImagesNegativeTestJSON-313379827-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
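[Editor's note] The hardware.py walk above starts from unset flavor/image limits and preferences (0:0:0), defaults the maximums to 65536, and ends with the single viable topology for a 1-vCPU guest. The enumeration step can be reproduced with a small standalone sketch (a simplification of nova.virt.hardware._get_possible_cpu_topologies, not the real code):

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
        # Enumerate (sockets, cores, threads) triples whose product equals
        # the vCPU count, within the per-dimension maximums.
        topologies = []
        for sockets in range(1, min(vcpus, max_sockets) + 1):
            if vcpus % sockets:
                continue
            for cores in range(1, min(vcpus // sockets, max_cores) + 1):
                if (vcpus // sockets) % cores:
                    continue
                threads = vcpus // (sockets * cores)
                if threads <= max_threads:
                    topologies.append((sockets, cores, threads))
        return topologies

    print(possible_topologies(1))  # [(1, 1, 1)] -- matches "Got 1 possible topologies"
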
[ 520.259817] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-499ba298-5b31-4afb-80dd-c2d39286ea63 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 520.270317] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd6aed75-8184-4068-ac5d-b1c48ea6179a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 520.316975] env[61839]: DEBUG oslo_concurrency.lockutils [req-e5534524-1406-450d-9834-206477e3557d req-a496b531-2b96-4069-b5c5-a1d316369882 service nova] Releasing lock "refresh_cache-95685d36-7efa-42a9-8722-5b90d6edbce5" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 520.331968] env[61839]: INFO nova.compute.manager [-] [instance: 95685d36-7efa-42a9-8722-5b90d6edbce5] Took 1.05 seconds to deallocate network for instance.
[ 520.334677] env[61839]: DEBUG nova.compute.claims [None req-12eb9dfa-b92e-47f2-a67a-cc399fb0a366 tempest-TenantUsagesTestJSON-1491461262 tempest-TenantUsagesTestJSON-1491461262-project-member] [instance: 95685d36-7efa-42a9-8722-5b90d6edbce5] Aborting claim: {{(pid=61839) abort /opt/stack/nova/nova/compute/claims.py:85}}
[ 520.334677] env[61839]: DEBUG oslo_concurrency.lockutils [None req-12eb9dfa-b92e-47f2-a67a-cc399fb0a366 tempest-TenantUsagesTestJSON-1491461262 tempest-TenantUsagesTestJSON-1491461262-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 520.469376] env[61839]: DEBUG oslo_concurrency.lockutils [None req-4249fffb-3702-4570-9f57-2cbc28b164c9 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.234s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 520.470531] env[61839]: DEBUG nova.compute.manager [None req-4249fffb-3702-4570-9f57-2cbc28b164c9 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] [instance: 211e8267-3c33-42c8-852f-1c20d7987453] Start building networks asynchronously for instance. {{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}}
[ 520.475801] env[61839]: DEBUG nova.network.neutron [req-4f9006f3-74d6-46d5-8ed7-c44e9f456375 req-0c4e469b-7362-4654-ae52-e0ff3e293c78 service nova] [instance: c258dbf2-be81-40e5-a11a-03dee332d3b6] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 520.478675] env[61839]: DEBUG oslo_concurrency.lockutils [None req-93209ce3-de5a-4c91-b8b0-efd4bd89a6cc tempest-ServerDiagnosticsNegativeTest-1581315091 tempest-ServerDiagnosticsNegativeTest-1581315091-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.029s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 520.482614] env[61839]: INFO nova.compute.claims [None req-93209ce3-de5a-4c91-b8b0-efd4bd89a6cc tempest-ServerDiagnosticsNegativeTest-1581315091 tempest-ServerDiagnosticsNegativeTest-1581315091-project-member] [instance: 570fb8fe-391a-4f1b-be51-17979e9fb049] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 520.498084] env[61839]: DEBUG nova.network.neutron [-] [instance: c258dbf2-be81-40e5-a11a-03dee332d3b6] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 520.586603] env[61839]: DEBUG nova.network.neutron [req-4f9006f3-74d6-46d5-8ed7-c44e9f456375 req-0c4e469b-7362-4654-ae52-e0ff3e293c78 service nova] [instance: c258dbf2-be81-40e5-a11a-03dee332d3b6] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 520.625470] env[61839]: ERROR nova.compute.manager [None req-67c06703-2c83-4f84-94a3-1193ea8f10d4 tempest-ImagesNegativeTestJSON-313379827 tempest-ImagesNegativeTestJSON-313379827-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 93010f8d-86ce-4203-ac82-ff57a4f2d76b, please check neutron logs for more information.
[ 520.625470] env[61839]: ERROR nova.compute.manager Traceback (most recent call last):
[ 520.625470] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async
[ 520.625470] env[61839]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance(
[ 520.625470] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance
[ 520.625470] env[61839]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance(
[ 520.625470] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance
[ 520.625470] env[61839]: ERROR nova.compute.manager with excutils.save_and_reraise_exception():
[ 520.625470] env[61839]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 520.625470] env[61839]: ERROR nova.compute.manager self.force_reraise()
[ 520.625470] env[61839]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 520.625470] env[61839]: ERROR nova.compute.manager raise self.value
[ 520.625470] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance
[ 520.625470] env[61839]: ERROR nova.compute.manager updated_port = self._update_port(
[ 520.625470] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port
[ 520.625470] env[61839]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port)
[ 520.626112] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure
[ 520.626112] env[61839]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id'])
[ 520.626112] env[61839]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 93010f8d-86ce-4203-ac82-ff57a4f2d76b, please check neutron logs for more information.
[ 520.626112] env[61839]: ERROR nova.compute.manager
[ 520.626112] env[61839]: Traceback (most recent call last):
[ 520.626112] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait
[ 520.626112] env[61839]: listener.cb(fileno)
[ 520.626112] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 520.626112] env[61839]: result = function(*args, **kwargs)
[ 520.626112] env[61839]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 520.626112] env[61839]: return func(*args, **kwargs)
[ 520.626112] env[61839]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async
[ 520.626112] env[61839]: raise e
[ 520.626112] env[61839]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async
[ 520.626112] env[61839]: nwinfo = self.network_api.allocate_for_instance(
[ 520.626112] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance
[ 520.626112] env[61839]: created_port_ids = self._update_ports_for_instance(
[ 520.626112] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance
[ 520.626112] env[61839]: with excutils.save_and_reraise_exception():
[ 520.626112] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 520.626112] env[61839]: self.force_reraise()
[ 520.626112] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 520.626112] env[61839]: raise self.value
[ 520.626112] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance
[ 520.626112] env[61839]: updated_port = self._update_port(
[ 520.626112] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port
[ 520.626112] env[61839]: _ensure_no_port_binding_failure(port)
[ 520.626112] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure
[ 520.626112] env[61839]: raise exception.PortBindingFailed(port_id=port['id'])
[ 520.627391] env[61839]: nova.exception.PortBindingFailed: Binding failed for port 93010f8d-86ce-4203-ac82-ff57a4f2d76b, please check neutron logs for more information.
[ 520.627391] env[61839]: Removing descriptor: 17
[ 520.627391] env[61839]: ERROR nova.compute.manager [None req-67c06703-2c83-4f84-94a3-1193ea8f10d4 tempest-ImagesNegativeTestJSON-313379827 tempest-ImagesNegativeTestJSON-313379827-project-member] [instance: be976871-706d-41e1-9423-55ff251a52e9] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 93010f8d-86ce-4203-ac82-ff57a4f2d76b, please check neutron logs for more information.
[ 520.627391] env[61839]: ERROR nova.compute.manager [instance: be976871-706d-41e1-9423-55ff251a52e9] Traceback (most recent call last):
[ 520.627391] env[61839]: ERROR nova.compute.manager [instance: be976871-706d-41e1-9423-55ff251a52e9] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources
[ 520.627391] env[61839]: ERROR nova.compute.manager [instance: be976871-706d-41e1-9423-55ff251a52e9] yield resources
[ 520.627391] env[61839]: ERROR nova.compute.manager [instance: be976871-706d-41e1-9423-55ff251a52e9] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance
[ 520.627391] env[61839]: ERROR nova.compute.manager [instance: be976871-706d-41e1-9423-55ff251a52e9] self.driver.spawn(context, instance, image_meta,
[ 520.627391] env[61839]: ERROR nova.compute.manager [instance: be976871-706d-41e1-9423-55ff251a52e9] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn
[ 520.627391] env[61839]: ERROR nova.compute.manager [instance: be976871-706d-41e1-9423-55ff251a52e9] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 520.627391] env[61839]: ERROR nova.compute.manager [instance: be976871-706d-41e1-9423-55ff251a52e9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn
[ 520.627391] env[61839]: ERROR nova.compute.manager [instance: be976871-706d-41e1-9423-55ff251a52e9] vm_ref = self.build_virtual_machine(instance,
[ 520.627781] env[61839]: ERROR nova.compute.manager [instance: be976871-706d-41e1-9423-55ff251a52e9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine
[ 520.627781] env[61839]: ERROR nova.compute.manager [instance: be976871-706d-41e1-9423-55ff251a52e9] vif_infos = vmwarevif.get_vif_info(self._session,
[ 520.627781] env[61839]: ERROR nova.compute.manager [instance: be976871-706d-41e1-9423-55ff251a52e9] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info
[ 520.627781] env[61839]: ERROR nova.compute.manager [instance: be976871-706d-41e1-9423-55ff251a52e9] for vif in network_info:
[ 520.627781] env[61839]: ERROR nova.compute.manager [instance: be976871-706d-41e1-9423-55ff251a52e9] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__
[ 520.627781] env[61839]: ERROR nova.compute.manager [instance: be976871-706d-41e1-9423-55ff251a52e9] return self._sync_wrapper(fn, *args, **kwargs)
[ 520.627781] env[61839]: ERROR nova.compute.manager [instance: be976871-706d-41e1-9423-55ff251a52e9] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper
[ 520.627781] env[61839]: ERROR nova.compute.manager [instance: be976871-706d-41e1-9423-55ff251a52e9] self.wait()
[ 520.627781] env[61839]: ERROR nova.compute.manager [instance: be976871-706d-41e1-9423-55ff251a52e9] File "/opt/stack/nova/nova/network/model.py", line 635, in wait
[ 520.627781] env[61839]: ERROR nova.compute.manager [instance: be976871-706d-41e1-9423-55ff251a52e9] self[:] = self._gt.wait()
[ 520.627781] env[61839]: ERROR nova.compute.manager [instance: be976871-706d-41e1-9423-55ff251a52e9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait
[ 520.627781] env[61839]: ERROR nova.compute.manager [instance: be976871-706d-41e1-9423-55ff251a52e9] return self._exit_event.wait()
[ 520.627781] env[61839]: ERROR nova.compute.manager [instance: be976871-706d-41e1-9423-55ff251a52e9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 520.628118] env[61839]: ERROR nova.compute.manager [instance: be976871-706d-41e1-9423-55ff251a52e9] result = hub.switch()
[ 520.628118] env[61839]: ERROR nova.compute.manager [instance: be976871-706d-41e1-9423-55ff251a52e9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 520.628118] env[61839]: ERROR nova.compute.manager [instance: be976871-706d-41e1-9423-55ff251a52e9] return self.greenlet.switch()
[ 520.628118] env[61839]: ERROR nova.compute.manager [instance: be976871-706d-41e1-9423-55ff251a52e9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 520.628118] env[61839]: ERROR nova.compute.manager [instance: be976871-706d-41e1-9423-55ff251a52e9] result = function(*args, **kwargs)
[ 520.628118] env[61839]: ERROR nova.compute.manager [instance: be976871-706d-41e1-9423-55ff251a52e9] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 520.628118] env[61839]: ERROR nova.compute.manager [instance: be976871-706d-41e1-9423-55ff251a52e9] return func(*args, **kwargs)
[ 520.628118] env[61839]: ERROR nova.compute.manager [instance: be976871-706d-41e1-9423-55ff251a52e9] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async
[ 520.628118] env[61839]: ERROR nova.compute.manager [instance: be976871-706d-41e1-9423-55ff251a52e9] raise e
[ 520.628118] env[61839]: ERROR nova.compute.manager [instance: be976871-706d-41e1-9423-55ff251a52e9] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async
[ 520.628118] env[61839]: ERROR nova.compute.manager [instance: be976871-706d-41e1-9423-55ff251a52e9] nwinfo = self.network_api.allocate_for_instance(
[ 520.628118] env[61839]: ERROR nova.compute.manager [instance: be976871-706d-41e1-9423-55ff251a52e9] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance
[ 520.628118] env[61839]: ERROR nova.compute.manager [instance: be976871-706d-41e1-9423-55ff251a52e9] created_port_ids = self._update_ports_for_instance(
[ 520.628424] env[61839]: ERROR nova.compute.manager [instance: be976871-706d-41e1-9423-55ff251a52e9] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance
[ 520.628424] env[61839]: ERROR nova.compute.manager [instance: be976871-706d-41e1-9423-55ff251a52e9] with excutils.save_and_reraise_exception():
[ 520.628424] env[61839]: ERROR nova.compute.manager [instance: be976871-706d-41e1-9423-55ff251a52e9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 520.628424] env[61839]: ERROR nova.compute.manager [instance: be976871-706d-41e1-9423-55ff251a52e9] self.force_reraise()
[ 520.628424] env[61839]: ERROR nova.compute.manager [instance: be976871-706d-41e1-9423-55ff251a52e9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 520.628424] env[61839]: ERROR nova.compute.manager [instance: be976871-706d-41e1-9423-55ff251a52e9] raise self.value
[ 520.628424] env[61839]: ERROR nova.compute.manager [instance: be976871-706d-41e1-9423-55ff251a52e9] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance
[ 520.628424] env[61839]: ERROR nova.compute.manager [instance: be976871-706d-41e1-9423-55ff251a52e9] updated_port = self._update_port(
[ 520.628424] env[61839]: ERROR nova.compute.manager [instance: be976871-706d-41e1-9423-55ff251a52e9] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port
[ 520.628424] env[61839]: ERROR nova.compute.manager [instance: be976871-706d-41e1-9423-55ff251a52e9] _ensure_no_port_binding_failure(port)
[ 520.628424] env[61839]: ERROR nova.compute.manager [instance: be976871-706d-41e1-9423-55ff251a52e9] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure
[ 520.628424] env[61839]: ERROR nova.compute.manager [instance: be976871-706d-41e1-9423-55ff251a52e9] raise exception.PortBindingFailed(port_id=port['id'])
[ 520.628752] env[61839]: ERROR nova.compute.manager [instance: be976871-706d-41e1-9423-55ff251a52e9] nova.exception.PortBindingFailed: Binding failed for port 93010f8d-86ce-4203-ac82-ff57a4f2d76b, please check neutron logs for more information.
[ 520.628752] env[61839]: ERROR nova.compute.manager [instance: be976871-706d-41e1-9423-55ff251a52e9]
[ 520.628752] env[61839]: INFO nova.compute.manager [None req-67c06703-2c83-4f84-94a3-1193ea8f10d4 tempest-ImagesNegativeTestJSON-313379827 tempest-ImagesNegativeTestJSON-313379827-project-member] [instance: be976871-706d-41e1-9423-55ff251a52e9] Terminating instance
[ 520.629607] env[61839]: DEBUG oslo_concurrency.lockutils [None req-67c06703-2c83-4f84-94a3-1193ea8f10d4 tempest-ImagesNegativeTestJSON-313379827 tempest-ImagesNegativeTestJSON-313379827-project-member] Acquiring lock "refresh_cache-be976871-706d-41e1-9423-55ff251a52e9" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 520.630514] env[61839]: DEBUG oslo_concurrency.lockutils [None req-67c06703-2c83-4f84-94a3-1193ea8f10d4 tempest-ImagesNegativeTestJSON-313379827 tempest-ImagesNegativeTestJSON-313379827-project-member] Acquired lock "refresh_cache-be976871-706d-41e1-9423-55ff251a52e9" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 520.630514] env[61839]: DEBUG nova.network.neutron [None req-67c06703-2c83-4f84-94a3-1193ea8f10d4 tempest-ImagesNegativeTestJSON-313379827 tempest-ImagesNegativeTestJSON-313379827-project-member] [instance: be976871-706d-41e1-9423-55ff251a52e9] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}}
[ 520.984373] env[61839]: DEBUG nova.compute.utils [None req-4249fffb-3702-4570-9f57-2cbc28b164c9 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Using /dev/sd instead of None {{(pid=61839) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}}
[ 520.990025] env[61839]: DEBUG nova.compute.manager [None req-4249fffb-3702-4570-9f57-2cbc28b164c9 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] [instance: 211e8267-3c33-42c8-852f-1c20d7987453] Not allocating networking since 'none' was specified. {{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}}
[ 521.006033] env[61839]: INFO nova.compute.manager [-] [instance: c258dbf2-be81-40e5-a11a-03dee332d3b6] Took 1.03 seconds to deallocate network for instance.
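[Editor's note] Every traceback above passes through oslo_utils.excutils.save_and_reraise_exception (the __exit__ -> force_reraise() -> "raise self.value" frames): cleanup runs while the original exception is preserved, then re-raised. The canonical idiom, with a hypothetical failing call standing in for the Neutron port update:

    from oslo_utils import excutils

    def bind_port():
        # Hypothetical stand-in for the port update that fails.
        raise RuntimeError("binding failed")

    try:
        bind_port()
    except Exception:
        # On leaving the block, __exit__() calls force_reraise(), which is
        # the "raise self.value" frame seen in the tracebacks above.
        with excutils.save_and_reraise_exception():
            print("cleaning up ports before re-raising")
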
[ 521.008226] env[61839]: DEBUG nova.compute.claims [None req-71b74068-a47a-43e7-91f8-80e309405090 tempest-ServerDiagnosticsTest-192461370 tempest-ServerDiagnosticsTest-192461370-project-member] [instance: c258dbf2-be81-40e5-a11a-03dee332d3b6] Aborting claim: {{(pid=61839) abort /opt/stack/nova/nova/compute/claims.py:85}}
[ 521.008409] env[61839]: DEBUG oslo_concurrency.lockutils [None req-71b74068-a47a-43e7-91f8-80e309405090 tempest-ServerDiagnosticsTest-192461370 tempest-ServerDiagnosticsTest-192461370-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 521.090858] env[61839]: DEBUG oslo_concurrency.lockutils [req-4f9006f3-74d6-46d5-8ed7-c44e9f456375 req-0c4e469b-7362-4654-ae52-e0ff3e293c78 service nova] Releasing lock "refresh_cache-c258dbf2-be81-40e5-a11a-03dee332d3b6" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 521.156481] env[61839]: DEBUG nova.network.neutron [None req-67c06703-2c83-4f84-94a3-1193ea8f10d4 tempest-ImagesNegativeTestJSON-313379827 tempest-ImagesNegativeTestJSON-313379827-project-member] [instance: be976871-706d-41e1-9423-55ff251a52e9] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 521.289097] env[61839]: DEBUG nova.network.neutron [None req-67c06703-2c83-4f84-94a3-1193ea8f10d4 tempest-ImagesNegativeTestJSON-313379827 tempest-ImagesNegativeTestJSON-313379827-project-member] [instance: be976871-706d-41e1-9423-55ff251a52e9] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 521.488767] env[61839]: DEBUG nova.compute.manager [None req-4249fffb-3702-4570-9f57-2cbc28b164c9 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] [instance: 211e8267-3c33-42c8-852f-1c20d7987453] Start building block device mappings for instance. {{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}}
[ 521.519730] env[61839]: DEBUG nova.compute.manager [req-2e52ae8b-28e6-43b7-937c-493f7848d639 req-4cb58d6c-8978-4204-a5ef-2eccab1190ef service nova] [instance: 95685d36-7efa-42a9-8722-5b90d6edbce5] Received event network-vif-deleted-cb0e2d64-499b-40f4-8a14-5de5a5754bc3 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}}
[ 521.519940] env[61839]: DEBUG nova.compute.manager [req-2e52ae8b-28e6-43b7-937c-493f7848d639 req-4cb58d6c-8978-4204-a5ef-2eccab1190ef service nova] [instance: be976871-706d-41e1-9423-55ff251a52e9] Received event network-changed-93010f8d-86ce-4203-ac82-ff57a4f2d76b {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}}
[ 521.520435] env[61839]: DEBUG nova.compute.manager [req-2e52ae8b-28e6-43b7-937c-493f7848d639 req-4cb58d6c-8978-4204-a5ef-2eccab1190ef service nova] [instance: be976871-706d-41e1-9423-55ff251a52e9] Refreshing instance network info cache due to event network-changed-93010f8d-86ce-4203-ac82-ff57a4f2d76b. {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}}
[ 521.520435] env[61839]: DEBUG oslo_concurrency.lockutils [req-2e52ae8b-28e6-43b7-937c-493f7848d639 req-4cb58d6c-8978-4204-a5ef-2eccab1190ef service nova] Acquiring lock "refresh_cache-be976871-706d-41e1-9423-55ff251a52e9" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 521.635217] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1982a55-7e6c-4dc1-86b7-de9ba418a2db {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 521.645852] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ff376f6-0cd5-4312-a4ea-60932ba41d27 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 521.678772] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5569fc29-6cf9-48bc-8d38-7205c88cd1a0 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 521.687098] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-737f4ccf-92fc-4dcc-b974-3840c562de9b {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 521.703463] env[61839]: DEBUG nova.compute.provider_tree [None req-93209ce3-de5a-4c91-b8b0-efd4bd89a6cc tempest-ServerDiagnosticsNegativeTest-1581315091 tempest-ServerDiagnosticsNegativeTest-1581315091-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 521.795374] env[61839]: DEBUG oslo_concurrency.lockutils [None req-67c06703-2c83-4f84-94a3-1193ea8f10d4 tempest-ImagesNegativeTestJSON-313379827 tempest-ImagesNegativeTestJSON-313379827-project-member] Releasing lock "refresh_cache-be976871-706d-41e1-9423-55ff251a52e9" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 521.795374] env[61839]: DEBUG nova.compute.manager [None req-67c06703-2c83-4f84-94a3-1193ea8f10d4 tempest-ImagesNegativeTestJSON-313379827 tempest-ImagesNegativeTestJSON-313379827-project-member] [instance: be976871-706d-41e1-9423-55ff251a52e9] Start destroying the instance on the hypervisor. {{(pid=61839) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}}
[ 521.795374] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-67c06703-2c83-4f84-94a3-1193ea8f10d4 tempest-ImagesNegativeTestJSON-313379827 tempest-ImagesNegativeTestJSON-313379827-project-member] [instance: be976871-706d-41e1-9423-55ff251a52e9] Destroying instance {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}}
[ 521.795374] env[61839]: DEBUG oslo_concurrency.lockutils [req-2e52ae8b-28e6-43b7-937c-493f7848d639 req-4cb58d6c-8978-4204-a5ef-2eccab1190ef service nova] Acquired lock "refresh_cache-be976871-706d-41e1-9423-55ff251a52e9" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 521.795374] env[61839]: DEBUG nova.network.neutron [req-2e52ae8b-28e6-43b7-937c-493f7848d639 req-4cb58d6c-8978-4204-a5ef-2eccab1190ef service nova] [instance: be976871-706d-41e1-9423-55ff251a52e9] Refreshing network info cache for port 93010f8d-86ce-4203-ac82-ff57a4f2d76b {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}}
[ 521.795642] env[61839]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-725a5794-772f-4023-8d05-9e7ea46b15e1 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 521.808139] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d141a6a8-12eb-490d-8175-5379a28b2e5b {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 521.831924] env[61839]: WARNING nova.virt.vmwareapi.vmops [None req-67c06703-2c83-4f84-94a3-1193ea8f10d4 tempest-ImagesNegativeTestJSON-313379827 tempest-ImagesNegativeTestJSON-313379827-project-member] [instance: be976871-706d-41e1-9423-55ff251a52e9] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance be976871-706d-41e1-9423-55ff251a52e9 could not be found.
[ 521.832179] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-67c06703-2c83-4f84-94a3-1193ea8f10d4 tempest-ImagesNegativeTestJSON-313379827 tempest-ImagesNegativeTestJSON-313379827-project-member] [instance: be976871-706d-41e1-9423-55ff251a52e9] Instance destroyed {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}}
[ 521.832358] env[61839]: INFO nova.compute.manager [None req-67c06703-2c83-4f84-94a3-1193ea8f10d4 tempest-ImagesNegativeTestJSON-313379827 tempest-ImagesNegativeTestJSON-313379827-project-member] [instance: be976871-706d-41e1-9423-55ff251a52e9] Took 0.04 seconds to destroy the instance on the hypervisor.
[ 521.832596] env[61839]: DEBUG oslo.service.loopingcall [None req-67c06703-2c83-4f84-94a3-1193ea8f10d4 tempest-ImagesNegativeTestJSON-313379827 tempest-ImagesNegativeTestJSON-313379827-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 521.832792] env[61839]: DEBUG nova.compute.manager [-] [instance: be976871-706d-41e1-9423-55ff251a52e9] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}}
[ 521.832880] env[61839]: DEBUG nova.network.neutron [-] [instance: be976871-706d-41e1-9423-55ff251a52e9] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}}
[ 521.857667] env[61839]: DEBUG nova.network.neutron [-] [instance: be976871-706d-41e1-9423-55ff251a52e9] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 522.207232] env[61839]: DEBUG nova.scheduler.client.report [None req-93209ce3-de5a-4c91-b8b0-efd4bd89a6cc tempest-ServerDiagnosticsNegativeTest-1581315091 tempest-ServerDiagnosticsNegativeTest-1581315091-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
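[Editor's note] The inventory dict reported to placement above determines the node's schedulable capacity: placement admits allocations while used + requested <= (total - reserved) * allocation_ratio per resource class. Worked out for this node (plain arithmetic on the logged values, not placement code):

    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }
    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(f"{rc}: {capacity:g} allocatable")
    # VCPU: 192 (48 physical cores at a 4.0 ratio), MEMORY_MB: 196078, DISK_GB: 400
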
[ 522.316702] env[61839]: DEBUG nova.network.neutron [req-2e52ae8b-28e6-43b7-937c-493f7848d639 req-4cb58d6c-8978-4204-a5ef-2eccab1190ef service nova] [instance: be976871-706d-41e1-9423-55ff251a52e9] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 522.355747] env[61839]: DEBUG nova.compute.manager [req-7f20fa46-491f-4d64-b1a5-d3aa035e7df5 req-9ed8378a-9222-4a81-9b49-87990ee17da3 service nova] [instance: c258dbf2-be81-40e5-a11a-03dee332d3b6] Received event network-vif-deleted-50fc3cdf-49e3-4a6a-8a5c-8068eb6cd193 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}}
[ 522.360813] env[61839]: DEBUG nova.network.neutron [-] [instance: be976871-706d-41e1-9423-55ff251a52e9] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 522.430095] env[61839]: DEBUG nova.network.neutron [req-2e52ae8b-28e6-43b7-937c-493f7848d639 req-4cb58d6c-8978-4204-a5ef-2eccab1190ef service nova] [instance: be976871-706d-41e1-9423-55ff251a52e9] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 522.501135] env[61839]: DEBUG nova.compute.manager [None req-4249fffb-3702-4570-9f57-2cbc28b164c9 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] [instance: 211e8267-3c33-42c8-852f-1c20d7987453] Start spawning the instance on the hypervisor. {{(pid=61839) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}}
[ 522.528342] env[61839]: DEBUG nova.virt.hardware [None req-4249fffb-3702-4570-9f57-2cbc28b164c9 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-18T16:51:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-18T16:50:57Z,direct_url=,disk_format='vmdk',id=e497cc62-282a-4a70-9770-22d80d8a1013,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a98e034276e44534a9feace637762da7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-18T16:50:58Z,virtual_size=,visibility=), allow threads: False {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}}
[ 522.528612] env[61839]: DEBUG nova.virt.hardware [None req-4249fffb-3702-4570-9f57-2cbc28b164c9 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Flavor limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}}
[ 522.528822] env[61839]: DEBUG nova.virt.hardware [None req-4249fffb-3702-4570-9f57-2cbc28b164c9 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Image limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 522.529325] env[61839]: DEBUG nova.virt.hardware [None req-4249fffb-3702-4570-9f57-2cbc28b164c9 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Flavor pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}}
[ 522.529325] env[61839]: DEBUG nova.virt.hardware [None req-4249fffb-3702-4570-9f57-2cbc28b164c9 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Image pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 522.529325] env[61839]: DEBUG nova.virt.hardware [None req-4249fffb-3702-4570-9f57-2cbc28b164c9 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}}
[ 522.529476] env[61839]: DEBUG nova.virt.hardware [None req-4249fffb-3702-4570-9f57-2cbc28b164c9 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}}
[ 522.529588] env[61839]: DEBUG nova.virt.hardware [None req-4249fffb-3702-4570-9f57-2cbc28b164c9 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}}
[ 522.529746] env[61839]: DEBUG nova.virt.hardware [None req-4249fffb-3702-4570-9f57-2cbc28b164c9 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Got 1 possible topologies {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}}
[ 522.530025] env[61839]: DEBUG nova.virt.hardware [None req-4249fffb-3702-4570-9f57-2cbc28b164c9 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}}
[ 522.530089] env[61839]: DEBUG nova.virt.hardware [None req-4249fffb-3702-4570-9f57-2cbc28b164c9 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
[ 522.530986] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39e23bcf-b90f-44a4-bc22-e875c9adbdec {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 522.540047] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-692fe122-0911-41f1-8ca5-f8c6a1ef6d51 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 522.556651] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-4249fffb-3702-4570-9f57-2cbc28b164c9 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] [instance: 211e8267-3c33-42c8-852f-1c20d7987453] Instance VIF info [] {{(pid=61839) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}}
[ 522.566366] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-4249fffb-3702-4570-9f57-2cbc28b164c9 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Creating folder: OpenStack. Parent ref: group-v4. {{(pid=61839) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}}
[ 522.566790] env[61839]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-75936c66-bb5f-410e-b210-a111bd282499 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 522.582499] env[61839]: INFO nova.virt.vmwareapi.vm_util [None req-4249fffb-3702-4570-9f57-2cbc28b164c9 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Created folder: OpenStack in parent group-v4.
[ 522.582731] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-4249fffb-3702-4570-9f57-2cbc28b164c9 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Creating folder: Project (388f5b9268964aadb023b7eeafe35e99). Parent ref: group-v281288. {{(pid=61839) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}}
[ 522.582968] env[61839]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8aabd0f0-47c5-402e-a4ed-f6ec6b6c9882 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 522.593477] env[61839]: INFO nova.virt.vmwareapi.vm_util [None req-4249fffb-3702-4570-9f57-2cbc28b164c9 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Created folder: Project (388f5b9268964aadb023b7eeafe35e99) in parent group-v281288.
[ 522.593672] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-4249fffb-3702-4570-9f57-2cbc28b164c9 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Creating folder: Instances. Parent ref: group-v281289. {{(pid=61839) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}}
[ 522.593910] env[61839]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-baeb77fd-11ee-44bc-a763-bb8267b4d394 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 522.605481] env[61839]: INFO nova.virt.vmwareapi.vm_util [None req-4249fffb-3702-4570-9f57-2cbc28b164c9 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Created folder: Instances in parent group-v281289.
[ 522.605830] env[61839]: DEBUG oslo.service.loopingcall [None req-4249fffb-3702-4570-9f57-2cbc28b164c9 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 522.605908] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 211e8267-3c33-42c8-852f-1c20d7987453] Creating VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}}
[ 522.606354] env[61839]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f2737a2f-67d5-4341-beb3-18863c96dea1 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 522.622988] env[61839]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){
[ 522.622988] env[61839]: value = "task-1314214"
[ 522.622988] env[61839]: _type = "Task"
[ 522.622988] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 522.631694] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314214, 'name': CreateVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
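[Editor's note] The "Waiting for the task ... progress is N%" pairs above come from oslo.vmware polling vCenter tasks with oslo.service's FixedIntervalLoopingCall, which signals completion by raising LoopingCallDone. A self-contained sketch of that polling pattern with a fake task (not oslo.vmware's actual _poll_task):

    from oslo_service import loopingcall

    progress = {'pct': 0}

    def _poll_task():
        # Fake task that advances 50% per poll; a real poller would read
        # the TaskInfo from vCenter instead.
        progress['pct'] += 50
        print(f"progress is {progress['pct']}%")
        if progress['pct'] >= 100:
            raise loopingcall.LoopingCallDone(retvalue='task-1314214')

    timer = loopingcall.FixedIntervalLoopingCall(_poll_task)
    print(timer.start(interval=0.5).wait())  # polls until LoopingCallDone, then returns the retvalue
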
[ 522.714664] env[61839]: DEBUG oslo_concurrency.lockutils [None req-93209ce3-de5a-4c91-b8b0-efd4bd89a6cc tempest-ServerDiagnosticsNegativeTest-1581315091 tempest-ServerDiagnosticsNegativeTest-1581315091-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.237s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 522.715381] env[61839]: DEBUG nova.compute.manager [None req-93209ce3-de5a-4c91-b8b0-efd4bd89a6cc tempest-ServerDiagnosticsNegativeTest-1581315091 tempest-ServerDiagnosticsNegativeTest-1581315091-project-member] [instance: 570fb8fe-391a-4f1b-be51-17979e9fb049] Start building networks asynchronously for instance. {{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}}
[ 522.717987] env[61839]: DEBUG oslo_concurrency.lockutils [None req-f8fd2f30-7a32-47b5-822d-e89a7c694abc tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.813s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 522.720347] env[61839]: INFO nova.compute.claims [None req-f8fd2f30-7a32-47b5-822d-e89a7c694abc tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] [instance: 41823a25-5ff2-4838-854d-5bada8e5daca] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 522.820988] env[61839]: DEBUG oslo_concurrency.lockutils [None req-e2e7743f-119e-4c3a-a685-1f1242f17cae tempest-VolumesAssistedSnapshotsTest-1863052206 tempest-VolumesAssistedSnapshotsTest-1863052206-project-member] Acquiring lock "9594f132-d558-4c75-872f-b1d1b7c08f66" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 522.822404] env[61839]: DEBUG oslo_concurrency.lockutils [None req-e2e7743f-119e-4c3a-a685-1f1242f17cae tempest-VolumesAssistedSnapshotsTest-1863052206 tempest-VolumesAssistedSnapshotsTest-1863052206-project-member] Lock "9594f132-d558-4c75-872f-b1d1b7c08f66" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 522.863965] env[61839]: INFO nova.compute.manager [-] [instance: be976871-706d-41e1-9423-55ff251a52e9] Took 1.03 seconds to deallocate network for instance.
[ 522.866712] env[61839]: DEBUG nova.compute.claims [None req-67c06703-2c83-4f84-94a3-1193ea8f10d4 tempest-ImagesNegativeTestJSON-313379827 tempest-ImagesNegativeTestJSON-313379827-project-member] [instance: be976871-706d-41e1-9423-55ff251a52e9] Aborting claim: {{(pid=61839) abort /opt/stack/nova/nova/compute/claims.py:85}}
[ 522.867610] env[61839]: DEBUG oslo_concurrency.lockutils [None req-67c06703-2c83-4f84-94a3-1193ea8f10d4 tempest-ImagesNegativeTestJSON-313379827 tempest-ImagesNegativeTestJSON-313379827-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 522.933477] env[61839]: DEBUG oslo_concurrency.lockutils [req-2e52ae8b-28e6-43b7-937c-493f7848d639 req-4cb58d6c-8978-4204-a5ef-2eccab1190ef service nova] Releasing lock "refresh_cache-be976871-706d-41e1-9423-55ff251a52e9" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 522.934108] env[61839]: DEBUG nova.compute.manager [req-2e52ae8b-28e6-43b7-937c-493f7848d639 req-4cb58d6c-8978-4204-a5ef-2eccab1190ef service nova] [instance: be976871-706d-41e1-9423-55ff251a52e9] Received event network-vif-deleted-93010f8d-86ce-4203-ac82-ff57a4f2d76b {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}}
[ 523.133126] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314214, 'name': CreateVM_Task, 'duration_secs': 0.343171} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 523.133293] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 211e8267-3c33-42c8-852f-1c20d7987453] Created VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}}
[ 523.134377] env[61839]: DEBUG oslo_vmware.service [None req-4249fffb-3702-4570-9f57-2cbc28b164c9 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbc9210e-5fac-4dd7-8a2f-c9bfffb6af9b {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 523.139785] env[61839]: DEBUG oslo_concurrency.lockutils [None req-4249fffb-3702-4570-9f57-2cbc28b164c9 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 523.139963] env[61839]: DEBUG oslo_concurrency.lockutils [None req-4249fffb-3702-4570-9f57-2cbc28b164c9 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 523.140583] env[61839]: DEBUG oslo_concurrency.lockutils [None req-4249fffb-3702-4570-9f57-2cbc28b164c9 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 523.140801] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f93cb701-608e-413d-b2d4-435611b504f8 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 523.145056] env[61839]: DEBUG oslo_vmware.api [None req-4249fffb-3702-4570-9f57-2cbc28b164c9 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Waiting for the task: (returnval){
[ 523.145056] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]527b070e-c256-e50c-af9e-e67b12071f4d"
[ 523.145056] env[61839]: _type = "Task"
[ 523.145056] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 523.152859] env[61839]: DEBUG oslo_vmware.api [None req-4249fffb-3702-4570-9f57-2cbc28b164c9 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]527b070e-c256-e50c-af9e-e67b12071f4d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 523.224466] env[61839]: DEBUG nova.compute.utils [None req-93209ce3-de5a-4c91-b8b0-efd4bd89a6cc tempest-ServerDiagnosticsNegativeTest-1581315091 tempest-ServerDiagnosticsNegativeTest-1581315091-project-member] Using /dev/sd instead of None {{(pid=61839) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}}
[ 523.225980] env[61839]: DEBUG nova.compute.manager [None req-93209ce3-de5a-4c91-b8b0-efd4bd89a6cc tempest-ServerDiagnosticsNegativeTest-1581315091 tempest-ServerDiagnosticsNegativeTest-1581315091-project-member] [instance: 570fb8fe-391a-4f1b-be51-17979e9fb049] Allocating IP information in the background. {{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}}
[ 523.227513] env[61839]: DEBUG nova.network.neutron [None req-93209ce3-de5a-4c91-b8b0-efd4bd89a6cc tempest-ServerDiagnosticsNegativeTest-1581315091 tempest-ServerDiagnosticsNegativeTest-1581315091-project-member] [instance: 570fb8fe-391a-4f1b-be51-17979e9fb049] allocate_for_instance() {{(pid=61839) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}}
[ 523.295456] env[61839]: DEBUG nova.policy [None req-93209ce3-de5a-4c91-b8b0-efd4bd89a6cc tempest-ServerDiagnosticsNegativeTest-1581315091 tempest-ServerDiagnosticsNegativeTest-1581315091-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7cc52944f280461695fc2cd4393c280c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd8c7416213b84de48dca35ce1a0538bc', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61839) authorize /opt/stack/nova/nova/policy.py:201}}
[ 523.325536] env[61839]: DEBUG nova.compute.manager [None req-e2e7743f-119e-4c3a-a685-1f1242f17cae tempest-VolumesAssistedSnapshotsTest-1863052206 tempest-VolumesAssistedSnapshotsTest-1863052206-project-member] [instance: 9594f132-d558-4c75-872f-b1d1b7c08f66] Starting instance... {{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}}
[ 523.665654] env[61839]: DEBUG oslo_concurrency.lockutils [None req-4249fffb-3702-4570-9f57-2cbc28b164c9 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 523.666168] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-4249fffb-3702-4570-9f57-2cbc28b164c9 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] [instance: 211e8267-3c33-42c8-852f-1c20d7987453] Processing image e497cc62-282a-4a70-9770-22d80d8a1013 {{(pid=61839) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}}
[ 523.666461] env[61839]: DEBUG oslo_concurrency.lockutils [None req-4249fffb-3702-4570-9f57-2cbc28b164c9 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 523.666627] env[61839]: DEBUG oslo_concurrency.lockutils [None req-4249fffb-3702-4570-9f57-2cbc28b164c9 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 523.667773] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-4249fffb-3702-4570-9f57-2cbc28b164c9 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 523.667773] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-95c47c15-bfca-4861-892a-09b1c8fd7a3a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 523.689639] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-4249fffb-3702-4570-9f57-2cbc28b164c9 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 523.689900] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-4249fffb-3702-4570-9f57-2cbc28b164c9 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61839) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}}
[ 523.690845] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4d6b987-4b4e-423f-8c20-9a49eebd0cea {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 523.699634] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bfd19f3e-c7e0-44f0-a0f9-f440393fcf48 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 523.707443] env[61839]: DEBUG oslo_vmware.api [None req-4249fffb-3702-4570-9f57-2cbc28b164c9 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Waiting for the task: (returnval){
[ 523.707443] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52d973e9-bc20-1b50-b5e5-7d6f75851ac4"
[ 523.707443] env[61839]: _type = "Task"
[ 523.707443] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 523.716806] env[61839]: DEBUG oslo_vmware.api [None req-4249fffb-3702-4570-9f57-2cbc28b164c9 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52d973e9-bc20-1b50-b5e5-7d6f75851ac4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
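[Editor's note] The image-cache lock names and the SearchDatastore targets above are datastore paths of the form "[datastore1] devstack-image-cache_base/<image-id>/<image-id>.vmdk". A sketch of building them with oslo.vmware's DatastorePath helper, using the image ID from the log (assuming oslo_vmware.objects.datastore.DatastorePath, which is the helper Nova's vmwareapi driver uses for such paths):

    from oslo_vmware.objects.datastore import DatastorePath

    image_id = 'e497cc62-282a-4a70-9770-22d80d8a1013'
    cache_root = DatastorePath('datastore1', 'devstack-image-cache_base')
    cached_vmdk = DatastorePath('datastore1', 'devstack-image-cache_base',
                                image_id, image_id + '.vmdk')
    print(cache_root)   # [datastore1] devstack-image-cache_base
    print(cached_vmdk)  # [datastore1] devstack-image-cache_base/e497.../e497....vmdk
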
[ 523.731409] env[61839]: DEBUG nova.compute.manager [None req-93209ce3-de5a-4c91-b8b0-efd4bd89a6cc tempest-ServerDiagnosticsNegativeTest-1581315091 tempest-ServerDiagnosticsNegativeTest-1581315091-project-member] [instance: 570fb8fe-391a-4f1b-be51-17979e9fb049] Start building block device mappings for instance. {{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}}
[ 523.847269] env[61839]: DEBUG oslo_concurrency.lockutils [None req-e2e7743f-119e-4c3a-a685-1f1242f17cae tempest-VolumesAssistedSnapshotsTest-1863052206 tempest-VolumesAssistedSnapshotsTest-1863052206-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 523.894541] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8588b55b-5d9a-402d-9be1-e2fa2a83198f {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 523.905208] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92c45df6-51f8-457b-9719-03094ae05f87 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 523.940587] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b341c394-3bed-4867-a596-fca202d79d04 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 523.949398] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-302b81d7-ff43-4662-905e-6ac5924a1110 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 523.967715] env[61839]: DEBUG nova.compute.provider_tree [None req-f8fd2f30-7a32-47b5-822d-e89a7c694abc tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 523.970239] env[61839]: DEBUG nova.network.neutron [None req-93209ce3-de5a-4c91-b8b0-efd4bd89a6cc tempest-ServerDiagnosticsNegativeTest-1581315091 tempest-ServerDiagnosticsNegativeTest-1581315091-project-member] [instance: 570fb8fe-391a-4f1b-be51-17979e9fb049] Successfully created port: b7ee1667-a38f-429e-acb4-c7171559db2b {{(pid=61839) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}}
[ 524.217355] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-4249fffb-3702-4570-9f57-2cbc28b164c9 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] [instance: 211e8267-3c33-42c8-852f-1c20d7987453] Preparing fetch location {{(pid=61839) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}}
[ 524.217611] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-4249fffb-3702-4570-9f57-2cbc28b164c9 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Creating directory with path [datastore1] vmware_temp/5c3a9ecd-3ffc-4643-a597-3592ead08988/e497cc62-282a-4a70-9770-22d80d8a1013 {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 524.217852] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ee01947f-e7be-49af-943c-d6e67c3ff177 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 524.252933] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-4249fffb-3702-4570-9f57-2cbc28b164c9 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Created directory with path [datastore1] vmware_temp/5c3a9ecd-3ffc-4643-a597-3592ead08988/e497cc62-282a-4a70-9770-22d80d8a1013 {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 524.253485] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-4249fffb-3702-4570-9f57-2cbc28b164c9 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] [instance: 211e8267-3c33-42c8-852f-1c20d7987453] Fetch image to [datastore1] vmware_temp/5c3a9ecd-3ffc-4643-a597-3592ead08988/e497cc62-282a-4a70-9770-22d80d8a1013/tmp-sparse.vmdk {{(pid=61839) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}}
[ 524.253676] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-4249fffb-3702-4570-9f57-2cbc28b164c9 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] [instance: 211e8267-3c33-42c8-852f-1c20d7987453] Downloading image file data e497cc62-282a-4a70-9770-22d80d8a1013 to [datastore1] vmware_temp/5c3a9ecd-3ffc-4643-a597-3592ead08988/e497cc62-282a-4a70-9770-22d80d8a1013/tmp-sparse.vmdk on the data store datastore1 {{(pid=61839) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}}
[ 524.254680] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ae685f5-4ceb-4058-991f-65d89be9d56c {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 524.265967] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53e42857-1eb7-4715-a6ce-688bedb0f1e1 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 524.280330] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0b6e971-3567-4c09-ae9c-2d54fb875d12 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 524.326897] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6708750d-bbf4-457a-93a5-d1a41262bac7 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 524.336402] env[61839]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-528ad709-8229-46e8-ae00-b505a3a6b6d6 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 524.429513] env[61839]: DEBUG nova.virt.vmwareapi.images [None req-4249fffb-3702-4570-9f57-2cbc28b164c9 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] [instance: 211e8267-3c33-42c8-852f-1c20d7987453] Downloading image file data e497cc62-282a-4a70-9770-22d80d8a1013 to the data store datastore1 {{(pid=61839) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}}
[ 524.473581] env[61839]: DEBUG nova.scheduler.client.report [None req-f8fd2f30-7a32-47b5-822d-e89a7c694abc tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 524.508784] env[61839]: DEBUG oslo_vmware.rw_handles [None req-4249fffb-3702-4570-9f57-2cbc28b164c9 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/5c3a9ecd-3ffc-4643-a597-3592ead08988/e497cc62-282a-4a70-9770-22d80d8a1013/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61839) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}}
[ 524.750034] env[61839]: DEBUG nova.compute.manager [None req-93209ce3-de5a-4c91-b8b0-efd4bd89a6cc tempest-ServerDiagnosticsNegativeTest-1581315091 tempest-ServerDiagnosticsNegativeTest-1581315091-project-member] [instance: 570fb8fe-391a-4f1b-be51-17979e9fb049] Start spawning the instance on the hypervisor. {{(pid=61839) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}}
[ 524.785245] env[61839]: DEBUG nova.virt.hardware [None req-93209ce3-de5a-4c91-b8b0-efd4bd89a6cc tempest-ServerDiagnosticsNegativeTest-1581315091 tempest-ServerDiagnosticsNegativeTest-1581315091-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-18T16:51:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-18T16:50:57Z,direct_url=,disk_format='vmdk',id=e497cc62-282a-4a70-9770-22d80d8a1013,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a98e034276e44534a9feace637762da7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-18T16:50:58Z,virtual_size=,visibility=), allow threads: False {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}}
[ 524.785538] env[61839]: DEBUG nova.virt.hardware [None req-93209ce3-de5a-4c91-b8b0-efd4bd89a6cc tempest-ServerDiagnosticsNegativeTest-1581315091 tempest-ServerDiagnosticsNegativeTest-1581315091-project-member] Flavor limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}}
[ 524.785752] env[61839]: DEBUG nova.virt.hardware [None req-93209ce3-de5a-4c91-b8b0-efd4bd89a6cc tempest-ServerDiagnosticsNegativeTest-1581315091 tempest-ServerDiagnosticsNegativeTest-1581315091-project-member] Image limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 524.785976] env[61839]: DEBUG nova.virt.hardware [None req-93209ce3-de5a-4c91-b8b0-efd4bd89a6cc tempest-ServerDiagnosticsNegativeTest-1581315091 tempest-ServerDiagnosticsNegativeTest-1581315091-project-member] Flavor pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}}
[ 524.786177] env[61839]: DEBUG nova.virt.hardware [None req-93209ce3-de5a-4c91-b8b0-efd4bd89a6cc tempest-ServerDiagnosticsNegativeTest-1581315091 tempest-ServerDiagnosticsNegativeTest-1581315091-project-member] Image pref 0:0:0 {{(pid=61839)
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 524.786354] env[61839]: DEBUG nova.virt.hardware [None req-93209ce3-de5a-4c91-b8b0-efd4bd89a6cc tempest-ServerDiagnosticsNegativeTest-1581315091 tempest-ServerDiagnosticsNegativeTest-1581315091-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 524.786717] env[61839]: DEBUG nova.virt.hardware [None req-93209ce3-de5a-4c91-b8b0-efd4bd89a6cc tempest-ServerDiagnosticsNegativeTest-1581315091 tempest-ServerDiagnosticsNegativeTest-1581315091-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 524.786784] env[61839]: DEBUG nova.virt.hardware [None req-93209ce3-de5a-4c91-b8b0-efd4bd89a6cc tempest-ServerDiagnosticsNegativeTest-1581315091 tempest-ServerDiagnosticsNegativeTest-1581315091-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 524.786964] env[61839]: DEBUG nova.virt.hardware [None req-93209ce3-de5a-4c91-b8b0-efd4bd89a6cc tempest-ServerDiagnosticsNegativeTest-1581315091 tempest-ServerDiagnosticsNegativeTest-1581315091-project-member] Got 1 possible topologies {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 524.787195] env[61839]: DEBUG nova.virt.hardware [None req-93209ce3-de5a-4c91-b8b0-efd4bd89a6cc tempest-ServerDiagnosticsNegativeTest-1581315091 tempest-ServerDiagnosticsNegativeTest-1581315091-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 524.787361] env[61839]: DEBUG nova.virt.hardware [None req-93209ce3-de5a-4c91-b8b0-efd4bd89a6cc tempest-ServerDiagnosticsNegativeTest-1581315091 tempest-ServerDiagnosticsNegativeTest-1581315091-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 524.788379] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cadfdcc9-f215-4eae-bdd6-d551ae9faaf3 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 524.800992] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-088ef1de-e880-4d17-973e-ca3096d04e7c {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 524.986218] env[61839]: DEBUG oslo_concurrency.lockutils [None req-f8fd2f30-7a32-47b5-822d-e89a7c694abc tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.268s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 524.987348] env[61839]: DEBUG nova.compute.manager [None req-f8fd2f30-7a32-47b5-822d-e89a7c694abc tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] [instance: 41823a25-5ff2-4838-854d-5bada8e5daca] Start building networks 
asynchronously for instance. {{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 524.990916] env[61839]: DEBUG oslo_concurrency.lockutils [None req-2194692e-8fa8-43a0-9f32-21d13bbfb66f tempest-ServersTestFqdnHostnames-1400549167 tempest-ServersTestFqdnHostnames-1400549167-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.845s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 524.995893] env[61839]: INFO nova.compute.claims [None req-2194692e-8fa8-43a0-9f32-21d13bbfb66f tempest-ServersTestFqdnHostnames-1400549167 tempest-ServersTestFqdnHostnames-1400549167-project-member] [instance: 6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 525.006799] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager._sync_power_states {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 525.154250] env[61839]: DEBUG oslo_vmware.rw_handles [None req-4249fffb-3702-4570-9f57-2cbc28b164c9 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Completed reading data from the image iterator. {{(pid=61839) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 525.154515] env[61839]: DEBUG oslo_vmware.rw_handles [None req-4249fffb-3702-4570-9f57-2cbc28b164c9 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/5c3a9ecd-3ffc-4643-a597-3592ead08988/e497cc62-282a-4a70-9770-22d80d8a1013/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=61839) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 525.299483] env[61839]: DEBUG nova.virt.vmwareapi.images [None req-4249fffb-3702-4570-9f57-2cbc28b164c9 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] [instance: 211e8267-3c33-42c8-852f-1c20d7987453] Downloaded image file data e497cc62-282a-4a70-9770-22d80d8a1013 to vmware_temp/5c3a9ecd-3ffc-4643-a597-3592ead08988/e497cc62-282a-4a70-9770-22d80d8a1013/tmp-sparse.vmdk on the data store datastore1 {{(pid=61839) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 525.301906] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-4249fffb-3702-4570-9f57-2cbc28b164c9 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] [instance: 211e8267-3c33-42c8-852f-1c20d7987453] Caching image {{(pid=61839) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 525.302539] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-4249fffb-3702-4570-9f57-2cbc28b164c9 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Copying Virtual Disk [datastore1] vmware_temp/5c3a9ecd-3ffc-4643-a597-3592ead08988/e497cc62-282a-4a70-9770-22d80d8a1013/tmp-sparse.vmdk to [datastore1] vmware_temp/5c3a9ecd-3ffc-4643-a597-3592ead08988/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 525.302723] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-58b95ed3-80ee-4988-8e6c-a29fbf095c75 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 525.312400] env[61839]: DEBUG oslo_vmware.api [None req-4249fffb-3702-4570-9f57-2cbc28b164c9 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Waiting for the task: (returnval){ [ 525.312400] env[61839]: value = "task-1314215" [ 525.312400] env[61839]: _type = "Task" [ 525.312400] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 525.321671] env[61839]: DEBUG oslo_vmware.api [None req-4249fffb-3702-4570-9f57-2cbc28b164c9 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Task: {'id': task-1314215, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 525.505603] env[61839]: DEBUG nova.compute.utils [None req-f8fd2f30-7a32-47b5-822d-e89a7c694abc tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] Using /dev/sd instead of None {{(pid=61839) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 525.510113] env[61839]: DEBUG nova.compute.manager [None req-f8fd2f30-7a32-47b5-822d-e89a7c694abc tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] [instance: 41823a25-5ff2-4838-854d-5bada8e5daca] Not allocating networking since 'none' was specified. 
{{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 525.516551] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Getting list of instances from cluster (obj){ [ 525.516551] env[61839]: value = "domain-c8" [ 525.516551] env[61839]: _type = "ClusterComputeResource" [ 525.516551] env[61839]: } {{(pid=61839) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 525.517544] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dda49870-d919-4694-98f3-2d114d37f839 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 525.530624] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Got total of 1 instances {{(pid=61839) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 525.530849] env[61839]: WARNING nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] While synchronizing instance power states, found 6 instances in the database and 1 instances on the hypervisor. [ 525.530962] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Triggering sync for uuid 95685d36-7efa-42a9-8722-5b90d6edbce5 {{(pid=61839) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10338}} [ 525.531360] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Triggering sync for uuid c258dbf2-be81-40e5-a11a-03dee332d3b6 {{(pid=61839) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10338}} [ 525.531545] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Triggering sync for uuid be976871-706d-41e1-9423-55ff251a52e9 {{(pid=61839) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10338}} [ 525.531736] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Triggering sync for uuid 211e8267-3c33-42c8-852f-1c20d7987453 {{(pid=61839) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10338}} [ 525.531964] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Triggering sync for uuid 570fb8fe-391a-4f1b-be51-17979e9fb049 {{(pid=61839) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10338}} [ 525.532187] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Triggering sync for uuid 41823a25-5ff2-4838-854d-5bada8e5daca {{(pid=61839) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10338}} [ 525.532520] env[61839]: DEBUG oslo_concurrency.lockutils [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Acquiring lock "95685d36-7efa-42a9-8722-5b90d6edbce5" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 525.532747] env[61839]: DEBUG oslo_concurrency.lockutils [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Acquiring lock "c258dbf2-be81-40e5-a11a-03dee332d3b6" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 525.532912] env[61839]: DEBUG oslo_concurrency.lockutils [None 
req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Acquiring lock "be976871-706d-41e1-9423-55ff251a52e9" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 525.533163] env[61839]: DEBUG oslo_concurrency.lockutils [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Acquiring lock "211e8267-3c33-42c8-852f-1c20d7987453" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 525.533307] env[61839]: DEBUG oslo_concurrency.lockutils [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Acquiring lock "570fb8fe-391a-4f1b-be51-17979e9fb049" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 525.533494] env[61839]: DEBUG oslo_concurrency.lockutils [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Acquiring lock "41823a25-5ff2-4838-854d-5bada8e5daca" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 525.533664] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 525.533877] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61839) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 525.534065] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 525.824681] env[61839]: DEBUG oslo_vmware.api [None req-4249fffb-3702-4570-9f57-2cbc28b164c9 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Task: {'id': task-1314215, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 526.012448] env[61839]: DEBUG nova.compute.manager [None req-f8fd2f30-7a32-47b5-822d-e89a7c694abc tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] [instance: 41823a25-5ff2-4838-854d-5bada8e5daca] Start building block device mappings for instance. 
{{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 526.037537] env[61839]: DEBUG oslo_concurrency.lockutils [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 526.263913] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6726adb6-f8f2-463b-a377-cd64f8bc64f3 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 526.273119] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-367fb48a-9f4b-4fd5-afea-0c7cdc767937 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 526.308669] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ab93cd4-2253-4c1c-bcb5-31ab878d7e18 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 526.320259] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e9c9363-21bd-470b-89e7-79981ba6e042 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 526.331196] env[61839]: DEBUG oslo_vmware.api [None req-4249fffb-3702-4570-9f57-2cbc28b164c9 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Task: {'id': task-1314215, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.69466} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 526.332899] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-4249fffb-3702-4570-9f57-2cbc28b164c9 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Copied Virtual Disk [datastore1] vmware_temp/5c3a9ecd-3ffc-4643-a597-3592ead08988/e497cc62-282a-4a70-9770-22d80d8a1013/tmp-sparse.vmdk to [datastore1] vmware_temp/5c3a9ecd-3ffc-4643-a597-3592ead08988/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 526.333196] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-4249fffb-3702-4570-9f57-2cbc28b164c9 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Deleting the datastore file [datastore1] vmware_temp/5c3a9ecd-3ffc-4643-a597-3592ead08988/e497cc62-282a-4a70-9770-22d80d8a1013/tmp-sparse.vmdk {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 526.333387] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a721bb0a-3f90-40f4-adb2-91bf7326c509 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 526.345745] env[61839]: DEBUG nova.compute.provider_tree [None req-2194692e-8fa8-43a0-9f32-21d13bbfb66f tempest-ServersTestFqdnHostnames-1400549167 tempest-ServersTestFqdnHostnames-1400549167-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 526.349281] env[61839]: DEBUG oslo_vmware.api [None req-4249fffb-3702-4570-9f57-2cbc28b164c9 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Waiting for the task: (returnval){ [ 526.349281] env[61839]: value = "task-1314216" [ 526.349281] env[61839]: _type = "Task" [ 526.349281] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 526.357277] env[61839]: DEBUG oslo_vmware.api [None req-4249fffb-3702-4570-9f57-2cbc28b164c9 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Task: {'id': task-1314216, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 526.849830] env[61839]: DEBUG nova.scheduler.client.report [None req-2194692e-8fa8-43a0-9f32-21d13bbfb66f tempest-ServersTestFqdnHostnames-1400549167 tempest-ServersTestFqdnHostnames-1400549167-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 526.863587] env[61839]: DEBUG oslo_vmware.api [None req-4249fffb-3702-4570-9f57-2cbc28b164c9 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Task: {'id': task-1314216, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.027033} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 526.864593] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-4249fffb-3702-4570-9f57-2cbc28b164c9 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Deleted the datastore file {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 526.864593] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-4249fffb-3702-4570-9f57-2cbc28b164c9 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Moving file from [datastore1] vmware_temp/5c3a9ecd-3ffc-4643-a597-3592ead08988/e497cc62-282a-4a70-9770-22d80d8a1013 to [datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013. {{(pid=61839) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:334}} [ 526.864593] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.MoveDatastoreFile_Task with opID=oslo.vmware-e91752b8-e955-4829-a797-f45e8d1258c6 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 526.871997] env[61839]: DEBUG oslo_vmware.api [None req-4249fffb-3702-4570-9f57-2cbc28b164c9 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Waiting for the task: (returnval){ [ 526.871997] env[61839]: value = "task-1314217" [ 526.871997] env[61839]: _type = "Task" [ 526.871997] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 526.880983] env[61839]: DEBUG oslo_vmware.api [None req-4249fffb-3702-4570-9f57-2cbc28b164c9 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Task: {'id': task-1314217, 'name': MoveDatastoreFile_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 527.034237] env[61839]: DEBUG nova.compute.manager [None req-f8fd2f30-7a32-47b5-822d-e89a7c694abc tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] [instance: 41823a25-5ff2-4838-854d-5bada8e5daca] Start spawning the instance on the hypervisor. 
{{(pid=61839) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 527.071266] env[61839]: DEBUG nova.virt.hardware [None req-f8fd2f30-7a32-47b5-822d-e89a7c694abc tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-18T16:51:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-18T16:50:57Z,direct_url=,disk_format='vmdk',id=e497cc62-282a-4a70-9770-22d80d8a1013,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a98e034276e44534a9feace637762da7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-18T16:50:58Z,virtual_size=,visibility=), allow threads: False {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 527.071266] env[61839]: DEBUG nova.virt.hardware [None req-f8fd2f30-7a32-47b5-822d-e89a7c694abc tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] Flavor limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 527.071266] env[61839]: DEBUG nova.virt.hardware [None req-f8fd2f30-7a32-47b5-822d-e89a7c694abc tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] Image limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 527.072727] env[61839]: DEBUG nova.virt.hardware [None req-f8fd2f30-7a32-47b5-822d-e89a7c694abc tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] Flavor pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 527.073371] env[61839]: DEBUG nova.virt.hardware [None req-f8fd2f30-7a32-47b5-822d-e89a7c694abc tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] Image pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 527.073627] env[61839]: DEBUG nova.virt.hardware [None req-f8fd2f30-7a32-47b5-822d-e89a7c694abc tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 527.073949] env[61839]: DEBUG nova.virt.hardware [None req-f8fd2f30-7a32-47b5-822d-e89a7c694abc tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 527.074415] env[61839]: DEBUG nova.virt.hardware [None req-f8fd2f30-7a32-47b5-822d-e89a7c694abc tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 527.074705] 
env[61839]: DEBUG nova.virt.hardware [None req-f8fd2f30-7a32-47b5-822d-e89a7c694abc tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] Got 1 possible topologies {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 527.074973] env[61839]: DEBUG nova.virt.hardware [None req-f8fd2f30-7a32-47b5-822d-e89a7c694abc tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 527.075267] env[61839]: DEBUG nova.virt.hardware [None req-f8fd2f30-7a32-47b5-822d-e89a7c694abc tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 527.077228] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-922907e6-3f72-465a-bc69-b53f64d8309b {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 527.083525] env[61839]: ERROR nova.compute.manager [None req-93209ce3-de5a-4c91-b8b0-efd4bd89a6cc tempest-ServerDiagnosticsNegativeTest-1581315091 tempest-ServerDiagnosticsNegativeTest-1581315091-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port b7ee1667-a38f-429e-acb4-c7171559db2b, please check neutron logs for more information. [ 527.083525] env[61839]: ERROR nova.compute.manager Traceback (most recent call last): [ 527.083525] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 527.083525] env[61839]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 527.083525] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 527.083525] env[61839]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 527.083525] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 527.083525] env[61839]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 527.083525] env[61839]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 527.083525] env[61839]: ERROR nova.compute.manager self.force_reraise() [ 527.083525] env[61839]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 527.083525] env[61839]: ERROR nova.compute.manager raise self.value [ 527.083525] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 527.083525] env[61839]: ERROR nova.compute.manager updated_port = self._update_port( [ 527.083525] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 527.083525] env[61839]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 527.084031] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 527.084031] env[61839]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 527.084031] env[61839]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port b7ee1667-a38f-429e-acb4-c7171559db2b, please check neutron logs for more information. [ 527.084031] env[61839]: ERROR nova.compute.manager [ 527.084917] env[61839]: Traceback (most recent call last): [ 527.084917] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 527.084917] env[61839]: listener.cb(fileno) [ 527.084917] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 527.084917] env[61839]: result = function(*args, **kwargs) [ 527.084917] env[61839]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 527.084917] env[61839]: return func(*args, **kwargs) [ 527.084917] env[61839]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 527.084917] env[61839]: raise e [ 527.084917] env[61839]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 527.084917] env[61839]: nwinfo = self.network_api.allocate_for_instance( [ 527.084917] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 527.084917] env[61839]: created_port_ids = self._update_ports_for_instance( [ 527.084917] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 527.084917] env[61839]: with excutils.save_and_reraise_exception(): [ 527.084917] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 527.084917] env[61839]: self.force_reraise() [ 527.084917] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 527.084917] env[61839]: raise self.value [ 527.084917] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 527.084917] env[61839]: updated_port = self._update_port( [ 527.084917] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 527.084917] env[61839]: _ensure_no_port_binding_failure(port) [ 527.084917] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 527.084917] env[61839]: raise exception.PortBindingFailed(port_id=port['id']) [ 527.084917] env[61839]: nova.exception.PortBindingFailed: Binding failed for port b7ee1667-a38f-429e-acb4-c7171559db2b, please check neutron logs for more information. [ 527.084917] env[61839]: Removing descriptor: 17 [ 527.086141] env[61839]: ERROR nova.compute.manager [None req-93209ce3-de5a-4c91-b8b0-efd4bd89a6cc tempest-ServerDiagnosticsNegativeTest-1581315091 tempest-ServerDiagnosticsNegativeTest-1581315091-project-member] [instance: 570fb8fe-391a-4f1b-be51-17979e9fb049] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port b7ee1667-a38f-429e-acb4-c7171559db2b, please check neutron logs for more information. 
[ 527.086141] env[61839]: ERROR nova.compute.manager [instance: 570fb8fe-391a-4f1b-be51-17979e9fb049] Traceback (most recent call last): [ 527.086141] env[61839]: ERROR nova.compute.manager [instance: 570fb8fe-391a-4f1b-be51-17979e9fb049] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 527.086141] env[61839]: ERROR nova.compute.manager [instance: 570fb8fe-391a-4f1b-be51-17979e9fb049] yield resources [ 527.086141] env[61839]: ERROR nova.compute.manager [instance: 570fb8fe-391a-4f1b-be51-17979e9fb049] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 527.086141] env[61839]: ERROR nova.compute.manager [instance: 570fb8fe-391a-4f1b-be51-17979e9fb049] self.driver.spawn(context, instance, image_meta, [ 527.086141] env[61839]: ERROR nova.compute.manager [instance: 570fb8fe-391a-4f1b-be51-17979e9fb049] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 527.086141] env[61839]: ERROR nova.compute.manager [instance: 570fb8fe-391a-4f1b-be51-17979e9fb049] self._vmops.spawn(context, instance, image_meta, injected_files, [ 527.086141] env[61839]: ERROR nova.compute.manager [instance: 570fb8fe-391a-4f1b-be51-17979e9fb049] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 527.086141] env[61839]: ERROR nova.compute.manager [instance: 570fb8fe-391a-4f1b-be51-17979e9fb049] vm_ref = self.build_virtual_machine(instance, [ 527.086141] env[61839]: ERROR nova.compute.manager [instance: 570fb8fe-391a-4f1b-be51-17979e9fb049] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 527.086455] env[61839]: ERROR nova.compute.manager [instance: 570fb8fe-391a-4f1b-be51-17979e9fb049] vif_infos = vmwarevif.get_vif_info(self._session, [ 527.086455] env[61839]: ERROR nova.compute.manager [instance: 570fb8fe-391a-4f1b-be51-17979e9fb049] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 527.086455] env[61839]: ERROR nova.compute.manager [instance: 570fb8fe-391a-4f1b-be51-17979e9fb049] for vif in network_info: [ 527.086455] env[61839]: ERROR nova.compute.manager [instance: 570fb8fe-391a-4f1b-be51-17979e9fb049] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 527.086455] env[61839]: ERROR nova.compute.manager [instance: 570fb8fe-391a-4f1b-be51-17979e9fb049] return self._sync_wrapper(fn, *args, **kwargs) [ 527.086455] env[61839]: ERROR nova.compute.manager [instance: 570fb8fe-391a-4f1b-be51-17979e9fb049] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 527.086455] env[61839]: ERROR nova.compute.manager [instance: 570fb8fe-391a-4f1b-be51-17979e9fb049] self.wait() [ 527.086455] env[61839]: ERROR nova.compute.manager [instance: 570fb8fe-391a-4f1b-be51-17979e9fb049] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 527.086455] env[61839]: ERROR nova.compute.manager [instance: 570fb8fe-391a-4f1b-be51-17979e9fb049] self[:] = self._gt.wait() [ 527.086455] env[61839]: ERROR nova.compute.manager [instance: 570fb8fe-391a-4f1b-be51-17979e9fb049] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 527.086455] env[61839]: ERROR nova.compute.manager [instance: 570fb8fe-391a-4f1b-be51-17979e9fb049] return self._exit_event.wait() [ 527.086455] env[61839]: ERROR nova.compute.manager [instance: 570fb8fe-391a-4f1b-be51-17979e9fb049] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 527.086455] env[61839]: ERROR 
nova.compute.manager [instance: 570fb8fe-391a-4f1b-be51-17979e9fb049] result = hub.switch() [ 527.086768] env[61839]: ERROR nova.compute.manager [instance: 570fb8fe-391a-4f1b-be51-17979e9fb049] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 527.086768] env[61839]: ERROR nova.compute.manager [instance: 570fb8fe-391a-4f1b-be51-17979e9fb049] return self.greenlet.switch() [ 527.086768] env[61839]: ERROR nova.compute.manager [instance: 570fb8fe-391a-4f1b-be51-17979e9fb049] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 527.086768] env[61839]: ERROR nova.compute.manager [instance: 570fb8fe-391a-4f1b-be51-17979e9fb049] result = function(*args, **kwargs) [ 527.086768] env[61839]: ERROR nova.compute.manager [instance: 570fb8fe-391a-4f1b-be51-17979e9fb049] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 527.086768] env[61839]: ERROR nova.compute.manager [instance: 570fb8fe-391a-4f1b-be51-17979e9fb049] return func(*args, **kwargs) [ 527.086768] env[61839]: ERROR nova.compute.manager [instance: 570fb8fe-391a-4f1b-be51-17979e9fb049] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 527.086768] env[61839]: ERROR nova.compute.manager [instance: 570fb8fe-391a-4f1b-be51-17979e9fb049] raise e [ 527.086768] env[61839]: ERROR nova.compute.manager [instance: 570fb8fe-391a-4f1b-be51-17979e9fb049] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 527.086768] env[61839]: ERROR nova.compute.manager [instance: 570fb8fe-391a-4f1b-be51-17979e9fb049] nwinfo = self.network_api.allocate_for_instance( [ 527.086768] env[61839]: ERROR nova.compute.manager [instance: 570fb8fe-391a-4f1b-be51-17979e9fb049] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 527.086768] env[61839]: ERROR nova.compute.manager [instance: 570fb8fe-391a-4f1b-be51-17979e9fb049] created_port_ids = self._update_ports_for_instance( [ 527.086768] env[61839]: ERROR nova.compute.manager [instance: 570fb8fe-391a-4f1b-be51-17979e9fb049] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 527.088839] env[61839]: ERROR nova.compute.manager [instance: 570fb8fe-391a-4f1b-be51-17979e9fb049] with excutils.save_and_reraise_exception(): [ 527.088839] env[61839]: ERROR nova.compute.manager [instance: 570fb8fe-391a-4f1b-be51-17979e9fb049] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 527.088839] env[61839]: ERROR nova.compute.manager [instance: 570fb8fe-391a-4f1b-be51-17979e9fb049] self.force_reraise() [ 527.088839] env[61839]: ERROR nova.compute.manager [instance: 570fb8fe-391a-4f1b-be51-17979e9fb049] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 527.088839] env[61839]: ERROR nova.compute.manager [instance: 570fb8fe-391a-4f1b-be51-17979e9fb049] raise self.value [ 527.088839] env[61839]: ERROR nova.compute.manager [instance: 570fb8fe-391a-4f1b-be51-17979e9fb049] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 527.088839] env[61839]: ERROR nova.compute.manager [instance: 570fb8fe-391a-4f1b-be51-17979e9fb049] updated_port = self._update_port( [ 527.088839] env[61839]: ERROR nova.compute.manager [instance: 570fb8fe-391a-4f1b-be51-17979e9fb049] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 527.088839] 
env[61839]: ERROR nova.compute.manager [instance: 570fb8fe-391a-4f1b-be51-17979e9fb049] _ensure_no_port_binding_failure(port) [ 527.088839] env[61839]: ERROR nova.compute.manager [instance: 570fb8fe-391a-4f1b-be51-17979e9fb049] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 527.088839] env[61839]: ERROR nova.compute.manager [instance: 570fb8fe-391a-4f1b-be51-17979e9fb049] raise exception.PortBindingFailed(port_id=port['id']) [ 527.088839] env[61839]: ERROR nova.compute.manager [instance: 570fb8fe-391a-4f1b-be51-17979e9fb049] nova.exception.PortBindingFailed: Binding failed for port b7ee1667-a38f-429e-acb4-c7171559db2b, please check neutron logs for more information. [ 527.088839] env[61839]: ERROR nova.compute.manager [instance: 570fb8fe-391a-4f1b-be51-17979e9fb049] [ 527.090459] env[61839]: INFO nova.compute.manager [None req-93209ce3-de5a-4c91-b8b0-efd4bd89a6cc tempest-ServerDiagnosticsNegativeTest-1581315091 tempest-ServerDiagnosticsNegativeTest-1581315091-project-member] [instance: 570fb8fe-391a-4f1b-be51-17979e9fb049] Terminating instance [ 527.090459] env[61839]: DEBUG oslo_concurrency.lockutils [None req-93209ce3-de5a-4c91-b8b0-efd4bd89a6cc tempest-ServerDiagnosticsNegativeTest-1581315091 tempest-ServerDiagnosticsNegativeTest-1581315091-project-member] Acquiring lock "refresh_cache-570fb8fe-391a-4f1b-be51-17979e9fb049" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 527.090459] env[61839]: DEBUG oslo_concurrency.lockutils [None req-93209ce3-de5a-4c91-b8b0-efd4bd89a6cc tempest-ServerDiagnosticsNegativeTest-1581315091 tempest-ServerDiagnosticsNegativeTest-1581315091-project-member] Acquired lock "refresh_cache-570fb8fe-391a-4f1b-be51-17979e9fb049" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 527.090459] env[61839]: DEBUG nova.network.neutron [None req-93209ce3-de5a-4c91-b8b0-efd4bd89a6cc tempest-ServerDiagnosticsNegativeTest-1581315091 tempest-ServerDiagnosticsNegativeTest-1581315091-project-member] [instance: 570fb8fe-391a-4f1b-be51-17979e9fb049] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 527.092929] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c767f7ce-16eb-439b-b6ad-c7cfb5ae1924 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 527.119272] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-f8fd2f30-7a32-47b5-822d-e89a7c694abc tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] [instance: 41823a25-5ff2-4838-854d-5bada8e5daca] Instance VIF info [] {{(pid=61839) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 527.124074] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-f8fd2f30-7a32-47b5-822d-e89a7c694abc tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] Creating folder: Project (1f26d85756a84a14935f831a42ad4065). Parent ref: group-v281288. 
{{(pid=61839) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 527.125037] env[61839]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d8b3e880-66c8-47ed-a472-e8e9193251a6 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 527.136387] env[61839]: INFO nova.virt.vmwareapi.vm_util [None req-f8fd2f30-7a32-47b5-822d-e89a7c694abc tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] Created folder: Project (1f26d85756a84a14935f831a42ad4065) in parent group-v281288. [ 527.136387] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-f8fd2f30-7a32-47b5-822d-e89a7c694abc tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] Creating folder: Instances. Parent ref: group-v281292. {{(pid=61839) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 527.136387] env[61839]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3321db2a-c205-4327-80f9-bd84a15d8e9c {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 527.146976] env[61839]: INFO nova.virt.vmwareapi.vm_util [None req-f8fd2f30-7a32-47b5-822d-e89a7c694abc tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] Created folder: Instances in parent group-v281292. [ 527.146976] env[61839]: DEBUG oslo.service.loopingcall [None req-f8fd2f30-7a32-47b5-822d-e89a7c694abc tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 527.146976] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 41823a25-5ff2-4838-854d-5bada8e5daca] Creating VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 527.146976] env[61839]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a4643b7c-3a73-4335-9249-423a806c1117 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 527.165849] env[61839]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 527.165849] env[61839]: value = "task-1314220" [ 527.165849] env[61839]: _type = "Task" [ 527.165849] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 527.175323] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314220, 'name': CreateVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 527.227371] env[61839]: DEBUG nova.compute.manager [req-bcfac698-b884-4fa6-8517-494d2de31b1e req-138e2af0-bce5-4293-a00e-3935637944e1 service nova] [instance: 570fb8fe-391a-4f1b-be51-17979e9fb049] Received event network-changed-b7ee1667-a38f-429e-acb4-c7171559db2b {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 527.227568] env[61839]: DEBUG nova.compute.manager [req-bcfac698-b884-4fa6-8517-494d2de31b1e req-138e2af0-bce5-4293-a00e-3935637944e1 service nova] [instance: 570fb8fe-391a-4f1b-be51-17979e9fb049] Refreshing instance network info cache due to event network-changed-b7ee1667-a38f-429e-acb4-c7171559db2b. 
{{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 527.227748] env[61839]: DEBUG oslo_concurrency.lockutils [req-bcfac698-b884-4fa6-8517-494d2de31b1e req-138e2af0-bce5-4293-a00e-3935637944e1 service nova] Acquiring lock "refresh_cache-570fb8fe-391a-4f1b-be51-17979e9fb049" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 527.359235] env[61839]: DEBUG oslo_concurrency.lockutils [None req-2194692e-8fa8-43a0-9f32-21d13bbfb66f tempest-ServersTestFqdnHostnames-1400549167 tempest-ServersTestFqdnHostnames-1400549167-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.368s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 527.359235] env[61839]: DEBUG nova.compute.manager [None req-2194692e-8fa8-43a0-9f32-21d13bbfb66f tempest-ServersTestFqdnHostnames-1400549167 tempest-ServersTestFqdnHostnames-1400549167-project-member] [instance: 6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5] Start building networks asynchronously for instance. {{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 527.364545] env[61839]: DEBUG oslo_concurrency.lockutils [None req-12eb9dfa-b92e-47f2-a67a-cc399fb0a366 tempest-TenantUsagesTestJSON-1491461262 tempest-TenantUsagesTestJSON-1491461262-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 7.030s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 527.384705] env[61839]: DEBUG oslo_vmware.api [None req-4249fffb-3702-4570-9f57-2cbc28b164c9 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Task: {'id': task-1314217, 'name': MoveDatastoreFile_Task, 'duration_secs': 0.033354} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 527.385019] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-4249fffb-3702-4570-9f57-2cbc28b164c9 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] File moved {{(pid=61839) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:346}} [ 527.385216] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-4249fffb-3702-4570-9f57-2cbc28b164c9 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] [instance: 211e8267-3c33-42c8-852f-1c20d7987453] Cleaning up location [datastore1] vmware_temp/5c3a9ecd-3ffc-4643-a597-3592ead08988 {{(pid=61839) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 527.385385] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-4249fffb-3702-4570-9f57-2cbc28b164c9 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Deleting the datastore file [datastore1] vmware_temp/5c3a9ecd-3ffc-4643-a597-3592ead08988 {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 527.385892] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e20c2a05-1fa5-4617-a6f9-ae5b1f9019aa {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 527.394338] env[61839]: DEBUG oslo_vmware.api [None req-4249fffb-3702-4570-9f57-2cbc28b164c9 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Waiting for the task: (returnval){ [ 527.394338] env[61839]: value = "task-1314221" [ 527.394338] env[61839]: _type = "Task" [ 527.394338] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 527.404097] env[61839]: DEBUG oslo_vmware.api [None req-4249fffb-3702-4570-9f57-2cbc28b164c9 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Task: {'id': task-1314221, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 527.644577] env[61839]: DEBUG nova.network.neutron [None req-93209ce3-de5a-4c91-b8b0-efd4bd89a6cc tempest-ServerDiagnosticsNegativeTest-1581315091 tempest-ServerDiagnosticsNegativeTest-1581315091-project-member] [instance: 570fb8fe-391a-4f1b-be51-17979e9fb049] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 527.677886] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314220, 'name': CreateVM_Task, 'duration_secs': 0.307532} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 527.678068] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 41823a25-5ff2-4838-854d-5bada8e5daca] Created VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 527.678952] env[61839]: DEBUG oslo_vmware.service [None req-f8fd2f30-7a32-47b5-822d-e89a7c694abc tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83a836e4-8d01-4553-8a7c-c23b6f914248 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 527.685254] env[61839]: DEBUG oslo_concurrency.lockutils [None req-f8fd2f30-7a32-47b5-822d-e89a7c694abc tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 527.686232] env[61839]: DEBUG oslo_concurrency.lockutils [None req-f8fd2f30-7a32-47b5-822d-e89a7c694abc tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 527.686232] env[61839]: DEBUG oslo_concurrency.lockutils [None req-f8fd2f30-7a32-47b5-822d-e89a7c694abc tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 527.690062] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-537c560b-d4e0-4493-a9dd-23fb06b6bc94 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 527.697239] env[61839]: DEBUG oslo_vmware.api [None req-f8fd2f30-7a32-47b5-822d-e89a7c694abc tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] Waiting for the task: (returnval){ [ 527.697239] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]522c9261-e94c-8080-c0e1-9a5d1882332c" [ 527.697239] env[61839]: _type = "Task" [ 527.697239] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 527.709303] env[61839]: DEBUG oslo_vmware.api [None req-f8fd2f30-7a32-47b5-822d-e89a7c694abc tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]522c9261-e94c-8080-c0e1-9a5d1882332c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 527.754881] env[61839]: DEBUG nova.network.neutron [None req-93209ce3-de5a-4c91-b8b0-efd4bd89a6cc tempest-ServerDiagnosticsNegativeTest-1581315091 tempest-ServerDiagnosticsNegativeTest-1581315091-project-member] [instance: 570fb8fe-391a-4f1b-be51-17979e9fb049] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 527.875147] env[61839]: DEBUG nova.compute.utils [None req-2194692e-8fa8-43a0-9f32-21d13bbfb66f tempest-ServersTestFqdnHostnames-1400549167 tempest-ServersTestFqdnHostnames-1400549167-project-member] Using /dev/sd instead of None {{(pid=61839) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 527.885305] env[61839]: DEBUG nova.compute.manager [None req-2194692e-8fa8-43a0-9f32-21d13bbfb66f tempest-ServersTestFqdnHostnames-1400549167 tempest-ServersTestFqdnHostnames-1400549167-project-member] [instance: 6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5] Allocating IP information in the background. {{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 527.885305] env[61839]: DEBUG nova.network.neutron [None req-2194692e-8fa8-43a0-9f32-21d13bbfb66f tempest-ServersTestFqdnHostnames-1400549167 tempest-ServersTestFqdnHostnames-1400549167-project-member] [instance: 6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5] allocate_for_instance() {{(pid=61839) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 527.910555] env[61839]: DEBUG oslo_vmware.api [None req-4249fffb-3702-4570-9f57-2cbc28b164c9 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Task: {'id': task-1314221, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.024896} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 527.913456] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-4249fffb-3702-4570-9f57-2cbc28b164c9 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Deleted the datastore file {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 527.914378] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5e19de0b-3a81-48ec-8a3b-e420292cf857 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 527.922802] env[61839]: DEBUG oslo_vmware.api [None req-4249fffb-3702-4570-9f57-2cbc28b164c9 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Waiting for the task: (returnval){ [ 527.922802] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]520f1285-29f8-1d7f-340f-615324f20aa1" [ 527.922802] env[61839]: _type = "Task" [ 527.922802] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 527.934221] env[61839]: DEBUG oslo_vmware.api [None req-4249fffb-3702-4570-9f57-2cbc28b164c9 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]520f1285-29f8-1d7f-340f-615324f20aa1, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 527.988296] env[61839]: DEBUG nova.policy [None req-2194692e-8fa8-43a0-9f32-21d13bbfb66f tempest-ServersTestFqdnHostnames-1400549167 tempest-ServersTestFqdnHostnames-1400549167-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a42ad91ec0f94bc08362a221b523606c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5cfbe613b17d47b6a4e7b2599b66d152', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61839) authorize /opt/stack/nova/nova/policy.py:201}} [ 528.051917] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a9222f2-65c7-451e-8c54-cf59957c80b4 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 528.062741] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a77fa52-f95f-446f-ad0e-ad1242081682 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 528.104158] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7bcd316-cf5a-4f3f-a499-4a9fdf84d716 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 528.112159] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffd67a64-a251-4059-a39e-384814bff91f {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 528.125383] env[61839]: DEBUG nova.compute.provider_tree [None req-12eb9dfa-b92e-47f2-a67a-cc399fb0a366 tempest-TenantUsagesTestJSON-1491461262 tempest-TenantUsagesTestJSON-1491461262-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 528.209282] env[61839]: DEBUG oslo_concurrency.lockutils [None req-f8fd2f30-7a32-47b5-822d-e89a7c694abc tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 528.209481] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-f8fd2f30-7a32-47b5-822d-e89a7c694abc tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] [instance: 41823a25-5ff2-4838-854d-5bada8e5daca] Processing image e497cc62-282a-4a70-9770-22d80d8a1013 {{(pid=61839) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 528.209721] env[61839]: DEBUG oslo_concurrency.lockutils [None req-f8fd2f30-7a32-47b5-822d-e89a7c694abc tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 528.209867] env[61839]: DEBUG oslo_concurrency.lockutils [None req-f8fd2f30-7a32-47b5-822d-e89a7c694abc tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 528.210055] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-f8fd2f30-7a32-47b5-822d-e89a7c694abc tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 528.210365] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ec0439b7-ec43-4a8d-84f1-618a55cc48d8 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 528.219021] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-f8fd2f30-7a32-47b5-822d-e89a7c694abc tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 528.219021] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-f8fd2f30-7a32-47b5-822d-e89a7c694abc tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61839) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 528.219254] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32c0331f-5383-4138-ac20-d36774965477 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 528.226764] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1ec3b72e-697c-4d4e-8163-575fd892d9fc {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 528.231760] env[61839]: DEBUG oslo_vmware.api [None req-f8fd2f30-7a32-47b5-822d-e89a7c694abc tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] Waiting for the task: (returnval){ [ 528.231760] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52ee4a0a-f2dc-7722-4a69-c37d184f4a13" [ 528.231760] env[61839]: _type = "Task" [ 528.231760] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 528.240176] env[61839]: DEBUG oslo_vmware.api [None req-f8fd2f30-7a32-47b5-822d-e89a7c694abc tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52ee4a0a-f2dc-7722-4a69-c37d184f4a13, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 528.257071] env[61839]: DEBUG oslo_concurrency.lockutils [None req-93209ce3-de5a-4c91-b8b0-efd4bd89a6cc tempest-ServerDiagnosticsNegativeTest-1581315091 tempest-ServerDiagnosticsNegativeTest-1581315091-project-member] Releasing lock "refresh_cache-570fb8fe-391a-4f1b-be51-17979e9fb049" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 528.257579] env[61839]: DEBUG nova.compute.manager [None req-93209ce3-de5a-4c91-b8b0-efd4bd89a6cc tempest-ServerDiagnosticsNegativeTest-1581315091 tempest-ServerDiagnosticsNegativeTest-1581315091-project-member] [instance: 570fb8fe-391a-4f1b-be51-17979e9fb049] Start destroying the instance on the hypervisor. {{(pid=61839) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 528.257801] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-93209ce3-de5a-4c91-b8b0-efd4bd89a6cc tempest-ServerDiagnosticsNegativeTest-1581315091 tempest-ServerDiagnosticsNegativeTest-1581315091-project-member] [instance: 570fb8fe-391a-4f1b-be51-17979e9fb049] Destroying instance {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 528.257990] env[61839]: DEBUG oslo_concurrency.lockutils [req-bcfac698-b884-4fa6-8517-494d2de31b1e req-138e2af0-bce5-4293-a00e-3935637944e1 service nova] Acquired lock "refresh_cache-570fb8fe-391a-4f1b-be51-17979e9fb049" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 528.258186] env[61839]: DEBUG nova.network.neutron [req-bcfac698-b884-4fa6-8517-494d2de31b1e req-138e2af0-bce5-4293-a00e-3935637944e1 service nova] [instance: 570fb8fe-391a-4f1b-be51-17979e9fb049] Refreshing network info cache for port b7ee1667-a38f-429e-acb4-c7171559db2b {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 528.259868] env[61839]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2ce9c1ae-4eb7-44d5-994c-720731bdd263 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 528.268398] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20285548-4de1-4283-9ed1-56f7218a40a3 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 528.295829] env[61839]: WARNING nova.virt.vmwareapi.vmops [None req-93209ce3-de5a-4c91-b8b0-efd4bd89a6cc tempest-ServerDiagnosticsNegativeTest-1581315091 tempest-ServerDiagnosticsNegativeTest-1581315091-project-member] [instance: 570fb8fe-391a-4f1b-be51-17979e9fb049] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 570fb8fe-391a-4f1b-be51-17979e9fb049 could not be found. [ 528.296085] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-93209ce3-de5a-4c91-b8b0-efd4bd89a6cc tempest-ServerDiagnosticsNegativeTest-1581315091 tempest-ServerDiagnosticsNegativeTest-1581315091-project-member] [instance: 570fb8fe-391a-4f1b-be51-17979e9fb049] Instance destroyed {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 528.296274] env[61839]: INFO nova.compute.manager [None req-93209ce3-de5a-4c91-b8b0-efd4bd89a6cc tempest-ServerDiagnosticsNegativeTest-1581315091 tempest-ServerDiagnosticsNegativeTest-1581315091-project-member] [instance: 570fb8fe-391a-4f1b-be51-17979e9fb049] Took 0.04 seconds to destroy the instance on the hypervisor. 
[ 528.296519] env[61839]: DEBUG oslo.service.loopingcall [None req-93209ce3-de5a-4c91-b8b0-efd4bd89a6cc tempest-ServerDiagnosticsNegativeTest-1581315091 tempest-ServerDiagnosticsNegativeTest-1581315091-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 528.296722] env[61839]: DEBUG nova.compute.manager [-] [instance: 570fb8fe-391a-4f1b-be51-17979e9fb049] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 528.296817] env[61839]: DEBUG nova.network.neutron [-] [instance: 570fb8fe-391a-4f1b-be51-17979e9fb049] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 528.335726] env[61839]: DEBUG nova.network.neutron [-] [instance: 570fb8fe-391a-4f1b-be51-17979e9fb049] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 528.385647] env[61839]: DEBUG nova.compute.manager [None req-2194692e-8fa8-43a0-9f32-21d13bbfb66f tempest-ServersTestFqdnHostnames-1400549167 tempest-ServersTestFqdnHostnames-1400549167-project-member] [instance: 6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5] Start building block device mappings for instance. {{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 528.432441] env[61839]: DEBUG oslo_vmware.api [None req-4249fffb-3702-4570-9f57-2cbc28b164c9 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]520f1285-29f8-1d7f-340f-615324f20aa1, 'name': SearchDatastore_Task, 'duration_secs': 0.017003} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 528.434493] env[61839]: DEBUG oslo_concurrency.lockutils [None req-4249fffb-3702-4570-9f57-2cbc28b164c9 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 528.434493] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-4249fffb-3702-4570-9f57-2cbc28b164c9 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk to [datastore1] 211e8267-3c33-42c8-852f-1c20d7987453/211e8267-3c33-42c8-852f-1c20d7987453.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 528.434493] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-242ae3bf-6ded-4223-9d8b-60e8395f1462 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 528.441751] env[61839]: DEBUG oslo_vmware.api [None req-4249fffb-3702-4570-9f57-2cbc28b164c9 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Waiting for the task: (returnval){ [ 528.441751] env[61839]: value = "task-1314222" [ 528.441751] env[61839]: _type = "Task" [ 528.441751] env[61839]: } to complete. 
{{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 528.450180] env[61839]: DEBUG oslo_vmware.api [None req-4249fffb-3702-4570-9f57-2cbc28b164c9 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Task: {'id': task-1314222, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 528.485941] env[61839]: DEBUG nova.network.neutron [None req-2194692e-8fa8-43a0-9f32-21d13bbfb66f tempest-ServersTestFqdnHostnames-1400549167 tempest-ServersTestFqdnHostnames-1400549167-project-member] [instance: 6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5] Successfully created port: 16fa8de1-60b8-4e77-bc49-e40967dff057 {{(pid=61839) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 528.629219] env[61839]: DEBUG nova.scheduler.client.report [None req-12eb9dfa-b92e-47f2-a67a-cc399fb0a366 tempest-TenantUsagesTestJSON-1491461262 tempest-TenantUsagesTestJSON-1491461262-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 528.745692] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-f8fd2f30-7a32-47b5-822d-e89a7c694abc tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] [instance: 41823a25-5ff2-4838-854d-5bada8e5daca] Preparing fetch location {{(pid=61839) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 528.747023] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-f8fd2f30-7a32-47b5-822d-e89a7c694abc tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] Creating directory with path [datastore2] vmware_temp/2c10bf97-0f63-47a7-8f5f-f20c364b5fa5/e497cc62-282a-4a70-9770-22d80d8a1013 {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 528.747023] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-772ab87f-20ff-47f6-8403-68ff0df56034 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 528.775804] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-f8fd2f30-7a32-47b5-822d-e89a7c694abc tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] Created directory with path [datastore2] vmware_temp/2c10bf97-0f63-47a7-8f5f-f20c364b5fa5/e497cc62-282a-4a70-9770-22d80d8a1013 {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 528.776577] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-f8fd2f30-7a32-47b5-822d-e89a7c694abc tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] [instance: 41823a25-5ff2-4838-854d-5bada8e5daca] Fetch image to [datastore2] vmware_temp/2c10bf97-0f63-47a7-8f5f-f20c364b5fa5/e497cc62-282a-4a70-9770-22d80d8a1013/tmp-sparse.vmdk {{(pid=61839) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 
528.777639] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-f8fd2f30-7a32-47b5-822d-e89a7c694abc tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] [instance: 41823a25-5ff2-4838-854d-5bada8e5daca] Downloading image file data e497cc62-282a-4a70-9770-22d80d8a1013 to [datastore2] vmware_temp/2c10bf97-0f63-47a7-8f5f-f20c364b5fa5/e497cc62-282a-4a70-9770-22d80d8a1013/tmp-sparse.vmdk on the data store datastore2 {{(pid=61839) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 528.781022] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42c9dcb2-3fde-4877-8418-e983575087b3 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 528.792473] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9537f0c-9637-4ba5-ac97-de54fc478449 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 528.805224] env[61839]: DEBUG nova.network.neutron [req-bcfac698-b884-4fa6-8517-494d2de31b1e req-138e2af0-bce5-4293-a00e-3935637944e1 service nova] [instance: 570fb8fe-391a-4f1b-be51-17979e9fb049] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 528.808892] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b4b2076-3197-408b-801f-b5a711393461 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 528.845259] env[61839]: DEBUG nova.network.neutron [-] [instance: 570fb8fe-391a-4f1b-be51-17979e9fb049] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 528.852077] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b32650a7-ac2a-4404-882a-4713e442fe89 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 528.860056] env[61839]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-30770001-9346-4cb4-8948-5542f1a0d132 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 528.953297] env[61839]: DEBUG oslo_vmware.api [None req-4249fffb-3702-4570-9f57-2cbc28b164c9 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Task: {'id': task-1314222, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.498328} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 528.953698] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-4249fffb-3702-4570-9f57-2cbc28b164c9 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk to [datastore1] 211e8267-3c33-42c8-852f-1c20d7987453/211e8267-3c33-42c8-852f-1c20d7987453.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 528.953784] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-4249fffb-3702-4570-9f57-2cbc28b164c9 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] [instance: 211e8267-3c33-42c8-852f-1c20d7987453] Extending root virtual disk to 1048576 {{(pid=61839) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 528.954054] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2b7fcf1f-d822-446a-b43c-d0955d2a93ea {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 528.958507] env[61839]: DEBUG nova.virt.vmwareapi.images [None req-f8fd2f30-7a32-47b5-822d-e89a7c694abc tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] [instance: 41823a25-5ff2-4838-854d-5bada8e5daca] Downloading image file data e497cc62-282a-4a70-9770-22d80d8a1013 to the data store datastore2 {{(pid=61839) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 528.963174] env[61839]: DEBUG oslo_vmware.api [None req-4249fffb-3702-4570-9f57-2cbc28b164c9 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Waiting for the task: (returnval){ [ 528.963174] env[61839]: value = "task-1314223" [ 528.963174] env[61839]: _type = "Task" [ 528.963174] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 528.972238] env[61839]: DEBUG oslo_vmware.api [None req-4249fffb-3702-4570-9f57-2cbc28b164c9 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Task: {'id': task-1314223, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 528.985157] env[61839]: DEBUG nova.network.neutron [req-bcfac698-b884-4fa6-8517-494d2de31b1e req-138e2af0-bce5-4293-a00e-3935637944e1 service nova] [instance: 570fb8fe-391a-4f1b-be51-17979e9fb049] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 529.039787] env[61839]: DEBUG oslo_vmware.rw_handles [None req-f8fd2f30-7a32-47b5-822d-e89a7c694abc tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/2c10bf97-0f63-47a7-8f5f-f20c364b5fa5/e497cc62-282a-4a70-9770-22d80d8a1013/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61839) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 529.136310] env[61839]: DEBUG oslo_concurrency.lockutils [None req-12eb9dfa-b92e-47f2-a67a-cc399fb0a366 tempest-TenantUsagesTestJSON-1491461262 tempest-TenantUsagesTestJSON-1491461262-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.772s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 529.137463] env[61839]: ERROR nova.compute.manager [None req-12eb9dfa-b92e-47f2-a67a-cc399fb0a366 tempest-TenantUsagesTestJSON-1491461262 tempest-TenantUsagesTestJSON-1491461262-project-member] [instance: 95685d36-7efa-42a9-8722-5b90d6edbce5] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port cb0e2d64-499b-40f4-8a14-5de5a5754bc3, please check neutron logs for more information. [ 529.137463] env[61839]: ERROR nova.compute.manager [instance: 95685d36-7efa-42a9-8722-5b90d6edbce5] Traceback (most recent call last): [ 529.137463] env[61839]: ERROR nova.compute.manager [instance: 95685d36-7efa-42a9-8722-5b90d6edbce5] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 529.137463] env[61839]: ERROR nova.compute.manager [instance: 95685d36-7efa-42a9-8722-5b90d6edbce5] self.driver.spawn(context, instance, image_meta, [ 529.137463] env[61839]: ERROR nova.compute.manager [instance: 95685d36-7efa-42a9-8722-5b90d6edbce5] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 529.137463] env[61839]: ERROR nova.compute.manager [instance: 95685d36-7efa-42a9-8722-5b90d6edbce5] self._vmops.spawn(context, instance, image_meta, injected_files, [ 529.137463] env[61839]: ERROR nova.compute.manager [instance: 95685d36-7efa-42a9-8722-5b90d6edbce5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 529.137463] env[61839]: ERROR nova.compute.manager [instance: 95685d36-7efa-42a9-8722-5b90d6edbce5] vm_ref = self.build_virtual_machine(instance, [ 529.137463] env[61839]: ERROR nova.compute.manager [instance: 95685d36-7efa-42a9-8722-5b90d6edbce5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 529.137463] env[61839]: ERROR nova.compute.manager [instance: 95685d36-7efa-42a9-8722-5b90d6edbce5] vif_infos = vmwarevif.get_vif_info(self._session, [ 529.137463] env[61839]: ERROR nova.compute.manager [instance: 95685d36-7efa-42a9-8722-5b90d6edbce5] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 529.137940] env[61839]: ERROR nova.compute.manager [instance: 95685d36-7efa-42a9-8722-5b90d6edbce5] for vif in network_info: [ 529.137940] env[61839]: ERROR nova.compute.manager [instance: 95685d36-7efa-42a9-8722-5b90d6edbce5] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 529.137940] env[61839]: ERROR nova.compute.manager [instance: 95685d36-7efa-42a9-8722-5b90d6edbce5] return self._sync_wrapper(fn, *args, **kwargs) [ 529.137940] env[61839]: ERROR nova.compute.manager [instance: 95685d36-7efa-42a9-8722-5b90d6edbce5] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 529.137940] env[61839]: ERROR nova.compute.manager [instance: 95685d36-7efa-42a9-8722-5b90d6edbce5] self.wait() [ 529.137940] env[61839]: ERROR nova.compute.manager [instance: 95685d36-7efa-42a9-8722-5b90d6edbce5] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 529.137940] 
env[61839]: ERROR nova.compute.manager [instance: 95685d36-7efa-42a9-8722-5b90d6edbce5] self[:] = self._gt.wait() [ 529.137940] env[61839]: ERROR nova.compute.manager [instance: 95685d36-7efa-42a9-8722-5b90d6edbce5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 529.137940] env[61839]: ERROR nova.compute.manager [instance: 95685d36-7efa-42a9-8722-5b90d6edbce5] return self._exit_event.wait() [ 529.137940] env[61839]: ERROR nova.compute.manager [instance: 95685d36-7efa-42a9-8722-5b90d6edbce5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 529.137940] env[61839]: ERROR nova.compute.manager [instance: 95685d36-7efa-42a9-8722-5b90d6edbce5] result = hub.switch() [ 529.137940] env[61839]: ERROR nova.compute.manager [instance: 95685d36-7efa-42a9-8722-5b90d6edbce5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 529.137940] env[61839]: ERROR nova.compute.manager [instance: 95685d36-7efa-42a9-8722-5b90d6edbce5] return self.greenlet.switch() [ 529.138588] env[61839]: ERROR nova.compute.manager [instance: 95685d36-7efa-42a9-8722-5b90d6edbce5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 529.138588] env[61839]: ERROR nova.compute.manager [instance: 95685d36-7efa-42a9-8722-5b90d6edbce5] result = function(*args, **kwargs) [ 529.138588] env[61839]: ERROR nova.compute.manager [instance: 95685d36-7efa-42a9-8722-5b90d6edbce5] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 529.138588] env[61839]: ERROR nova.compute.manager [instance: 95685d36-7efa-42a9-8722-5b90d6edbce5] return func(*args, **kwargs) [ 529.138588] env[61839]: ERROR nova.compute.manager [instance: 95685d36-7efa-42a9-8722-5b90d6edbce5] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 529.138588] env[61839]: ERROR nova.compute.manager [instance: 95685d36-7efa-42a9-8722-5b90d6edbce5] raise e [ 529.138588] env[61839]: ERROR nova.compute.manager [instance: 95685d36-7efa-42a9-8722-5b90d6edbce5] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 529.138588] env[61839]: ERROR nova.compute.manager [instance: 95685d36-7efa-42a9-8722-5b90d6edbce5] nwinfo = self.network_api.allocate_for_instance( [ 529.138588] env[61839]: ERROR nova.compute.manager [instance: 95685d36-7efa-42a9-8722-5b90d6edbce5] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 529.138588] env[61839]: ERROR nova.compute.manager [instance: 95685d36-7efa-42a9-8722-5b90d6edbce5] created_port_ids = self._update_ports_for_instance( [ 529.138588] env[61839]: ERROR nova.compute.manager [instance: 95685d36-7efa-42a9-8722-5b90d6edbce5] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 529.138588] env[61839]: ERROR nova.compute.manager [instance: 95685d36-7efa-42a9-8722-5b90d6edbce5] with excutils.save_and_reraise_exception(): [ 529.138588] env[61839]: ERROR nova.compute.manager [instance: 95685d36-7efa-42a9-8722-5b90d6edbce5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 529.138961] env[61839]: ERROR nova.compute.manager [instance: 95685d36-7efa-42a9-8722-5b90d6edbce5] self.force_reraise() [ 529.138961] env[61839]: ERROR nova.compute.manager [instance: 95685d36-7efa-42a9-8722-5b90d6edbce5] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 529.138961] env[61839]: ERROR nova.compute.manager [instance: 95685d36-7efa-42a9-8722-5b90d6edbce5] raise self.value [ 529.138961] env[61839]: ERROR nova.compute.manager [instance: 95685d36-7efa-42a9-8722-5b90d6edbce5] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 529.138961] env[61839]: ERROR nova.compute.manager [instance: 95685d36-7efa-42a9-8722-5b90d6edbce5] updated_port = self._update_port( [ 529.138961] env[61839]: ERROR nova.compute.manager [instance: 95685d36-7efa-42a9-8722-5b90d6edbce5] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 529.138961] env[61839]: ERROR nova.compute.manager [instance: 95685d36-7efa-42a9-8722-5b90d6edbce5] _ensure_no_port_binding_failure(port) [ 529.138961] env[61839]: ERROR nova.compute.manager [instance: 95685d36-7efa-42a9-8722-5b90d6edbce5] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 529.138961] env[61839]: ERROR nova.compute.manager [instance: 95685d36-7efa-42a9-8722-5b90d6edbce5] raise exception.PortBindingFailed(port_id=port['id']) [ 529.138961] env[61839]: ERROR nova.compute.manager [instance: 95685d36-7efa-42a9-8722-5b90d6edbce5] nova.exception.PortBindingFailed: Binding failed for port cb0e2d64-499b-40f4-8a14-5de5a5754bc3, please check neutron logs for more information. [ 529.138961] env[61839]: ERROR nova.compute.manager [instance: 95685d36-7efa-42a9-8722-5b90d6edbce5] [ 529.139299] env[61839]: DEBUG nova.compute.utils [None req-12eb9dfa-b92e-47f2-a67a-cc399fb0a366 tempest-TenantUsagesTestJSON-1491461262 tempest-TenantUsagesTestJSON-1491461262-project-member] [instance: 95685d36-7efa-42a9-8722-5b90d6edbce5] Binding failed for port cb0e2d64-499b-40f4-8a14-5de5a5754bc3, please check neutron logs for more information. {{(pid=61839) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 529.141929] env[61839]: DEBUG oslo_concurrency.lockutils [None req-71b74068-a47a-43e7-91f8-80e309405090 tempest-ServerDiagnosticsTest-192461370 tempest-ServerDiagnosticsTest-192461370-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 8.133s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 529.147423] env[61839]: DEBUG nova.compute.manager [None req-12eb9dfa-b92e-47f2-a67a-cc399fb0a366 tempest-TenantUsagesTestJSON-1491461262 tempest-TenantUsagesTestJSON-1491461262-project-member] [instance: 95685d36-7efa-42a9-8722-5b90d6edbce5] Build of instance 95685d36-7efa-42a9-8722-5b90d6edbce5 was re-scheduled: Binding failed for port cb0e2d64-499b-40f4-8a14-5de5a5754bc3, please check neutron logs for more information. 
{{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 529.147958] env[61839]: DEBUG nova.compute.manager [None req-12eb9dfa-b92e-47f2-a67a-cc399fb0a366 tempest-TenantUsagesTestJSON-1491461262 tempest-TenantUsagesTestJSON-1491461262-project-member] [instance: 95685d36-7efa-42a9-8722-5b90d6edbce5] Unplugging VIFs for instance {{(pid=61839) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 529.148223] env[61839]: DEBUG oslo_concurrency.lockutils [None req-12eb9dfa-b92e-47f2-a67a-cc399fb0a366 tempest-TenantUsagesTestJSON-1491461262 tempest-TenantUsagesTestJSON-1491461262-project-member] Acquiring lock "refresh_cache-95685d36-7efa-42a9-8722-5b90d6edbce5" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 529.148366] env[61839]: DEBUG oslo_concurrency.lockutils [None req-12eb9dfa-b92e-47f2-a67a-cc399fb0a366 tempest-TenantUsagesTestJSON-1491461262 tempest-TenantUsagesTestJSON-1491461262-project-member] Acquired lock "refresh_cache-95685d36-7efa-42a9-8722-5b90d6edbce5" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 529.148548] env[61839]: DEBUG nova.network.neutron [None req-12eb9dfa-b92e-47f2-a67a-cc399fb0a366 tempest-TenantUsagesTestJSON-1491461262 tempest-TenantUsagesTestJSON-1491461262-project-member] [instance: 95685d36-7efa-42a9-8722-5b90d6edbce5] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 529.228888] env[61839]: DEBUG oslo_concurrency.lockutils [None req-4a74a28c-d68f-49fe-a9f7-0d900d2e4df2 tempest-ServersWithSpecificFlavorTestJSON-2009030967 tempest-ServersWithSpecificFlavorTestJSON-2009030967-project-member] Acquiring lock "d0ed5c81-e05a-41a3-9e45-ae0a2a235f16" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 529.228888] env[61839]: DEBUG oslo_concurrency.lockutils [None req-4a74a28c-d68f-49fe-a9f7-0d900d2e4df2 tempest-ServersWithSpecificFlavorTestJSON-2009030967 tempest-ServersWithSpecificFlavorTestJSON-2009030967-project-member] Lock "d0ed5c81-e05a-41a3-9e45-ae0a2a235f16" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 529.350795] env[61839]: INFO nova.compute.manager [-] [instance: 570fb8fe-391a-4f1b-be51-17979e9fb049] Took 1.05 seconds to deallocate network for instance. 
[ 529.353258] env[61839]: DEBUG nova.compute.claims [None req-93209ce3-de5a-4c91-b8b0-efd4bd89a6cc tempest-ServerDiagnosticsNegativeTest-1581315091 tempest-ServerDiagnosticsNegativeTest-1581315091-project-member] [instance: 570fb8fe-391a-4f1b-be51-17979e9fb049] Aborting claim: {{(pid=61839) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 529.353599] env[61839]: DEBUG oslo_concurrency.lockutils [None req-93209ce3-de5a-4c91-b8b0-efd4bd89a6cc tempest-ServerDiagnosticsNegativeTest-1581315091 tempest-ServerDiagnosticsNegativeTest-1581315091-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 529.402323] env[61839]: DEBUG nova.compute.manager [None req-2194692e-8fa8-43a0-9f32-21d13bbfb66f tempest-ServersTestFqdnHostnames-1400549167 tempest-ServersTestFqdnHostnames-1400549167-project-member] [instance: 6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5] Start spawning the instance on the hypervisor. {{(pid=61839) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 529.434512] env[61839]: DEBUG nova.virt.hardware [None req-2194692e-8fa8-43a0-9f32-21d13bbfb66f tempest-ServersTestFqdnHostnames-1400549167 tempest-ServersTestFqdnHostnames-1400549167-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-18T16:51:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-18T16:50:57Z,direct_url=,disk_format='vmdk',id=e497cc62-282a-4a70-9770-22d80d8a1013,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a98e034276e44534a9feace637762da7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-18T16:50:58Z,virtual_size=,visibility=), allow threads: False {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 529.434619] env[61839]: DEBUG nova.virt.hardware [None req-2194692e-8fa8-43a0-9f32-21d13bbfb66f tempest-ServersTestFqdnHostnames-1400549167 tempest-ServersTestFqdnHostnames-1400549167-project-member] Flavor limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 529.434832] env[61839]: DEBUG nova.virt.hardware [None req-2194692e-8fa8-43a0-9f32-21d13bbfb66f tempest-ServersTestFqdnHostnames-1400549167 tempest-ServersTestFqdnHostnames-1400549167-project-member] Image limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 529.434896] env[61839]: DEBUG nova.virt.hardware [None req-2194692e-8fa8-43a0-9f32-21d13bbfb66f tempest-ServersTestFqdnHostnames-1400549167 tempest-ServersTestFqdnHostnames-1400549167-project-member] Flavor pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 529.438421] env[61839]: DEBUG nova.virt.hardware [None req-2194692e-8fa8-43a0-9f32-21d13bbfb66f tempest-ServersTestFqdnHostnames-1400549167 tempest-ServersTestFqdnHostnames-1400549167-project-member] Image pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 529.438768] env[61839]: DEBUG nova.virt.hardware [None 
req-2194692e-8fa8-43a0-9f32-21d13bbfb66f tempest-ServersTestFqdnHostnames-1400549167 tempest-ServersTestFqdnHostnames-1400549167-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 529.439058] env[61839]: DEBUG nova.virt.hardware [None req-2194692e-8fa8-43a0-9f32-21d13bbfb66f tempest-ServersTestFqdnHostnames-1400549167 tempest-ServersTestFqdnHostnames-1400549167-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 529.439291] env[61839]: DEBUG nova.virt.hardware [None req-2194692e-8fa8-43a0-9f32-21d13bbfb66f tempest-ServersTestFqdnHostnames-1400549167 tempest-ServersTestFqdnHostnames-1400549167-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 529.439512] env[61839]: DEBUG nova.virt.hardware [None req-2194692e-8fa8-43a0-9f32-21d13bbfb66f tempest-ServersTestFqdnHostnames-1400549167 tempest-ServersTestFqdnHostnames-1400549167-project-member] Got 1 possible topologies {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 529.439737] env[61839]: DEBUG nova.virt.hardware [None req-2194692e-8fa8-43a0-9f32-21d13bbfb66f tempest-ServersTestFqdnHostnames-1400549167 tempest-ServersTestFqdnHostnames-1400549167-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 529.439966] env[61839]: DEBUG nova.virt.hardware [None req-2194692e-8fa8-43a0-9f32-21d13bbfb66f tempest-ServersTestFqdnHostnames-1400549167 tempest-ServersTestFqdnHostnames-1400549167-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 529.441068] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d472afa5-f49c-4030-8944-3ed6e69a189c {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 529.453020] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e1f0019-9f1d-49b8-a40d-fc7c5ec73cc1 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 529.490140] env[61839]: DEBUG oslo_vmware.api [None req-4249fffb-3702-4570-9f57-2cbc28b164c9 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Task: {'id': task-1314223, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.061772} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 529.490140] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-4249fffb-3702-4570-9f57-2cbc28b164c9 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] [instance: 211e8267-3c33-42c8-852f-1c20d7987453] Extended root virtual disk {{(pid=61839) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 529.490140] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c18a9bd7-5bdc-4698-9e35-0006465d4a80 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 529.493879] env[61839]: DEBUG oslo_concurrency.lockutils [req-bcfac698-b884-4fa6-8517-494d2de31b1e req-138e2af0-bce5-4293-a00e-3935637944e1 service nova] Releasing lock "refresh_cache-570fb8fe-391a-4f1b-be51-17979e9fb049" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 529.518256] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-4249fffb-3702-4570-9f57-2cbc28b164c9 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] [instance: 211e8267-3c33-42c8-852f-1c20d7987453] Reconfiguring VM instance instance-00000004 to attach disk [datastore1] 211e8267-3c33-42c8-852f-1c20d7987453/211e8267-3c33-42c8-852f-1c20d7987453.vmdk or device None with type sparse {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 529.523720] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-36b71064-657a-4724-8845-160ebc8ab961 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 529.551274] env[61839]: DEBUG oslo_vmware.api [None req-4249fffb-3702-4570-9f57-2cbc28b164c9 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Waiting for the task: (returnval){ [ 529.551274] env[61839]: value = "task-1314224" [ 529.551274] env[61839]: _type = "Task" [ 529.551274] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 529.565132] env[61839]: DEBUG oslo_vmware.api [None req-4249fffb-3702-4570-9f57-2cbc28b164c9 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Task: {'id': task-1314224, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 529.711208] env[61839]: DEBUG nova.network.neutron [None req-12eb9dfa-b92e-47f2-a67a-cc399fb0a366 tempest-TenantUsagesTestJSON-1491461262 tempest-TenantUsagesTestJSON-1491461262-project-member] [instance: 95685d36-7efa-42a9-8722-5b90d6edbce5] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 529.713912] env[61839]: DEBUG oslo_vmware.rw_handles [None req-f8fd2f30-7a32-47b5-822d-e89a7c694abc tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] Completed reading data from the image iterator. 
{{(pid=61839) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 529.714142] env[61839]: DEBUG oslo_vmware.rw_handles [None req-f8fd2f30-7a32-47b5-822d-e89a7c694abc tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/2c10bf97-0f63-47a7-8f5f-f20c364b5fa5/e497cc62-282a-4a70-9770-22d80d8a1013/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61839) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 529.731324] env[61839]: DEBUG nova.compute.manager [None req-4a74a28c-d68f-49fe-a9f7-0d900d2e4df2 tempest-ServersWithSpecificFlavorTestJSON-2009030967 tempest-ServersWithSpecificFlavorTestJSON-2009030967-project-member] [instance: d0ed5c81-e05a-41a3-9e45-ae0a2a235f16] Starting instance... {{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 529.850660] env[61839]: DEBUG nova.virt.vmwareapi.images [None req-f8fd2f30-7a32-47b5-822d-e89a7c694abc tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] [instance: 41823a25-5ff2-4838-854d-5bada8e5daca] Downloaded image file data e497cc62-282a-4a70-9770-22d80d8a1013 to vmware_temp/2c10bf97-0f63-47a7-8f5f-f20c364b5fa5/e497cc62-282a-4a70-9770-22d80d8a1013/tmp-sparse.vmdk on the data store datastore2 {{(pid=61839) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 529.853684] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-f8fd2f30-7a32-47b5-822d-e89a7c694abc tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] [instance: 41823a25-5ff2-4838-854d-5bada8e5daca] Caching image {{(pid=61839) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 529.853684] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-f8fd2f30-7a32-47b5-822d-e89a7c694abc tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] Copying Virtual Disk [datastore2] vmware_temp/2c10bf97-0f63-47a7-8f5f-f20c364b5fa5/e497cc62-282a-4a70-9770-22d80d8a1013/tmp-sparse.vmdk to [datastore2] vmware_temp/2c10bf97-0f63-47a7-8f5f-f20c364b5fa5/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 529.853684] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ae8376e6-0eb8-4ede-9863-6db43e315b67 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 529.861517] env[61839]: DEBUG oslo_vmware.api [None req-f8fd2f30-7a32-47b5-822d-e89a7c694abc tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] Waiting for the task: (returnval){ [ 529.861517] env[61839]: value = "task-1314225" [ 529.861517] env[61839]: _type = "Task" [ 529.861517] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 529.871289] env[61839]: DEBUG oslo_vmware.api [None req-f8fd2f30-7a32-47b5-822d-e89a7c694abc tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] Task: {'id': task-1314225, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 529.916815] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d3794c4-4cf6-47ae-ae65-d37ead122d50 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 529.920141] env[61839]: DEBUG nova.network.neutron [None req-12eb9dfa-b92e-47f2-a67a-cc399fb0a366 tempest-TenantUsagesTestJSON-1491461262 tempest-TenantUsagesTestJSON-1491461262-project-member] [instance: 95685d36-7efa-42a9-8722-5b90d6edbce5] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 529.926365] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e61c01a-ecb7-4dbe-b33b-c386468f05e6 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 529.971053] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-121d6975-ce9c-4791-b904-5fecc75b1655 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 529.981690] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-379d2174-4da9-41df-a7c3-7c36ca8754b0 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 530.000964] env[61839]: DEBUG nova.compute.provider_tree [None req-71b74068-a47a-43e7-91f8-80e309405090 tempest-ServerDiagnosticsTest-192461370 tempest-ServerDiagnosticsTest-192461370-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 530.068816] env[61839]: DEBUG oslo_vmware.api [None req-4249fffb-3702-4570-9f57-2cbc28b164c9 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Task: {'id': task-1314224, 'name': ReconfigVM_Task, 'duration_secs': 0.358052} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 530.071258] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-4249fffb-3702-4570-9f57-2cbc28b164c9 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] [instance: 211e8267-3c33-42c8-852f-1c20d7987453] Reconfigured VM instance instance-00000004 to attach disk [datastore1] 211e8267-3c33-42c8-852f-1c20d7987453/211e8267-3c33-42c8-852f-1c20d7987453.vmdk or device None with type sparse {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 530.074767] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-dd9c7252-f43a-4ae6-99ae-3717985d1216 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 530.078715] env[61839]: DEBUG oslo_vmware.api [None req-4249fffb-3702-4570-9f57-2cbc28b164c9 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Waiting for the task: (returnval){ [ 530.078715] env[61839]: value = "task-1314226" [ 530.078715] env[61839]: _type = "Task" [ 530.078715] env[61839]: } to complete. 
{{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 530.093394] env[61839]: DEBUG oslo_vmware.api [None req-4249fffb-3702-4570-9f57-2cbc28b164c9 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Task: {'id': task-1314226, 'name': Rename_Task} progress is 6%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 530.269547] env[61839]: DEBUG oslo_concurrency.lockutils [None req-4a74a28c-d68f-49fe-a9f7-0d900d2e4df2 tempest-ServersWithSpecificFlavorTestJSON-2009030967 tempest-ServersWithSpecificFlavorTestJSON-2009030967-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 530.373053] env[61839]: DEBUG oslo_vmware.api [None req-f8fd2f30-7a32-47b5-822d-e89a7c694abc tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] Task: {'id': task-1314225, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 530.406620] env[61839]: DEBUG nova.compute.manager [req-5bcbb152-ae9a-4433-a28d-d6f695203839 req-994734ec-447d-477d-8d6d-d9a5a895204c service nova] [instance: 570fb8fe-391a-4f1b-be51-17979e9fb049] Received event network-vif-deleted-b7ee1667-a38f-429e-acb4-c7171559db2b {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 530.423538] env[61839]: DEBUG oslo_concurrency.lockutils [None req-12eb9dfa-b92e-47f2-a67a-cc399fb0a366 tempest-TenantUsagesTestJSON-1491461262 tempest-TenantUsagesTestJSON-1491461262-project-member] Releasing lock "refresh_cache-95685d36-7efa-42a9-8722-5b90d6edbce5" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 530.423847] env[61839]: DEBUG nova.compute.manager [None req-12eb9dfa-b92e-47f2-a67a-cc399fb0a366 tempest-TenantUsagesTestJSON-1491461262 tempest-TenantUsagesTestJSON-1491461262-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61839) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 530.424396] env[61839]: DEBUG nova.compute.manager [None req-12eb9dfa-b92e-47f2-a67a-cc399fb0a366 tempest-TenantUsagesTestJSON-1491461262 tempest-TenantUsagesTestJSON-1491461262-project-member] [instance: 95685d36-7efa-42a9-8722-5b90d6edbce5] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 530.424630] env[61839]: DEBUG nova.network.neutron [None req-12eb9dfa-b92e-47f2-a67a-cc399fb0a366 tempest-TenantUsagesTestJSON-1491461262 tempest-TenantUsagesTestJSON-1491461262-project-member] [instance: 95685d36-7efa-42a9-8722-5b90d6edbce5] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 530.463108] env[61839]: DEBUG nova.network.neutron [None req-12eb9dfa-b92e-47f2-a67a-cc399fb0a366 tempest-TenantUsagesTestJSON-1491461262 tempest-TenantUsagesTestJSON-1491461262-project-member] [instance: 95685d36-7efa-42a9-8722-5b90d6edbce5] Instance cache missing network info. 
{{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 530.507799] env[61839]: DEBUG nova.scheduler.client.report [None req-71b74068-a47a-43e7-91f8-80e309405090 tempest-ServerDiagnosticsTest-192461370 tempest-ServerDiagnosticsTest-192461370-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 530.593672] env[61839]: DEBUG oslo_vmware.api [None req-4249fffb-3702-4570-9f57-2cbc28b164c9 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Task: {'id': task-1314226, 'name': Rename_Task, 'duration_secs': 0.140075} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 530.593672] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-4249fffb-3702-4570-9f57-2cbc28b164c9 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] [instance: 211e8267-3c33-42c8-852f-1c20d7987453] Powering on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 530.593672] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-cfcd4932-c7be-4259-aa5b-425fb29fe342 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 530.600021] env[61839]: DEBUG oslo_vmware.api [None req-4249fffb-3702-4570-9f57-2cbc28b164c9 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Waiting for the task: (returnval){ [ 530.600021] env[61839]: value = "task-1314227" [ 530.600021] env[61839]: _type = "Task" [ 530.600021] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 530.608773] env[61839]: DEBUG oslo_vmware.api [None req-4249fffb-3702-4570-9f57-2cbc28b164c9 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Task: {'id': task-1314227, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 530.878254] env[61839]: DEBUG oslo_vmware.api [None req-f8fd2f30-7a32-47b5-822d-e89a7c694abc tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] Task: {'id': task-1314225, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.670844} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 530.878512] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-f8fd2f30-7a32-47b5-822d-e89a7c694abc tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] Copied Virtual Disk [datastore2] vmware_temp/2c10bf97-0f63-47a7-8f5f-f20c364b5fa5/e497cc62-282a-4a70-9770-22d80d8a1013/tmp-sparse.vmdk to [datastore2] vmware_temp/2c10bf97-0f63-47a7-8f5f-f20c364b5fa5/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 530.878736] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-f8fd2f30-7a32-47b5-822d-e89a7c694abc tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] Deleting the datastore file [datastore2] vmware_temp/2c10bf97-0f63-47a7-8f5f-f20c364b5fa5/e497cc62-282a-4a70-9770-22d80d8a1013/tmp-sparse.vmdk {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 530.879022] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-684ce090-347b-4099-98f0-fe0e0ef92a11 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 530.887071] env[61839]: DEBUG oslo_vmware.api [None req-f8fd2f30-7a32-47b5-822d-e89a7c694abc tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] Waiting for the task: (returnval){ [ 530.887071] env[61839]: value = "task-1314228" [ 530.887071] env[61839]: _type = "Task" [ 530.887071] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 530.896425] env[61839]: DEBUG oslo_vmware.api [None req-f8fd2f30-7a32-47b5-822d-e89a7c694abc tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] Task: {'id': task-1314228, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 530.965855] env[61839]: DEBUG nova.network.neutron [None req-12eb9dfa-b92e-47f2-a67a-cc399fb0a366 tempest-TenantUsagesTestJSON-1491461262 tempest-TenantUsagesTestJSON-1491461262-project-member] [instance: 95685d36-7efa-42a9-8722-5b90d6edbce5] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 531.016483] env[61839]: DEBUG oslo_concurrency.lockutils [None req-71b74068-a47a-43e7-91f8-80e309405090 tempest-ServerDiagnosticsTest-192461370 tempest-ServerDiagnosticsTest-192461370-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.874s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 531.017560] env[61839]: ERROR nova.compute.manager [None req-71b74068-a47a-43e7-91f8-80e309405090 tempest-ServerDiagnosticsTest-192461370 tempest-ServerDiagnosticsTest-192461370-project-member] [instance: c258dbf2-be81-40e5-a11a-03dee332d3b6] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 50fc3cdf-49e3-4a6a-8a5c-8068eb6cd193, please check neutron logs for more information. 
[ 531.017560] env[61839]: ERROR nova.compute.manager [instance: c258dbf2-be81-40e5-a11a-03dee332d3b6] Traceback (most recent call last):
[ 531.017560] env[61839]: ERROR nova.compute.manager [instance: c258dbf2-be81-40e5-a11a-03dee332d3b6] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance
[ 531.017560] env[61839]: ERROR nova.compute.manager [instance: c258dbf2-be81-40e5-a11a-03dee332d3b6] self.driver.spawn(context, instance, image_meta,
[ 531.017560] env[61839]: ERROR nova.compute.manager [instance: c258dbf2-be81-40e5-a11a-03dee332d3b6] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn
[ 531.017560] env[61839]: ERROR nova.compute.manager [instance: c258dbf2-be81-40e5-a11a-03dee332d3b6] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 531.017560] env[61839]: ERROR nova.compute.manager [instance: c258dbf2-be81-40e5-a11a-03dee332d3b6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn
[ 531.017560] env[61839]: ERROR nova.compute.manager [instance: c258dbf2-be81-40e5-a11a-03dee332d3b6] vm_ref = self.build_virtual_machine(instance,
[ 531.017560] env[61839]: ERROR nova.compute.manager [instance: c258dbf2-be81-40e5-a11a-03dee332d3b6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine
[ 531.017560] env[61839]: ERROR nova.compute.manager [instance: c258dbf2-be81-40e5-a11a-03dee332d3b6] vif_infos = vmwarevif.get_vif_info(self._session,
[ 531.017560] env[61839]: ERROR nova.compute.manager [instance: c258dbf2-be81-40e5-a11a-03dee332d3b6] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info
[ 531.018476] env[61839]: ERROR nova.compute.manager [instance: c258dbf2-be81-40e5-a11a-03dee332d3b6] for vif in network_info:
[ 531.018476] env[61839]: ERROR nova.compute.manager [instance: c258dbf2-be81-40e5-a11a-03dee332d3b6] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__
[ 531.018476] env[61839]: ERROR nova.compute.manager [instance: c258dbf2-be81-40e5-a11a-03dee332d3b6] return self._sync_wrapper(fn, *args, **kwargs)
[ 531.018476] env[61839]: ERROR nova.compute.manager [instance: c258dbf2-be81-40e5-a11a-03dee332d3b6] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper
[ 531.018476] env[61839]: ERROR nova.compute.manager [instance: c258dbf2-be81-40e5-a11a-03dee332d3b6] self.wait()
[ 531.018476] env[61839]: ERROR nova.compute.manager [instance: c258dbf2-be81-40e5-a11a-03dee332d3b6] File "/opt/stack/nova/nova/network/model.py", line 635, in wait
[ 531.018476] env[61839]: ERROR nova.compute.manager [instance: c258dbf2-be81-40e5-a11a-03dee332d3b6] self[:] = self._gt.wait()
[ 531.018476] env[61839]: ERROR nova.compute.manager [instance: c258dbf2-be81-40e5-a11a-03dee332d3b6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait
[ 531.018476] env[61839]: ERROR nova.compute.manager [instance: c258dbf2-be81-40e5-a11a-03dee332d3b6] return self._exit_event.wait()
[ 531.018476] env[61839]: ERROR nova.compute.manager [instance: c258dbf2-be81-40e5-a11a-03dee332d3b6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 531.018476] env[61839]: ERROR nova.compute.manager [instance: c258dbf2-be81-40e5-a11a-03dee332d3b6] result = hub.switch()
[ 531.018476] env[61839]: ERROR nova.compute.manager [instance: c258dbf2-be81-40e5-a11a-03dee332d3b6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 531.018476] env[61839]: ERROR nova.compute.manager [instance: c258dbf2-be81-40e5-a11a-03dee332d3b6] return self.greenlet.switch()
[ 531.018847] env[61839]: ERROR nova.compute.manager [instance: c258dbf2-be81-40e5-a11a-03dee332d3b6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 531.018847] env[61839]: ERROR nova.compute.manager [instance: c258dbf2-be81-40e5-a11a-03dee332d3b6] result = function(*args, **kwargs)
[ 531.018847] env[61839]: ERROR nova.compute.manager [instance: c258dbf2-be81-40e5-a11a-03dee332d3b6] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 531.018847] env[61839]: ERROR nova.compute.manager [instance: c258dbf2-be81-40e5-a11a-03dee332d3b6] return func(*args, **kwargs)
[ 531.018847] env[61839]: ERROR nova.compute.manager [instance: c258dbf2-be81-40e5-a11a-03dee332d3b6] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async
[ 531.018847] env[61839]: ERROR nova.compute.manager [instance: c258dbf2-be81-40e5-a11a-03dee332d3b6] raise e
[ 531.018847] env[61839]: ERROR nova.compute.manager [instance: c258dbf2-be81-40e5-a11a-03dee332d3b6] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async
[ 531.018847] env[61839]: ERROR nova.compute.manager [instance: c258dbf2-be81-40e5-a11a-03dee332d3b6] nwinfo = self.network_api.allocate_for_instance(
[ 531.018847] env[61839]: ERROR nova.compute.manager [instance: c258dbf2-be81-40e5-a11a-03dee332d3b6] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance
[ 531.018847] env[61839]: ERROR nova.compute.manager [instance: c258dbf2-be81-40e5-a11a-03dee332d3b6] created_port_ids = self._update_ports_for_instance(
[ 531.018847] env[61839]: ERROR nova.compute.manager [instance: c258dbf2-be81-40e5-a11a-03dee332d3b6] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance
[ 531.018847] env[61839]: ERROR nova.compute.manager [instance: c258dbf2-be81-40e5-a11a-03dee332d3b6] with excutils.save_and_reraise_exception():
[ 531.018847] env[61839]: ERROR nova.compute.manager [instance: c258dbf2-be81-40e5-a11a-03dee332d3b6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 531.019226] env[61839]: ERROR nova.compute.manager [instance: c258dbf2-be81-40e5-a11a-03dee332d3b6] self.force_reraise()
[ 531.019226] env[61839]: ERROR nova.compute.manager [instance: c258dbf2-be81-40e5-a11a-03dee332d3b6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 531.019226] env[61839]: ERROR nova.compute.manager [instance: c258dbf2-be81-40e5-a11a-03dee332d3b6] raise self.value
[ 531.019226] env[61839]: ERROR nova.compute.manager [instance: c258dbf2-be81-40e5-a11a-03dee332d3b6] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance
[ 531.019226] env[61839]: ERROR nova.compute.manager [instance: c258dbf2-be81-40e5-a11a-03dee332d3b6] updated_port = self._update_port(
[ 531.019226] env[61839]: ERROR nova.compute.manager [instance: c258dbf2-be81-40e5-a11a-03dee332d3b6] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port
[ 531.019226] env[61839]: ERROR nova.compute.manager [instance: c258dbf2-be81-40e5-a11a-03dee332d3b6] _ensure_no_port_binding_failure(port)
[ 531.019226] env[61839]: ERROR nova.compute.manager [instance: c258dbf2-be81-40e5-a11a-03dee332d3b6] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure
[ 531.019226] env[61839]: ERROR nova.compute.manager [instance: c258dbf2-be81-40e5-a11a-03dee332d3b6] raise exception.PortBindingFailed(port_id=port['id'])
[ 531.019226] env[61839]: ERROR nova.compute.manager [instance: c258dbf2-be81-40e5-a11a-03dee332d3b6] nova.exception.PortBindingFailed: Binding failed for port 50fc3cdf-49e3-4a6a-8a5c-8068eb6cd193, please check neutron logs for more information.
[ 531.019226] env[61839]: ERROR nova.compute.manager [instance: c258dbf2-be81-40e5-a11a-03dee332d3b6]
[ 531.019544] env[61839]: DEBUG nova.compute.utils [None req-71b74068-a47a-43e7-91f8-80e309405090 tempest-ServerDiagnosticsTest-192461370 tempest-ServerDiagnosticsTest-192461370-project-member] [instance: c258dbf2-be81-40e5-a11a-03dee332d3b6] Binding failed for port 50fc3cdf-49e3-4a6a-8a5c-8068eb6cd193, please check neutron logs for more information. {{(pid=61839) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}}
[ 531.021273] env[61839]: DEBUG oslo_concurrency.lockutils [None req-67c06703-2c83-4f84-94a3-1193ea8f10d4 tempest-ImagesNegativeTestJSON-313379827 tempest-ImagesNegativeTestJSON-313379827-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 8.153s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 531.030680] env[61839]: DEBUG nova.compute.manager [None req-71b74068-a47a-43e7-91f8-80e309405090 tempest-ServerDiagnosticsTest-192461370 tempest-ServerDiagnosticsTest-192461370-project-member] [instance: c258dbf2-be81-40e5-a11a-03dee332d3b6] Build of instance c258dbf2-be81-40e5-a11a-03dee332d3b6 was re-scheduled: Binding failed for port 50fc3cdf-49e3-4a6a-8a5c-8068eb6cd193, please check neutron logs for more information.
{{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 531.030822] env[61839]: DEBUG nova.compute.manager [None req-71b74068-a47a-43e7-91f8-80e309405090 tempest-ServerDiagnosticsTest-192461370 tempest-ServerDiagnosticsTest-192461370-project-member] [instance: c258dbf2-be81-40e5-a11a-03dee332d3b6] Unplugging VIFs for instance {{(pid=61839) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 531.031010] env[61839]: DEBUG oslo_concurrency.lockutils [None req-71b74068-a47a-43e7-91f8-80e309405090 tempest-ServerDiagnosticsTest-192461370 tempest-ServerDiagnosticsTest-192461370-project-member] Acquiring lock "refresh_cache-c258dbf2-be81-40e5-a11a-03dee332d3b6" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 531.031161] env[61839]: DEBUG oslo_concurrency.lockutils [None req-71b74068-a47a-43e7-91f8-80e309405090 tempest-ServerDiagnosticsTest-192461370 tempest-ServerDiagnosticsTest-192461370-project-member] Acquired lock "refresh_cache-c258dbf2-be81-40e5-a11a-03dee332d3b6" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 531.031323] env[61839]: DEBUG nova.network.neutron [None req-71b74068-a47a-43e7-91f8-80e309405090 tempest-ServerDiagnosticsTest-192461370 tempest-ServerDiagnosticsTest-192461370-project-member] [instance: c258dbf2-be81-40e5-a11a-03dee332d3b6] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 531.114950] env[61839]: DEBUG oslo_vmware.api [None req-4249fffb-3702-4570-9f57-2cbc28b164c9 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Task: {'id': task-1314227, 'name': PowerOnVM_Task, 'duration_secs': 0.475403} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 531.115265] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-4249fffb-3702-4570-9f57-2cbc28b164c9 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] [instance: 211e8267-3c33-42c8-852f-1c20d7987453] Powered on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 531.115450] env[61839]: INFO nova.compute.manager [None req-4249fffb-3702-4570-9f57-2cbc28b164c9 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] [instance: 211e8267-3c33-42c8-852f-1c20d7987453] Took 8.62 seconds to spawn the instance on the hypervisor. [ 531.115709] env[61839]: DEBUG nova.compute.manager [None req-4249fffb-3702-4570-9f57-2cbc28b164c9 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] [instance: 211e8267-3c33-42c8-852f-1c20d7987453] Checking state {{(pid=61839) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 531.116830] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-539a8c99-56a3-4746-a1b9-036d28e50dbf {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 531.403943] env[61839]: DEBUG oslo_vmware.api [None req-f8fd2f30-7a32-47b5-822d-e89a7c694abc tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] Task: {'id': task-1314228, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.023504} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 531.403943] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-f8fd2f30-7a32-47b5-822d-e89a7c694abc tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] Deleted the datastore file {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 531.403943] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-f8fd2f30-7a32-47b5-822d-e89a7c694abc tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] Moving file from [datastore2] vmware_temp/2c10bf97-0f63-47a7-8f5f-f20c364b5fa5/e497cc62-282a-4a70-9770-22d80d8a1013 to [datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013. {{(pid=61839) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:334}} [ 531.403943] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.MoveDatastoreFile_Task with opID=oslo.vmware-458fc4da-dec7-452c-a074-2d9024a0e841 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 531.409523] env[61839]: DEBUG oslo_vmware.api [None req-f8fd2f30-7a32-47b5-822d-e89a7c694abc tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] Waiting for the task: (returnval){ [ 531.409523] env[61839]: value = "task-1314229" [ 531.409523] env[61839]: _type = "Task" [ 531.409523] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 531.418412] env[61839]: DEBUG oslo_vmware.api [None req-f8fd2f30-7a32-47b5-822d-e89a7c694abc tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] Task: {'id': task-1314229, 'name': MoveDatastoreFile_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 531.469282] env[61839]: INFO nova.compute.manager [None req-12eb9dfa-b92e-47f2-a67a-cc399fb0a366 tempest-TenantUsagesTestJSON-1491461262 tempest-TenantUsagesTestJSON-1491461262-project-member] [instance: 95685d36-7efa-42a9-8722-5b90d6edbce5] Took 1.04 seconds to deallocate network for instance. [ 531.494752] env[61839]: ERROR nova.compute.manager [None req-2194692e-8fa8-43a0-9f32-21d13bbfb66f tempest-ServersTestFqdnHostnames-1400549167 tempest-ServersTestFqdnHostnames-1400549167-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 16fa8de1-60b8-4e77-bc49-e40967dff057, please check neutron logs for more information. 
[ 531.494752] env[61839]: ERROR nova.compute.manager Traceback (most recent call last):
[ 531.494752] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async
[ 531.494752] env[61839]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance(
[ 531.494752] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance
[ 531.494752] env[61839]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance(
[ 531.494752] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance
[ 531.494752] env[61839]: ERROR nova.compute.manager with excutils.save_and_reraise_exception():
[ 531.494752] env[61839]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 531.494752] env[61839]: ERROR nova.compute.manager self.force_reraise()
[ 531.494752] env[61839]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 531.494752] env[61839]: ERROR nova.compute.manager raise self.value
[ 531.494752] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance
[ 531.494752] env[61839]: ERROR nova.compute.manager updated_port = self._update_port(
[ 531.494752] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port
[ 531.494752] env[61839]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port)
[ 531.495857] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure
[ 531.495857] env[61839]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id'])
[ 531.495857] env[61839]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 16fa8de1-60b8-4e77-bc49-e40967dff057, please check neutron logs for more information.
[ 531.495857] env[61839]: ERROR nova.compute.manager
[ 531.495857] env[61839]: Traceback (most recent call last):
[ 531.495857] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait
[ 531.495857] env[61839]: listener.cb(fileno)
[ 531.495857] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 531.495857] env[61839]: result = function(*args, **kwargs)
[ 531.495857] env[61839]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 531.495857] env[61839]: return func(*args, **kwargs)
[ 531.495857] env[61839]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async
[ 531.495857] env[61839]: raise e
[ 531.495857] env[61839]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async
[ 531.495857] env[61839]: nwinfo = self.network_api.allocate_for_instance(
[ 531.495857] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance
[ 531.495857] env[61839]: created_port_ids = self._update_ports_for_instance(
[ 531.495857] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance
[ 531.495857] env[61839]: with excutils.save_and_reraise_exception():
[ 531.495857] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 531.495857] env[61839]: self.force_reraise()
[ 531.495857] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 531.495857] env[61839]: raise self.value
[ 531.495857] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance
[ 531.495857] env[61839]: updated_port = self._update_port(
[ 531.495857] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port
[ 531.495857] env[61839]: _ensure_no_port_binding_failure(port)
[ 531.495857] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure
[ 531.495857] env[61839]: raise exception.PortBindingFailed(port_id=port['id'])
[ 531.497489] env[61839]: nova.exception.PortBindingFailed: Binding failed for port 16fa8de1-60b8-4e77-bc49-e40967dff057, please check neutron logs for more information.
[ 531.497489] env[61839]: Removing descriptor: 17
[ 531.497489] env[61839]: ERROR nova.compute.manager [None req-2194692e-8fa8-43a0-9f32-21d13bbfb66f tempest-ServersTestFqdnHostnames-1400549167 tempest-ServersTestFqdnHostnames-1400549167-project-member] [instance: 6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 16fa8de1-60b8-4e77-bc49-e40967dff057, please check neutron logs for more information.
[ 531.497489] env[61839]: ERROR nova.compute.manager [instance: 6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5] Traceback (most recent call last):
[ 531.497489] env[61839]: ERROR nova.compute.manager [instance: 6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources
[ 531.497489] env[61839]: ERROR nova.compute.manager [instance: 6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5] yield resources
[ 531.497489] env[61839]: ERROR nova.compute.manager [instance: 6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance
[ 531.497489] env[61839]: ERROR nova.compute.manager [instance: 6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5] self.driver.spawn(context, instance, image_meta,
[ 531.497489] env[61839]: ERROR nova.compute.manager [instance: 6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn
[ 531.497489] env[61839]: ERROR nova.compute.manager [instance: 6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 531.497489] env[61839]: ERROR nova.compute.manager [instance: 6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn
[ 531.497489] env[61839]: ERROR nova.compute.manager [instance: 6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5] vm_ref = self.build_virtual_machine(instance,
[ 531.497961] env[61839]: ERROR nova.compute.manager [instance: 6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine
[ 531.497961] env[61839]: ERROR nova.compute.manager [instance: 6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5] vif_infos = vmwarevif.get_vif_info(self._session,
[ 531.497961] env[61839]: ERROR nova.compute.manager [instance: 6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info
[ 531.497961] env[61839]: ERROR nova.compute.manager [instance: 6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5] for vif in network_info:
[ 531.497961] env[61839]: ERROR nova.compute.manager [instance: 6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__
[ 531.497961] env[61839]: ERROR nova.compute.manager [instance: 6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5] return self._sync_wrapper(fn, *args, **kwargs)
[ 531.497961] env[61839]: ERROR nova.compute.manager [instance: 6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper
[ 531.497961] env[61839]: ERROR nova.compute.manager [instance: 6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5] self.wait()
[ 531.497961] env[61839]: ERROR nova.compute.manager [instance: 6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5] File "/opt/stack/nova/nova/network/model.py", line 635, in wait
[ 531.497961] env[61839]: ERROR nova.compute.manager [instance: 6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5] self[:] = self._gt.wait()
[ 531.497961] env[61839]: ERROR nova.compute.manager [instance: 6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait
[ 531.497961] env[61839]: ERROR nova.compute.manager [instance: 6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5] return self._exit_event.wait()
[ 531.497961] env[61839]: ERROR nova.compute.manager [instance: 6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 531.498335] env[61839]: ERROR nova.compute.manager [instance: 6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5] result = hub.switch()
[ 531.498335] env[61839]: ERROR nova.compute.manager [instance: 6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 531.498335] env[61839]: ERROR nova.compute.manager [instance: 6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5] return self.greenlet.switch()
[ 531.498335] env[61839]: ERROR nova.compute.manager [instance: 6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 531.498335] env[61839]: ERROR nova.compute.manager [instance: 6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5] result = function(*args, **kwargs)
[ 531.498335] env[61839]: ERROR nova.compute.manager [instance: 6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 531.498335] env[61839]: ERROR nova.compute.manager [instance: 6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5] return func(*args, **kwargs)
[ 531.498335] env[61839]: ERROR nova.compute.manager [instance: 6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async
[ 531.498335] env[61839]: ERROR nova.compute.manager [instance: 6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5] raise e
[ 531.498335] env[61839]: ERROR nova.compute.manager [instance: 6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async
[ 531.498335] env[61839]: ERROR nova.compute.manager [instance: 6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5] nwinfo = self.network_api.allocate_for_instance(
[ 531.498335] env[61839]: ERROR nova.compute.manager [instance: 6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance
[ 531.498335] env[61839]: ERROR nova.compute.manager [instance: 6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5] created_port_ids = self._update_ports_for_instance(
[ 531.499253] env[61839]: ERROR nova.compute.manager [instance: 6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance
[ 531.499253] env[61839]: ERROR nova.compute.manager [instance: 6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5] with excutils.save_and_reraise_exception():
[ 531.499253] env[61839]: ERROR nova.compute.manager [instance: 6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 531.499253] env[61839]: ERROR nova.compute.manager [instance: 6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5] self.force_reraise()
[ 531.499253] env[61839]: ERROR nova.compute.manager [instance: 6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 531.499253] env[61839]: ERROR nova.compute.manager [instance: 6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5] raise self.value
[ 531.499253] env[61839]: ERROR nova.compute.manager [instance: 6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance
[ 531.499253] env[61839]: ERROR nova.compute.manager [instance: 6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5] updated_port = self._update_port(
[ 531.499253] env[61839]: ERROR nova.compute.manager [instance: 6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port
[ 531.499253] env[61839]: ERROR nova.compute.manager [instance: 6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5] _ensure_no_port_binding_failure(port)
[ 531.499253] env[61839]: ERROR nova.compute.manager [instance: 6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure
[ 531.499253] env[61839]: ERROR nova.compute.manager [instance: 6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5] raise exception.PortBindingFailed(port_id=port['id'])
[ 531.499961] env[61839]: ERROR nova.compute.manager [instance: 6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5] nova.exception.PortBindingFailed: Binding failed for port 16fa8de1-60b8-4e77-bc49-e40967dff057, please check neutron logs for more information.
[ 531.499961] env[61839]: ERROR nova.compute.manager [instance: 6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5]
[ 531.499961] env[61839]: INFO nova.compute.manager [None req-2194692e-8fa8-43a0-9f32-21d13bbfb66f tempest-ServersTestFqdnHostnames-1400549167 tempest-ServersTestFqdnHostnames-1400549167-project-member] [instance: 6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5] Terminating instance
[ 531.502763] env[61839]: DEBUG oslo_concurrency.lockutils [None req-2194692e-8fa8-43a0-9f32-21d13bbfb66f tempest-ServersTestFqdnHostnames-1400549167 tempest-ServersTestFqdnHostnames-1400549167-project-member] Acquiring lock "refresh_cache-6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 531.502940] env[61839]: DEBUG oslo_concurrency.lockutils [None req-2194692e-8fa8-43a0-9f32-21d13bbfb66f tempest-ServersTestFqdnHostnames-1400549167 tempest-ServersTestFqdnHostnames-1400549167-project-member] Acquired lock "refresh_cache-6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 531.503135] env[61839]: DEBUG nova.network.neutron [None req-2194692e-8fa8-43a0-9f32-21d13bbfb66f tempest-ServersTestFqdnHostnames-1400549167 tempest-ServersTestFqdnHostnames-1400549167-project-member] [instance: 6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}}
[ 531.578717] env[61839]: DEBUG nova.network.neutron [None req-71b74068-a47a-43e7-91f8-80e309405090 tempest-ServerDiagnosticsTest-192461370 tempest-ServerDiagnosticsTest-192461370-project-member] [instance: c258dbf2-be81-40e5-a11a-03dee332d3b6] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 531.642856] env[61839]: INFO nova.compute.manager [None req-4249fffb-3702-4570-9f57-2cbc28b164c9 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] [instance: 211e8267-3c33-42c8-852f-1c20d7987453] Took 13.44 seconds to build instance.
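[Editor's note] The two PortBindingFailed tracebacks above (instances c258dbf2-be81-40e5-a11a-03dee332d3b6 and 6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5), like the one for be976871-706d-41e1-9423-55ff251a52e9 further down, all terminate in the same guard at /opt/stack/nova/nova/network/neutron.py:294. The sketch below reconstructs that check from the traceback alone: the function and exception names are taken from the log, but the body and the 'binding_failed' marker value are assumptions, not verbatim Nova source. Neutron reports the outcome of port binding in the port's binding:vif_type field, and Nova raises as soon as it sees the failure marker:

    # Reconstructed sketch of the guard named in the tracebacks above;
    # an approximation, not the exact Nova source.
    VIF_TYPE_BINDING_FAILED = 'binding_failed'  # assumed marker value

    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            # Message format copied from the log lines above.
            super().__init__('Binding failed for port %s, please check '
                             'neutron logs for more information.' % port_id)

    def _ensure_no_port_binding_failure(port):
        # 'port' is the port dict returned by the Neutron API.
        if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
            raise PortBindingFailed(port_id=port['id'])

    # Example with a port dict shaped like the one behind the first failure:
    port = {'id': '50fc3cdf-49e3-4a6a-8a5c-8068eb6cd193',
            'binding:vif_type': VIF_TYPE_BINDING_FAILED}
    _ensure_no_port_binding_failure(port)  # raises PortBindingFailed

Once the exception escapes _update_ports_for_instance, save_and_reraise_exception propagates it out of _allocate_network_async; the compute manager then aborts the resource claim (the "abort_instance_claim" lock messages above) and re-schedules the build, which is exactly the sequence the surrounding records show.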
[ 531.722303] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b7c54fb-3027-444f-b93b-752b8395f36b {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 531.731763] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3926662-d49a-4534-a0be-b564f600baba {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 531.765031] env[61839]: DEBUG nova.network.neutron [None req-71b74068-a47a-43e7-91f8-80e309405090 tempest-ServerDiagnosticsTest-192461370 tempest-ServerDiagnosticsTest-192461370-project-member] [instance: c258dbf2-be81-40e5-a11a-03dee332d3b6] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 531.766636] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18cf2189-d5cd-4d4c-b86c-8d187e20b348 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 531.776662] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2f329cc-ad1f-4648-beb3-419efab1f42f {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 531.792454] env[61839]: DEBUG nova.compute.provider_tree [None req-67c06703-2c83-4f84-94a3-1193ea8f10d4 tempest-ImagesNegativeTestJSON-313379827 tempest-ImagesNegativeTestJSON-313379827-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 531.922063] env[61839]: DEBUG oslo_vmware.api [None req-f8fd2f30-7a32-47b5-822d-e89a7c694abc tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] Task: {'id': task-1314229, 'name': MoveDatastoreFile_Task, 'duration_secs': 0.02372} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 531.922357] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-f8fd2f30-7a32-47b5-822d-e89a7c694abc tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] File moved {{(pid=61839) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:346}} [ 531.923505] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-f8fd2f30-7a32-47b5-822d-e89a7c694abc tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] [instance: 41823a25-5ff2-4838-854d-5bada8e5daca] Cleaning up location [datastore2] vmware_temp/2c10bf97-0f63-47a7-8f5f-f20c364b5fa5 {{(pid=61839) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 531.923505] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-f8fd2f30-7a32-47b5-822d-e89a7c694abc tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] Deleting the datastore file [datastore2] vmware_temp/2c10bf97-0f63-47a7-8f5f-f20c364b5fa5 {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 531.923505] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a3d119db-e922-460b-ae53-8e5539d43b47 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 531.931337] env[61839]: DEBUG oslo_vmware.api [None req-f8fd2f30-7a32-47b5-822d-e89a7c694abc tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] Waiting for the task: (returnval){ [ 531.931337] env[61839]: value = "task-1314230" [ 531.931337] env[61839]: _type = "Task" [ 531.931337] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 531.942290] env[61839]: DEBUG oslo_vmware.api [None req-f8fd2f30-7a32-47b5-822d-e89a7c694abc tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] Task: {'id': task-1314230, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 532.080653] env[61839]: DEBUG nova.network.neutron [None req-2194692e-8fa8-43a0-9f32-21d13bbfb66f tempest-ServersTestFqdnHostnames-1400549167 tempest-ServersTestFqdnHostnames-1400549167-project-member] [instance: 6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5] Instance cache missing network info. 
{{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 532.145891] env[61839]: DEBUG oslo_concurrency.lockutils [None req-4249fffb-3702-4570-9f57-2cbc28b164c9 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Lock "211e8267-3c33-42c8-852f-1c20d7987453" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.949s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 532.146170] env[61839]: DEBUG oslo_concurrency.lockutils [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Lock "211e8267-3c33-42c8-852f-1c20d7987453" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 6.613s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 532.146368] env[61839]: INFO nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] [instance: 211e8267-3c33-42c8-852f-1c20d7987453] During sync_power_state the instance has a pending task (spawning). Skip. [ 532.146542] env[61839]: DEBUG oslo_concurrency.lockutils [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Lock "211e8267-3c33-42c8-852f-1c20d7987453" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 532.271132] env[61839]: DEBUG oslo_concurrency.lockutils [None req-71b74068-a47a-43e7-91f8-80e309405090 tempest-ServerDiagnosticsTest-192461370 tempest-ServerDiagnosticsTest-192461370-project-member] Releasing lock "refresh_cache-c258dbf2-be81-40e5-a11a-03dee332d3b6" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 532.271315] env[61839]: DEBUG nova.compute.manager [None req-71b74068-a47a-43e7-91f8-80e309405090 tempest-ServerDiagnosticsTest-192461370 tempest-ServerDiagnosticsTest-192461370-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61839) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 532.271480] env[61839]: DEBUG nova.compute.manager [None req-71b74068-a47a-43e7-91f8-80e309405090 tempest-ServerDiagnosticsTest-192461370 tempest-ServerDiagnosticsTest-192461370-project-member] [instance: c258dbf2-be81-40e5-a11a-03dee332d3b6] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 532.271892] env[61839]: DEBUG nova.network.neutron [None req-71b74068-a47a-43e7-91f8-80e309405090 tempest-ServerDiagnosticsTest-192461370 tempest-ServerDiagnosticsTest-192461370-project-member] [instance: c258dbf2-be81-40e5-a11a-03dee332d3b6] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 532.296500] env[61839]: DEBUG nova.scheduler.client.report [None req-67c06703-2c83-4f84-94a3-1193ea8f10d4 tempest-ImagesNegativeTestJSON-313379827 tempest-ImagesNegativeTestJSON-313379827-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 532.308970] env[61839]: DEBUG nova.network.neutron [None req-71b74068-a47a-43e7-91f8-80e309405090 tempest-ServerDiagnosticsTest-192461370 tempest-ServerDiagnosticsTest-192461370-project-member] [instance: c258dbf2-be81-40e5-a11a-03dee332d3b6] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 532.380340] env[61839]: DEBUG nova.network.neutron [None req-2194692e-8fa8-43a0-9f32-21d13bbfb66f tempest-ServersTestFqdnHostnames-1400549167 tempest-ServersTestFqdnHostnames-1400549167-project-member] [instance: 6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 532.444877] env[61839]: DEBUG oslo_vmware.api [None req-f8fd2f30-7a32-47b5-822d-e89a7c694abc tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] Task: {'id': task-1314230, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.025547} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 532.445147] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-f8fd2f30-7a32-47b5-822d-e89a7c694abc tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] Deleted the datastore file {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 532.446599] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-96ea07ec-fc82-4567-ad06-531dbff8bbe4 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 532.451849] env[61839]: DEBUG oslo_vmware.api [None req-f8fd2f30-7a32-47b5-822d-e89a7c694abc tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] Waiting for the task: (returnval){ [ 532.451849] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52877a9d-0105-7d2b-ae36-4aeb478931fb" [ 532.451849] env[61839]: _type = "Task" [ 532.451849] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 532.462286] env[61839]: DEBUG oslo_vmware.api [None req-f8fd2f30-7a32-47b5-822d-e89a7c694abc tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52877a9d-0105-7d2b-ae36-4aeb478931fb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 532.515421] env[61839]: INFO nova.scheduler.client.report [None req-12eb9dfa-b92e-47f2-a67a-cc399fb0a366 tempest-TenantUsagesTestJSON-1491461262 tempest-TenantUsagesTestJSON-1491461262-project-member] Deleted allocations for instance 95685d36-7efa-42a9-8722-5b90d6edbce5 [ 532.558922] env[61839]: DEBUG oslo_concurrency.lockutils [None req-285e2342-47a9-4a7d-855a-b08b1b98031b tempest-MigrationsAdminTest-1877210458 tempest-MigrationsAdminTest-1877210458-project-member] Acquiring lock "248ced8d-af78-4400-b9eb-449851b1bfa3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 532.558922] env[61839]: DEBUG oslo_concurrency.lockutils [None req-285e2342-47a9-4a7d-855a-b08b1b98031b tempest-MigrationsAdminTest-1877210458 tempest-MigrationsAdminTest-1877210458-project-member] Lock "248ced8d-af78-4400-b9eb-449851b1bfa3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 532.804819] env[61839]: DEBUG oslo_concurrency.lockutils [None req-67c06703-2c83-4f84-94a3-1193ea8f10d4 tempest-ImagesNegativeTestJSON-313379827 tempest-ImagesNegativeTestJSON-313379827-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.785s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 532.805453] env[61839]: ERROR nova.compute.manager [None req-67c06703-2c83-4f84-94a3-1193ea8f10d4 tempest-ImagesNegativeTestJSON-313379827 tempest-ImagesNegativeTestJSON-313379827-project-member] [instance: 
be976871-706d-41e1-9423-55ff251a52e9] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 93010f8d-86ce-4203-ac82-ff57a4f2d76b, please check neutron logs for more information.
[ 532.805453] env[61839]: ERROR nova.compute.manager [instance: be976871-706d-41e1-9423-55ff251a52e9] Traceback (most recent call last):
[ 532.805453] env[61839]: ERROR nova.compute.manager [instance: be976871-706d-41e1-9423-55ff251a52e9] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance
[ 532.805453] env[61839]: ERROR nova.compute.manager [instance: be976871-706d-41e1-9423-55ff251a52e9] self.driver.spawn(context, instance, image_meta,
[ 532.805453] env[61839]: ERROR nova.compute.manager [instance: be976871-706d-41e1-9423-55ff251a52e9] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn
[ 532.805453] env[61839]: ERROR nova.compute.manager [instance: be976871-706d-41e1-9423-55ff251a52e9] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 532.805453] env[61839]: ERROR nova.compute.manager [instance: be976871-706d-41e1-9423-55ff251a52e9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn
[ 532.805453] env[61839]: ERROR nova.compute.manager [instance: be976871-706d-41e1-9423-55ff251a52e9] vm_ref = self.build_virtual_machine(instance,
[ 532.805453] env[61839]: ERROR nova.compute.manager [instance: be976871-706d-41e1-9423-55ff251a52e9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine
[ 532.805453] env[61839]: ERROR nova.compute.manager [instance: be976871-706d-41e1-9423-55ff251a52e9] vif_infos = vmwarevif.get_vif_info(self._session,
[ 532.805453] env[61839]: ERROR nova.compute.manager [instance: be976871-706d-41e1-9423-55ff251a52e9] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info
[ 532.805806] env[61839]: ERROR nova.compute.manager [instance: be976871-706d-41e1-9423-55ff251a52e9] for vif in network_info:
[ 532.805806] env[61839]: ERROR nova.compute.manager [instance: be976871-706d-41e1-9423-55ff251a52e9] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__
[ 532.805806] env[61839]: ERROR nova.compute.manager [instance: be976871-706d-41e1-9423-55ff251a52e9] return self._sync_wrapper(fn, *args, **kwargs)
[ 532.805806] env[61839]: ERROR nova.compute.manager [instance: be976871-706d-41e1-9423-55ff251a52e9] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper
[ 532.805806] env[61839]: ERROR nova.compute.manager [instance: be976871-706d-41e1-9423-55ff251a52e9] self.wait()
[ 532.805806] env[61839]: ERROR nova.compute.manager [instance: be976871-706d-41e1-9423-55ff251a52e9] File "/opt/stack/nova/nova/network/model.py", line 635, in wait
[ 532.805806] env[61839]: ERROR nova.compute.manager [instance: be976871-706d-41e1-9423-55ff251a52e9] self[:] = self._gt.wait()
[ 532.805806] env[61839]: ERROR nova.compute.manager [instance: be976871-706d-41e1-9423-55ff251a52e9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait
[ 532.805806] env[61839]: ERROR nova.compute.manager [instance: be976871-706d-41e1-9423-55ff251a52e9] return self._exit_event.wait()
[ 532.805806] env[61839]: ERROR nova.compute.manager [instance: be976871-706d-41e1-9423-55ff251a52e9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 532.805806] env[61839]: ERROR nova.compute.manager [instance: be976871-706d-41e1-9423-55ff251a52e9] result = hub.switch()
[ 532.805806] env[61839]: ERROR nova.compute.manager [instance: be976871-706d-41e1-9423-55ff251a52e9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 532.805806] env[61839]: ERROR nova.compute.manager [instance: be976871-706d-41e1-9423-55ff251a52e9] return self.greenlet.switch()
[ 532.806209] env[61839]: ERROR nova.compute.manager [instance: be976871-706d-41e1-9423-55ff251a52e9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 532.806209] env[61839]: ERROR nova.compute.manager [instance: be976871-706d-41e1-9423-55ff251a52e9] result = function(*args, **kwargs)
[ 532.806209] env[61839]: ERROR nova.compute.manager [instance: be976871-706d-41e1-9423-55ff251a52e9] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 532.806209] env[61839]: ERROR nova.compute.manager [instance: be976871-706d-41e1-9423-55ff251a52e9] return func(*args, **kwargs)
[ 532.806209] env[61839]: ERROR nova.compute.manager [instance: be976871-706d-41e1-9423-55ff251a52e9] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async
[ 532.806209] env[61839]: ERROR nova.compute.manager [instance: be976871-706d-41e1-9423-55ff251a52e9] raise e
[ 532.806209] env[61839]: ERROR nova.compute.manager [instance: be976871-706d-41e1-9423-55ff251a52e9] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async
[ 532.806209] env[61839]: ERROR nova.compute.manager [instance: be976871-706d-41e1-9423-55ff251a52e9] nwinfo = self.network_api.allocate_for_instance(
[ 532.806209] env[61839]: ERROR nova.compute.manager [instance: be976871-706d-41e1-9423-55ff251a52e9] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance
[ 532.806209] env[61839]: ERROR nova.compute.manager [instance: be976871-706d-41e1-9423-55ff251a52e9] created_port_ids = self._update_ports_for_instance(
[ 532.806209] env[61839]: ERROR nova.compute.manager [instance: be976871-706d-41e1-9423-55ff251a52e9] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance
[ 532.806209] env[61839]: ERROR nova.compute.manager [instance: be976871-706d-41e1-9423-55ff251a52e9] with excutils.save_and_reraise_exception():
[ 532.806209] env[61839]: ERROR nova.compute.manager [instance: be976871-706d-41e1-9423-55ff251a52e9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 532.806606] env[61839]: ERROR nova.compute.manager [instance: be976871-706d-41e1-9423-55ff251a52e9] self.force_reraise()
[ 532.806606] env[61839]: ERROR nova.compute.manager [instance: be976871-706d-41e1-9423-55ff251a52e9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 532.806606] env[61839]: ERROR nova.compute.manager [instance: be976871-706d-41e1-9423-55ff251a52e9] raise self.value
[ 532.806606] env[61839]: ERROR nova.compute.manager [instance: be976871-706d-41e1-9423-55ff251a52e9] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance
[ 532.806606] env[61839]: ERROR nova.compute.manager [instance: be976871-706d-41e1-9423-55ff251a52e9] updated_port = self._update_port(
[ 532.806606] env[61839]: ERROR nova.compute.manager [instance: be976871-706d-41e1-9423-55ff251a52e9] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port
[ 532.806606] env[61839]: ERROR nova.compute.manager [instance: be976871-706d-41e1-9423-55ff251a52e9] _ensure_no_port_binding_failure(port)
[ 532.806606] env[61839]: ERROR nova.compute.manager [instance: be976871-706d-41e1-9423-55ff251a52e9] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure
[ 532.806606] env[61839]: ERROR nova.compute.manager [instance: be976871-706d-41e1-9423-55ff251a52e9] raise exception.PortBindingFailed(port_id=port['id'])
[ 532.806606] env[61839]: ERROR nova.compute.manager [instance: be976871-706d-41e1-9423-55ff251a52e9] nova.exception.PortBindingFailed: Binding failed for port 93010f8d-86ce-4203-ac82-ff57a4f2d76b, please check neutron logs for more information.
[ 532.806606] env[61839]: ERROR nova.compute.manager [instance: be976871-706d-41e1-9423-55ff251a52e9]
[ 532.807049] env[61839]: DEBUG nova.compute.utils [None req-67c06703-2c83-4f84-94a3-1193ea8f10d4 tempest-ImagesNegativeTestJSON-313379827 tempest-ImagesNegativeTestJSON-313379827-project-member] [instance: be976871-706d-41e1-9423-55ff251a52e9] Binding failed for port 93010f8d-86ce-4203-ac82-ff57a4f2d76b, please check neutron logs for more information. {{(pid=61839) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}}
[ 532.807604] env[61839]: DEBUG oslo_concurrency.lockutils [None req-e2e7743f-119e-4c3a-a685-1f1242f17cae tempest-VolumesAssistedSnapshotsTest-1863052206 tempest-VolumesAssistedSnapshotsTest-1863052206-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.960s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 532.812404] env[61839]: INFO nova.compute.claims [None req-e2e7743f-119e-4c3a-a685-1f1242f17cae tempest-VolumesAssistedSnapshotsTest-1863052206 tempest-VolumesAssistedSnapshotsTest-1863052206-project-member] [instance: 9594f132-d558-4c75-872f-b1d1b7c08f66] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 532.813199] env[61839]: DEBUG nova.compute.manager [None req-67c06703-2c83-4f84-94a3-1193ea8f10d4 tempest-ImagesNegativeTestJSON-313379827 tempest-ImagesNegativeTestJSON-313379827-project-member] [instance: be976871-706d-41e1-9423-55ff251a52e9] Build of instance be976871-706d-41e1-9423-55ff251a52e9 was re-scheduled: Binding failed for port 93010f8d-86ce-4203-ac82-ff57a4f2d76b, please check neutron logs for more information. {{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}}
[ 532.813642] env[61839]: DEBUG nova.compute.manager [None req-67c06703-2c83-4f84-94a3-1193ea8f10d4 tempest-ImagesNegativeTestJSON-313379827 tempest-ImagesNegativeTestJSON-313379827-project-member] [instance: be976871-706d-41e1-9423-55ff251a52e9] Unplugging VIFs for instance {{(pid=61839) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}}
[ 532.813861] env[61839]: DEBUG oslo_concurrency.lockutils [None req-67c06703-2c83-4f84-94a3-1193ea8f10d4 tempest-ImagesNegativeTestJSON-313379827 tempest-ImagesNegativeTestJSON-313379827-project-member] Acquiring lock "refresh_cache-be976871-706d-41e1-9423-55ff251a52e9" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 532.814028] env[61839]: DEBUG oslo_concurrency.lockutils [None req-67c06703-2c83-4f84-94a3-1193ea8f10d4 tempest-ImagesNegativeTestJSON-313379827 tempest-ImagesNegativeTestJSON-313379827-project-member] Acquired lock "refresh_cache-be976871-706d-41e1-9423-55ff251a52e9" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 532.814161] env[61839]: DEBUG nova.network.neutron [None req-67c06703-2c83-4f84-94a3-1193ea8f10d4 tempest-ImagesNegativeTestJSON-313379827 tempest-ImagesNegativeTestJSON-313379827-project-member] [instance: be976871-706d-41e1-9423-55ff251a52e9] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}}
[ 532.815140] env[61839]: DEBUG nova.network.neutron [None req-71b74068-a47a-43e7-91f8-80e309405090 tempest-ServerDiagnosticsTest-192461370 tempest-ServerDiagnosticsTest-192461370-project-member] [instance: c258dbf2-be81-40e5-a11a-03dee332d3b6] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 532.882438] env[61839]: DEBUG oslo_concurrency.lockutils [None req-2194692e-8fa8-43a0-9f32-21d13bbfb66f tempest-ServersTestFqdnHostnames-1400549167 tempest-ServersTestFqdnHostnames-1400549167-project-member] Releasing lock "refresh_cache-6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 532.882858] env[61839]: DEBUG nova.compute.manager [None req-2194692e-8fa8-43a0-9f32-21d13bbfb66f tempest-ServersTestFqdnHostnames-1400549167 tempest-ServersTestFqdnHostnames-1400549167-project-member] [instance: 6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5] Start destroying the instance on the hypervisor.
{{(pid=61839) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 532.883079] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-2194692e-8fa8-43a0-9f32-21d13bbfb66f tempest-ServersTestFqdnHostnames-1400549167 tempest-ServersTestFqdnHostnames-1400549167-project-member] [instance: 6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5] Destroying instance {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 532.883395] env[61839]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8fa8c495-33cd-4bd9-a76c-665dcabf431d {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 532.895740] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47f06635-2a01-470e-9bb1-b17ef8361efc {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 532.920133] env[61839]: WARNING nova.virt.vmwareapi.vmops [None req-2194692e-8fa8-43a0-9f32-21d13bbfb66f tempest-ServersTestFqdnHostnames-1400549167 tempest-ServersTestFqdnHostnames-1400549167-project-member] [instance: 6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5 could not be found. [ 532.920798] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-2194692e-8fa8-43a0-9f32-21d13bbfb66f tempest-ServersTestFqdnHostnames-1400549167 tempest-ServersTestFqdnHostnames-1400549167-project-member] [instance: 6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5] Instance destroyed {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 532.920798] env[61839]: INFO nova.compute.manager [None req-2194692e-8fa8-43a0-9f32-21d13bbfb66f tempest-ServersTestFqdnHostnames-1400549167 tempest-ServersTestFqdnHostnames-1400549167-project-member] [instance: 6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5] Took 0.04 seconds to destroy the instance on the hypervisor. [ 532.920910] env[61839]: DEBUG oslo.service.loopingcall [None req-2194692e-8fa8-43a0-9f32-21d13bbfb66f tempest-ServersTestFqdnHostnames-1400549167 tempest-ServersTestFqdnHostnames-1400549167-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 532.921064] env[61839]: DEBUG nova.compute.manager [-] [instance: 6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 532.921156] env[61839]: DEBUG nova.network.neutron [-] [instance: 6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 532.963247] env[61839]: DEBUG oslo_vmware.api [None req-f8fd2f30-7a32-47b5-822d-e89a7c694abc tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52877a9d-0105-7d2b-ae36-4aeb478931fb, 'name': SearchDatastore_Task, 'duration_secs': 0.008823} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 532.963621] env[61839]: DEBUG oslo_concurrency.lockutils [None req-f8fd2f30-7a32-47b5-822d-e89a7c694abc tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 532.963757] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-f8fd2f30-7a32-47b5-822d-e89a7c694abc tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk to [datastore2] 41823a25-5ff2-4838-854d-5bada8e5daca/41823a25-5ff2-4838-854d-5bada8e5daca.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 532.964933] env[61839]: DEBUG nova.network.neutron [-] [instance: 6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 532.966991] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-52ee18b2-7d26-4e95-b102-8c98ed61892b {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 532.975636] env[61839]: DEBUG oslo_vmware.api [None req-f8fd2f30-7a32-47b5-822d-e89a7c694abc tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] Waiting for the task: (returnval){ [ 532.975636] env[61839]: value = "task-1314231" [ 532.975636] env[61839]: _type = "Task" [ 532.975636] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 532.986513] env[61839]: DEBUG oslo_vmware.api [None req-f8fd2f30-7a32-47b5-822d-e89a7c694abc tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] Task: {'id': task-1314231, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 533.026122] env[61839]: DEBUG oslo_concurrency.lockutils [None req-12eb9dfa-b92e-47f2-a67a-cc399fb0a366 tempest-TenantUsagesTestJSON-1491461262 tempest-TenantUsagesTestJSON-1491461262-project-member] Lock "95685d36-7efa-42a9-8722-5b90d6edbce5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 23.221s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 533.026313] env[61839]: DEBUG oslo_concurrency.lockutils [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Lock "95685d36-7efa-42a9-8722-5b90d6edbce5" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 7.494s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 533.026787] env[61839]: INFO nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] [instance: 95685d36-7efa-42a9-8722-5b90d6edbce5] During sync_power_state the instance has a pending task (spawning). Skip. 
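The PortBindingFailed traceback earlier in this excerpt is a textbook case of Nova's deferred-exception pattern: _allocate_network_async runs in a background greenthread, and the Neutron failure only resurfaces when the virt driver first iterates network_info (model.py __iter__ -> _sync_wrapper -> wait). Below is a minimal sketch of that pattern, with plain threads standing in for eventlet greenthreads; the class and function names are illustrative, not Nova's actual code.

# Sketch of the deferred-exception pattern seen in the traceback above:
# network allocation runs asynchronously and any failure is re-raised
# only when the result is first consumed (here: iterating network_info
# during spawn). Plain threads stand in for eventlet greenthreads.

import threading

class PortBindingFailed(Exception):
    pass

class AsyncNetworkInfo:
    def __init__(self, fn, *args):
        self._value = None
        self._error = None
        self._done = threading.Event()
        threading.Thread(target=self._run, args=(fn,) + args).start()

    def _run(self, fn, *args):
        try:
            self._value = fn(*args)
        except Exception as exc:      # captured, not raised, in the worker
            self._error = exc
        finally:
            self._done.set()

    def wait(self):
        self._done.wait()
        if self._error is not None:
            raise self._error         # resurfaces far from the failure site
        return self._value

def allocate_for_instance(port_id):
    # stand-in for the Neutron port update that failed in the log
    raise PortBindingFailed("Binding failed for port %s" % port_id)

nwinfo = AsyncNetworkInfo(allocate_for_instance,
                          "93010f8d-86ce-4203-ac82-ff57a4f2d76b")
try:
    nwinfo.wait()                     # spawn() reaches this via __iter__
except PortBindingFailed as exc:
    print("build fails late; instance gets re-scheduled:", exc)

This is why the ERROR lands in _build_and_run_instance rather than at the Neutron call site, and why the manager's response is to clean up allocations and re-schedule the build.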
[ 533.026787] env[61839]: DEBUG oslo_concurrency.lockutils [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Lock "95685d36-7efa-42a9-8722-5b90d6edbce5" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 533.062335] env[61839]: DEBUG nova.compute.manager [None req-285e2342-47a9-4a7d-855a-b08b1b98031b tempest-MigrationsAdminTest-1877210458 tempest-MigrationsAdminTest-1877210458-project-member] [instance: 248ced8d-af78-4400-b9eb-449851b1bfa3] Starting instance... {{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 533.341814] env[61839]: INFO nova.compute.manager [None req-71b74068-a47a-43e7-91f8-80e309405090 tempest-ServerDiagnosticsTest-192461370 tempest-ServerDiagnosticsTest-192461370-project-member] [instance: c258dbf2-be81-40e5-a11a-03dee332d3b6] Took 1.07 seconds to deallocate network for instance. [ 533.397923] env[61839]: DEBUG nova.network.neutron [None req-67c06703-2c83-4f84-94a3-1193ea8f10d4 tempest-ImagesNegativeTestJSON-313379827 tempest-ImagesNegativeTestJSON-313379827-project-member] [instance: be976871-706d-41e1-9423-55ff251a52e9] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 533.470448] env[61839]: DEBUG nova.network.neutron [-] [instance: 6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 533.496038] env[61839]: DEBUG oslo_vmware.api [None req-f8fd2f30-7a32-47b5-822d-e89a7c694abc tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] Task: {'id': task-1314231, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 533.620040] env[61839]: DEBUG oslo_concurrency.lockutils [None req-285e2342-47a9-4a7d-855a-b08b1b98031b tempest-MigrationsAdminTest-1877210458 tempest-MigrationsAdminTest-1877210458-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 533.913356] env[61839]: DEBUG nova.network.neutron [None req-67c06703-2c83-4f84-94a3-1193ea8f10d4 tempest-ImagesNegativeTestJSON-313379827 tempest-ImagesNegativeTestJSON-313379827-project-member] [instance: be976871-706d-41e1-9423-55ff251a52e9] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 533.977649] env[61839]: INFO nova.compute.manager [-] [instance: 6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5] Took 1.06 seconds to deallocate network for instance. 
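The CopyVirtualDisk_Task lines ("Waiting for the task ... progress is 25%") come from oslo.vmware's wait_for_task/_poll_task loop, which repeatedly fetches vCenter task info until the task reaches a terminal state. A rough sketch of such a poll loop follows; the dict shape, state names, and interval are assumptions for illustration, not oslo.vmware's real API.

# Rough sketch of the poll loop behind "Waiting for the task ...
# progress is N%". The real loop lives in oslo_vmware/api.py
# (wait_for_task / _poll_task).

import time

def wait_for_task(get_task_info, interval=0.5):
    while True:
        info = get_task_info()
        if info["state"] == "success":
            return info.get("result")
        if info["state"] == "error":
            raise RuntimeError(info.get("error", "task failed"))
        print("progress is %s%%" % info.get("progress", 0))
        time.sleep(interval)

# Simulated task mirroring the progress values logged above:
states = iter([
    {"state": "running", "progress": 0},
    {"state": "running", "progress": 25},
    {"state": "success", "result": "task-1314231"},
])
print(wait_for_task(lambda: next(states), interval=0.0))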
[ 533.986328] env[61839]: DEBUG nova.compute.claims [None req-2194692e-8fa8-43a0-9f32-21d13bbfb66f tempest-ServersTestFqdnHostnames-1400549167 tempest-ServersTestFqdnHostnames-1400549167-project-member] [instance: 6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5] Aborting claim: {{(pid=61839) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 533.986479] env[61839]: DEBUG oslo_concurrency.lockutils [None req-2194692e-8fa8-43a0-9f32-21d13bbfb66f tempest-ServersTestFqdnHostnames-1400549167 tempest-ServersTestFqdnHostnames-1400549167-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 533.992956] env[61839]: DEBUG oslo_vmware.api [None req-f8fd2f30-7a32-47b5-822d-e89a7c694abc tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] Task: {'id': task-1314231, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.666393} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 533.992956] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-f8fd2f30-7a32-47b5-822d-e89a7c694abc tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk to [datastore2] 41823a25-5ff2-4838-854d-5bada8e5daca/41823a25-5ff2-4838-854d-5bada8e5daca.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 533.993096] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-f8fd2f30-7a32-47b5-822d-e89a7c694abc tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] [instance: 41823a25-5ff2-4838-854d-5bada8e5daca] Extending root virtual disk to 1048576 {{(pid=61839) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 533.993239] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-91c8b55b-ea3d-415c-8a76-531def0b7335 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 534.002455] env[61839]: DEBUG oslo_vmware.api [None req-f8fd2f30-7a32-47b5-822d-e89a7c694abc tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] Waiting for the task: (returnval){ [ 534.002455] env[61839]: value = "task-1314232" [ 534.002455] env[61839]: _type = "Task" [ 534.002455] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 534.012148] env[61839]: DEBUG oslo_vmware.api [None req-f8fd2f30-7a32-47b5-822d-e89a7c694abc tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] Task: {'id': task-1314232, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 534.037389] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30fd2f6d-e9e1-432b-9644-b2258a97809c {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 534.046184] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-662c7c58-ef9b-40fb-beec-d10a5a7dccf2 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 534.079556] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-603ade0c-7dc6-48a8-930f-00768250e9d0 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 534.087284] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4d5357a-3a32-43a2-85b5-50fcb3286624 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 534.101232] env[61839]: DEBUG nova.compute.provider_tree [None req-e2e7743f-119e-4c3a-a685-1f1242f17cae tempest-VolumesAssistedSnapshotsTest-1863052206 tempest-VolumesAssistedSnapshotsTest-1863052206-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 534.119438] env[61839]: INFO nova.compute.manager [None req-540fdcb6-3f39-4f2c-bc6d-6e0b40f83f5f tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] [instance: 211e8267-3c33-42c8-852f-1c20d7987453] Rebuilding instance [ 534.165942] env[61839]: DEBUG nova.compute.manager [None req-540fdcb6-3f39-4f2c-bc6d-6e0b40f83f5f tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] [instance: 211e8267-3c33-42c8-852f-1c20d7987453] Checking state {{(pid=61839) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 534.167632] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b00505a4-b842-4850-ba6d-8efeb3f40e8e {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 534.399540] env[61839]: INFO nova.scheduler.client.report [None req-71b74068-a47a-43e7-91f8-80e309405090 tempest-ServerDiagnosticsTest-192461370 tempest-ServerDiagnosticsTest-192461370-project-member] Deleted allocations for instance c258dbf2-be81-40e5-a11a-03dee332d3b6 [ 534.419671] env[61839]: DEBUG oslo_concurrency.lockutils [None req-67c06703-2c83-4f84-94a3-1193ea8f10d4 tempest-ImagesNegativeTestJSON-313379827 tempest-ImagesNegativeTestJSON-313379827-project-member] Releasing lock "refresh_cache-be976871-706d-41e1-9423-55ff251a52e9" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 534.420068] env[61839]: DEBUG nova.compute.manager [None req-67c06703-2c83-4f84-94a3-1193ea8f10d4 tempest-ImagesNegativeTestJSON-313379827 tempest-ImagesNegativeTestJSON-313379827-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61839) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 534.420169] env[61839]: DEBUG nova.compute.manager [None req-67c06703-2c83-4f84-94a3-1193ea8f10d4 tempest-ImagesNegativeTestJSON-313379827 tempest-ImagesNegativeTestJSON-313379827-project-member] [instance: be976871-706d-41e1-9423-55ff251a52e9] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 534.420269] env[61839]: DEBUG nova.network.neutron [None req-67c06703-2c83-4f84-94a3-1193ea8f10d4 tempest-ImagesNegativeTestJSON-313379827 tempest-ImagesNegativeTestJSON-313379827-project-member] [instance: be976871-706d-41e1-9423-55ff251a52e9] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 534.495275] env[61839]: DEBUG nova.network.neutron [None req-67c06703-2c83-4f84-94a3-1193ea8f10d4 tempest-ImagesNegativeTestJSON-313379827 tempest-ImagesNegativeTestJSON-313379827-project-member] [instance: be976871-706d-41e1-9423-55ff251a52e9] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 534.514490] env[61839]: DEBUG oslo_vmware.api [None req-f8fd2f30-7a32-47b5-822d-e89a7c694abc tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] Task: {'id': task-1314232, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.086158} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 534.514743] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-f8fd2f30-7a32-47b5-822d-e89a7c694abc tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] [instance: 41823a25-5ff2-4838-854d-5bada8e5daca] Extended root virtual disk {{(pid=61839) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 534.515635] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-165ac376-2fd3-4345-a19a-998f7e33d3e0 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 534.539902] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-f8fd2f30-7a32-47b5-822d-e89a7c694abc tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] [instance: 41823a25-5ff2-4838-854d-5bada8e5daca] Reconfiguring VM instance instance-00000006 to attach disk [datastore2] 41823a25-5ff2-4838-854d-5bada8e5daca/41823a25-5ff2-4838-854d-5bada8e5daca.vmdk or device None with type sparse {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 534.540251] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3de40ef2-06ef-4263-bb11-d3319b5c876d {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 534.565628] env[61839]: DEBUG oslo_vmware.api [None req-f8fd2f30-7a32-47b5-822d-e89a7c694abc tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] Waiting for the task: (returnval){ [ 534.565628] env[61839]: value = "task-1314233" [ 534.565628] env[61839]: _type = "Task" [ 534.565628] env[61839]: } to complete. 
{{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 534.575139] env[61839]: DEBUG oslo_vmware.api [None req-f8fd2f30-7a32-47b5-822d-e89a7c694abc tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] Task: {'id': task-1314233, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 534.607400] env[61839]: DEBUG nova.scheduler.client.report [None req-e2e7743f-119e-4c3a-a685-1f1242f17cae tempest-VolumesAssistedSnapshotsTest-1863052206 tempest-VolumesAssistedSnapshotsTest-1863052206-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 534.682703] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-540fdcb6-3f39-4f2c-bc6d-6e0b40f83f5f tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] [instance: 211e8267-3c33-42c8-852f-1c20d7987453] Powering off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 534.682703] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-98e2b5eb-adf7-43dc-b55c-1d8f3cab4946 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 534.690964] env[61839]: DEBUG oslo_vmware.api [None req-540fdcb6-3f39-4f2c-bc6d-6e0b40f83f5f tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Waiting for the task: (returnval){ [ 534.690964] env[61839]: value = "task-1314234" [ 534.690964] env[61839]: _type = "Task" [ 534.690964] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 534.703131] env[61839]: DEBUG oslo_vmware.api [None req-540fdcb6-3f39-4f2c-bc6d-6e0b40f83f5f tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Task: {'id': task-1314234, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 534.910867] env[61839]: DEBUG oslo_concurrency.lockutils [None req-71b74068-a47a-43e7-91f8-80e309405090 tempest-ServerDiagnosticsTest-192461370 tempest-ServerDiagnosticsTest-192461370-project-member] Lock "c258dbf2-be81-40e5-a11a-03dee332d3b6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.454s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 534.911173] env[61839]: DEBUG oslo_concurrency.lockutils [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Lock "c258dbf2-be81-40e5-a11a-03dee332d3b6" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 9.378s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 534.911345] env[61839]: INFO nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] [instance: c258dbf2-be81-40e5-a11a-03dee332d3b6] During sync_power_state the instance has a pending task (spawning). Skip. [ 534.911519] env[61839]: DEBUG oslo_concurrency.lockutils [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Lock "c258dbf2-be81-40e5-a11a-03dee332d3b6" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 534.998316] env[61839]: DEBUG nova.network.neutron [None req-67c06703-2c83-4f84-94a3-1193ea8f10d4 tempest-ImagesNegativeTestJSON-313379827 tempest-ImagesNegativeTestJSON-313379827-project-member] [instance: be976871-706d-41e1-9423-55ff251a52e9] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 535.041260] env[61839]: DEBUG nova.compute.manager [req-7e8461e0-54d6-494d-a7a5-aa1e55daeb30 req-c3b48f74-a0e0-4e4e-82b5-f632a03d6328 service nova] [instance: 6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5] Received event network-changed-16fa8de1-60b8-4e77-bc49-e40967dff057 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 535.041260] env[61839]: DEBUG nova.compute.manager [req-7e8461e0-54d6-494d-a7a5-aa1e55daeb30 req-c3b48f74-a0e0-4e4e-82b5-f632a03d6328 service nova] [instance: 6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5] Refreshing instance network info cache due to event network-changed-16fa8de1-60b8-4e77-bc49-e40967dff057. 
{{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 535.041260] env[61839]: DEBUG oslo_concurrency.lockutils [req-7e8461e0-54d6-494d-a7a5-aa1e55daeb30 req-c3b48f74-a0e0-4e4e-82b5-f632a03d6328 service nova] Acquiring lock "refresh_cache-6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 535.041260] env[61839]: DEBUG oslo_concurrency.lockutils [req-7e8461e0-54d6-494d-a7a5-aa1e55daeb30 req-c3b48f74-a0e0-4e4e-82b5-f632a03d6328 service nova] Acquired lock "refresh_cache-6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 535.041260] env[61839]: DEBUG nova.network.neutron [req-7e8461e0-54d6-494d-a7a5-aa1e55daeb30 req-c3b48f74-a0e0-4e4e-82b5-f632a03d6328 service nova] [instance: 6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5] Refreshing network info cache for port 16fa8de1-60b8-4e77-bc49-e40967dff057 {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 535.081799] env[61839]: DEBUG oslo_vmware.api [None req-f8fd2f30-7a32-47b5-822d-e89a7c694abc tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] Task: {'id': task-1314233, 'name': ReconfigVM_Task, 'duration_secs': 0.287436} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 535.082121] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-f8fd2f30-7a32-47b5-822d-e89a7c694abc tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] [instance: 41823a25-5ff2-4838-854d-5bada8e5daca] Reconfigured VM instance instance-00000006 to attach disk [datastore2] 41823a25-5ff2-4838-854d-5bada8e5daca/41823a25-5ff2-4838-854d-5bada8e5daca.vmdk or device None with type sparse {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 535.082737] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-303570ae-195e-422f-8479-5c465cab4e03 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 535.090046] env[61839]: DEBUG oslo_vmware.api [None req-f8fd2f30-7a32-47b5-822d-e89a7c694abc tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] Waiting for the task: (returnval){ [ 535.090046] env[61839]: value = "task-1314235" [ 535.090046] env[61839]: _type = "Task" [ 535.090046] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 535.099365] env[61839]: DEBUG oslo_vmware.api [None req-f8fd2f30-7a32-47b5-822d-e89a7c694abc tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] Task: {'id': task-1314235, 'name': Rename_Task} progress is 5%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 535.113973] env[61839]: DEBUG oslo_concurrency.lockutils [None req-e2e7743f-119e-4c3a-a685-1f1242f17cae tempest-VolumesAssistedSnapshotsTest-1863052206 tempest-VolumesAssistedSnapshotsTest-1863052206-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.306s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 535.114496] env[61839]: DEBUG nova.compute.manager [None req-e2e7743f-119e-4c3a-a685-1f1242f17cae tempest-VolumesAssistedSnapshotsTest-1863052206 tempest-VolumesAssistedSnapshotsTest-1863052206-project-member] [instance: 9594f132-d558-4c75-872f-b1d1b7c08f66] Start building networks asynchronously for instance. {{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 535.119683] env[61839]: DEBUG oslo_concurrency.lockutils [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 9.081s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 535.119683] env[61839]: DEBUG oslo_concurrency.lockutils [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 535.119683] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61839) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 535.119683] env[61839]: DEBUG oslo_concurrency.lockutils [None req-93209ce3-de5a-4c91-b8b0-efd4bd89a6cc tempest-ServerDiagnosticsNegativeTest-1581315091 tempest-ServerDiagnosticsNegativeTest-1581315091-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 5.766s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 535.122338] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccf0a81a-b2d2-4e36-871e-368db214b1f6 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 535.132017] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea29c54d-cf01-493d-8871-198fcd7594c1 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 535.153118] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50dbb7d0-6182-4b07-8c52-4865db2638ab {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 535.160788] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c29177ba-bb8d-45af-906e-471ca06a6222 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 535.206856] env[61839]: DEBUG 
nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181468MB free_disk=135GB free_vcpus=48 pci_devices=None {{(pid=61839) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 535.207054] env[61839]: DEBUG oslo_concurrency.lockutils [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 535.216233] env[61839]: DEBUG oslo_vmware.api [None req-540fdcb6-3f39-4f2c-bc6d-6e0b40f83f5f tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Task: {'id': task-1314234, 'name': PowerOffVM_Task, 'duration_secs': 0.120638} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 535.216547] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-540fdcb6-3f39-4f2c-bc6d-6e0b40f83f5f tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] [instance: 211e8267-3c33-42c8-852f-1c20d7987453] Powered off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 535.216803] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-540fdcb6-3f39-4f2c-bc6d-6e0b40f83f5f tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] [instance: 211e8267-3c33-42c8-852f-1c20d7987453] Destroying instance {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 535.220804] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01840617-7075-4d0e-a48c-5cf36b18c3a9 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 535.229710] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-540fdcb6-3f39-4f2c-bc6d-6e0b40f83f5f tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] [instance: 211e8267-3c33-42c8-852f-1c20d7987453] Unregistering the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 535.230520] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-80e7ef2c-47b8-460e-964f-4d644183b683 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 535.263959] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-540fdcb6-3f39-4f2c-bc6d-6e0b40f83f5f tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] [instance: 211e8267-3c33-42c8-852f-1c20d7987453] Unregistered the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 535.263959] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-540fdcb6-3f39-4f2c-bc6d-6e0b40f83f5f tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] [instance: 211e8267-3c33-42c8-852f-1c20d7987453] Deleting contents of the VM from datastore datastore1 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 535.263959] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-540fdcb6-3f39-4f2c-bc6d-6e0b40f83f5f tempest-ServersAdmin275Test-1612766627 
tempest-ServersAdmin275Test-1612766627-project-member] Deleting the datastore file [datastore1] 211e8267-3c33-42c8-852f-1c20d7987453 {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 535.263959] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7c1fadaa-0770-4412-8286-a6ae14dd7547 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 535.275018] env[61839]: DEBUG oslo_vmware.api [None req-540fdcb6-3f39-4f2c-bc6d-6e0b40f83f5f tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Waiting for the task: (returnval){ [ 535.275018] env[61839]: value = "task-1314237" [ 535.275018] env[61839]: _type = "Task" [ 535.275018] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 535.283281] env[61839]: DEBUG oslo_vmware.api [None req-540fdcb6-3f39-4f2c-bc6d-6e0b40f83f5f tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Task: {'id': task-1314237, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 535.346311] env[61839]: DEBUG oslo_concurrency.lockutils [None req-26fc30cc-4ad1-4009-975c-8d3de6d95ff0 tempest-ServerExternalEventsTest-1581584313 tempest-ServerExternalEventsTest-1581584313-project-member] Acquiring lock "d0822954-42c2-4003-baf4-97bc2ce65768" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 535.346859] env[61839]: DEBUG oslo_concurrency.lockutils [None req-26fc30cc-4ad1-4009-975c-8d3de6d95ff0 tempest-ServerExternalEventsTest-1581584313 tempest-ServerExternalEventsTest-1581584313-project-member] Lock "d0822954-42c2-4003-baf4-97bc2ce65768" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 535.451753] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0477abbc-1d43-4051-b701-9bb77cd00e21 tempest-ServerRescueTestJSON-1450719199 tempest-ServerRescueTestJSON-1450719199-project-member] Acquiring lock "2032b746-2161-4487-ae4c-6159313241f4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 535.451753] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0477abbc-1d43-4051-b701-9bb77cd00e21 tempest-ServerRescueTestJSON-1450719199 tempest-ServerRescueTestJSON-1450719199-project-member] Lock "2032b746-2161-4487-ae4c-6159313241f4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 535.501933] env[61839]: INFO nova.compute.manager [None req-67c06703-2c83-4f84-94a3-1193ea8f10d4 tempest-ImagesNegativeTestJSON-313379827 tempest-ImagesNegativeTestJSON-313379827-project-member] [instance: be976871-706d-41e1-9423-55ff251a52e9] Took 1.08 seconds to deallocate network for instance. 
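The inventory payloads reported for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 encode placement capacity per resource class as (total - reserved) * allocation_ratio. Plugging in the exact values from the log shows why this node can schedule 192 vCPUs' worth of instances even though the hypervisor view reports free_vcpus=48:

# Placement capacity per resource class: (total - reserved) * ratio,
# using the inventory data logged above.

inventory = {
    "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
    "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
    print(rc, capacity)
# VCPU 192.0 / MEMORY_MB 196078.0 / DISK_GB 400.0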
[ 535.576539] env[61839]: DEBUG nova.network.neutron [req-7e8461e0-54d6-494d-a7a5-aa1e55daeb30 req-c3b48f74-a0e0-4e4e-82b5-f632a03d6328 service nova] [instance: 6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 535.600254] env[61839]: DEBUG oslo_vmware.api [None req-f8fd2f30-7a32-47b5-822d-e89a7c694abc tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] Task: {'id': task-1314235, 'name': Rename_Task, 'duration_secs': 0.274331} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 535.600553] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-f8fd2f30-7a32-47b5-822d-e89a7c694abc tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] [instance: 41823a25-5ff2-4838-854d-5bada8e5daca] Powering on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 535.600824] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-42e36694-45a4-4af3-80b8-f516a448c401 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 535.610837] env[61839]: DEBUG oslo_vmware.api [None req-f8fd2f30-7a32-47b5-822d-e89a7c694abc tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] Waiting for the task: (returnval){ [ 535.610837] env[61839]: value = "task-1314238" [ 535.610837] env[61839]: _type = "Task" [ 535.610837] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 535.621653] env[61839]: DEBUG oslo_vmware.api [None req-f8fd2f30-7a32-47b5-822d-e89a7c694abc tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] Task: {'id': task-1314238, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 535.623611] env[61839]: DEBUG nova.compute.utils [None req-e2e7743f-119e-4c3a-a685-1f1242f17cae tempest-VolumesAssistedSnapshotsTest-1863052206 tempest-VolumesAssistedSnapshotsTest-1863052206-project-member] Using /dev/sd instead of None {{(pid=61839) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 535.625792] env[61839]: DEBUG nova.compute.manager [None req-e2e7743f-119e-4c3a-a685-1f1242f17cae tempest-VolumesAssistedSnapshotsTest-1863052206 tempest-VolumesAssistedSnapshotsTest-1863052206-project-member] [instance: 9594f132-d558-4c75-872f-b1d1b7c08f66] Allocating IP information in the background. 
{{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 535.625792] env[61839]: DEBUG nova.network.neutron [None req-e2e7743f-119e-4c3a-a685-1f1242f17cae tempest-VolumesAssistedSnapshotsTest-1863052206 tempest-VolumesAssistedSnapshotsTest-1863052206-project-member] [instance: 9594f132-d558-4c75-872f-b1d1b7c08f66] allocate_for_instance() {{(pid=61839) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 535.748303] env[61839]: DEBUG nova.policy [None req-e2e7743f-119e-4c3a-a685-1f1242f17cae tempest-VolumesAssistedSnapshotsTest-1863052206 tempest-VolumesAssistedSnapshotsTest-1863052206-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2c6592a3a3e346289b404817f5a2896c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '22d66807a11f4c9ca54d28b1c13f185c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61839) authorize /opt/stack/nova/nova/policy.py:201}} [ 535.786521] env[61839]: DEBUG oslo_vmware.api [None req-540fdcb6-3f39-4f2c-bc6d-6e0b40f83f5f tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Task: {'id': task-1314237, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.097082} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 535.791615] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-540fdcb6-3f39-4f2c-bc6d-6e0b40f83f5f tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Deleted the datastore file {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 535.792085] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-540fdcb6-3f39-4f2c-bc6d-6e0b40f83f5f tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] [instance: 211e8267-3c33-42c8-852f-1c20d7987453] Deleted contents of the VM from datastore datastore1 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 535.792085] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-540fdcb6-3f39-4f2c-bc6d-6e0b40f83f5f tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] [instance: 211e8267-3c33-42c8-852f-1c20d7987453] Instance destroyed {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 535.854208] env[61839]: DEBUG nova.compute.manager [None req-26fc30cc-4ad1-4009-975c-8d3de6d95ff0 tempest-ServerExternalEventsTest-1581584313 tempest-ServerExternalEventsTest-1581584313-project-member] [instance: d0822954-42c2-4003-baf4-97bc2ce65768] Starting instance... 
{{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 535.869572] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbf33d37-bdba-46a9-84a8-08a3e25141fc {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 535.878614] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5293fd66-779a-460c-bef8-6cfd07770aa9 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 535.924804] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-716624e6-b14e-498c-8462-fe5c06a7ed0e {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 535.933293] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2251b42c-f737-4bc6-8b18-a89481cc61de {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 535.948687] env[61839]: DEBUG nova.compute.provider_tree [None req-93209ce3-de5a-4c91-b8b0-efd4bd89a6cc tempest-ServerDiagnosticsNegativeTest-1581315091 tempest-ServerDiagnosticsNegativeTest-1581315091-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 535.956798] env[61839]: DEBUG nova.compute.manager [None req-0477abbc-1d43-4051-b701-9bb77cd00e21 tempest-ServerRescueTestJSON-1450719199 tempest-ServerRescueTestJSON-1450719199-project-member] [instance: 2032b746-2161-4487-ae4c-6159313241f4] Starting instance... {{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 536.123692] env[61839]: DEBUG oslo_vmware.api [None req-f8fd2f30-7a32-47b5-822d-e89a7c694abc tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] Task: {'id': task-1314238, 'name': PowerOnVM_Task} progress is 88%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 536.130021] env[61839]: DEBUG nova.compute.manager [None req-e2e7743f-119e-4c3a-a685-1f1242f17cae tempest-VolumesAssistedSnapshotsTest-1863052206 tempest-VolumesAssistedSnapshotsTest-1863052206-project-member] [instance: 9594f132-d558-4c75-872f-b1d1b7c08f66] Start building block device mappings for instance. 
{{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 536.323169] env[61839]: DEBUG nova.network.neutron [req-7e8461e0-54d6-494d-a7a5-aa1e55daeb30 req-c3b48f74-a0e0-4e4e-82b5-f632a03d6328 service nova] [instance: 6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 536.386926] env[61839]: DEBUG oslo_concurrency.lockutils [None req-26fc30cc-4ad1-4009-975c-8d3de6d95ff0 tempest-ServerExternalEventsTest-1581584313 tempest-ServerExternalEventsTest-1581584313-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 536.444709] env[61839]: DEBUG nova.network.neutron [None req-e2e7743f-119e-4c3a-a685-1f1242f17cae tempest-VolumesAssistedSnapshotsTest-1863052206 tempest-VolumesAssistedSnapshotsTest-1863052206-project-member] [instance: 9594f132-d558-4c75-872f-b1d1b7c08f66] Successfully created port: 9c3b8ee6-c7b3-473b-99ac-0212bfa57f3e {{(pid=61839) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 536.452540] env[61839]: DEBUG nova.scheduler.client.report [None req-93209ce3-de5a-4c91-b8b0-efd4bd89a6cc tempest-ServerDiagnosticsNegativeTest-1581315091 tempest-ServerDiagnosticsNegativeTest-1581315091-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 536.480617] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0477abbc-1d43-4051-b701-9bb77cd00e21 tempest-ServerRescueTestJSON-1450719199 tempest-ServerRescueTestJSON-1450719199-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 536.554109] env[61839]: INFO nova.scheduler.client.report [None req-67c06703-2c83-4f84-94a3-1193ea8f10d4 tempest-ImagesNegativeTestJSON-313379827 tempest-ImagesNegativeTestJSON-313379827-project-member] Deleted allocations for instance be976871-706d-41e1-9423-55ff251a52e9 [ 536.625962] env[61839]: DEBUG oslo_vmware.api [None req-f8fd2f30-7a32-47b5-822d-e89a7c694abc tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] Task: {'id': task-1314238, 'name': PowerOnVM_Task, 'duration_secs': 0.656656} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 536.626253] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-f8fd2f30-7a32-47b5-822d-e89a7c694abc tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] [instance: 41823a25-5ff2-4838-854d-5bada8e5daca] Powered on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 536.626444] env[61839]: INFO nova.compute.manager [None req-f8fd2f30-7a32-47b5-822d-e89a7c694abc tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] [instance: 41823a25-5ff2-4838-854d-5bada8e5daca] Took 9.59 seconds to spawn the instance on the hypervisor. [ 536.626710] env[61839]: DEBUG nova.compute.manager [None req-f8fd2f30-7a32-47b5-822d-e89a7c694abc tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] [instance: 41823a25-5ff2-4838-854d-5bada8e5daca] Checking state {{(pid=61839) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 536.627558] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-990d5f0e-aa32-478d-8667-1e8937468df9 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 536.826561] env[61839]: DEBUG oslo_concurrency.lockutils [req-7e8461e0-54d6-494d-a7a5-aa1e55daeb30 req-c3b48f74-a0e0-4e4e-82b5-f632a03d6328 service nova] Releasing lock "refresh_cache-6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 536.826818] env[61839]: DEBUG nova.compute.manager [req-7e8461e0-54d6-494d-a7a5-aa1e55daeb30 req-c3b48f74-a0e0-4e4e-82b5-f632a03d6328 service nova] [instance: 6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5] Received event network-vif-deleted-16fa8de1-60b8-4e77-bc49-e40967dff057 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 536.831301] env[61839]: DEBUG nova.virt.hardware [None req-540fdcb6-3f39-4f2c-bc6d-6e0b40f83f5f tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-18T16:51:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-18T16:50:57Z,direct_url=,disk_format='vmdk',id=e497cc62-282a-4a70-9770-22d80d8a1013,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a98e034276e44534a9feace637762da7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-18T16:50:58Z,virtual_size=,visibility=), allow threads: False {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 536.831415] env[61839]: DEBUG nova.virt.hardware [None req-540fdcb6-3f39-4f2c-bc6d-6e0b40f83f5f tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Flavor limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 536.831949] env[61839]: DEBUG nova.virt.hardware [None req-540fdcb6-3f39-4f2c-bc6d-6e0b40f83f5f 
tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Image limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 536.831949] env[61839]: DEBUG nova.virt.hardware [None req-540fdcb6-3f39-4f2c-bc6d-6e0b40f83f5f tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Flavor pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 536.831949] env[61839]: DEBUG nova.virt.hardware [None req-540fdcb6-3f39-4f2c-bc6d-6e0b40f83f5f tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Image pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 536.832261] env[61839]: DEBUG nova.virt.hardware [None req-540fdcb6-3f39-4f2c-bc6d-6e0b40f83f5f tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 536.832450] env[61839]: DEBUG nova.virt.hardware [None req-540fdcb6-3f39-4f2c-bc6d-6e0b40f83f5f tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 536.832614] env[61839]: DEBUG nova.virt.hardware [None req-540fdcb6-3f39-4f2c-bc6d-6e0b40f83f5f tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 536.832782] env[61839]: DEBUG nova.virt.hardware [None req-540fdcb6-3f39-4f2c-bc6d-6e0b40f83f5f tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Got 1 possible topologies {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 536.832941] env[61839]: DEBUG nova.virt.hardware [None req-540fdcb6-3f39-4f2c-bc6d-6e0b40f83f5f tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 536.833123] env[61839]: DEBUG nova.virt.hardware [None req-540fdcb6-3f39-4f2c-bc6d-6e0b40f83f5f tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 536.833977] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1afff5d5-305f-49f8-b8d7-57a54dbac343 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 536.844116] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75fad067-393f-4dda-a9c1-d9c0a8049f1a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 536.863656] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-540fdcb6-3f39-4f2c-bc6d-6e0b40f83f5f 
tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] [instance: 211e8267-3c33-42c8-852f-1c20d7987453] Instance VIF info [] {{(pid=61839) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 536.869200] env[61839]: DEBUG oslo.service.loopingcall [None req-540fdcb6-3f39-4f2c-bc6d-6e0b40f83f5f tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 536.869530] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 211e8267-3c33-42c8-852f-1c20d7987453] Creating VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 536.869755] env[61839]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-83df11fd-dc0b-4433-9653-358174eab7fa {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 536.886920] env[61839]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 536.886920] env[61839]: value = "task-1314239" [ 536.886920] env[61839]: _type = "Task" [ 536.886920] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 536.900479] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314239, 'name': CreateVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 536.959365] env[61839]: DEBUG oslo_concurrency.lockutils [None req-93209ce3-de5a-4c91-b8b0-efd4bd89a6cc tempest-ServerDiagnosticsNegativeTest-1581315091 tempest-ServerDiagnosticsNegativeTest-1581315091-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.840s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 536.962016] env[61839]: ERROR nova.compute.manager [None req-93209ce3-de5a-4c91-b8b0-efd4bd89a6cc tempest-ServerDiagnosticsNegativeTest-1581315091 tempest-ServerDiagnosticsNegativeTest-1581315091-project-member] [instance: 570fb8fe-391a-4f1b-be51-17979e9fb049] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port b7ee1667-a38f-429e-acb4-c7171559db2b, please check neutron logs for more information. 
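The Folder.CreateVM_Task records above show oslo.vmware's invoke-then-poll contract: the SOAP call returns a task handle immediately (value = "task-1314239"), and wait_for_task then polls it until the "completed successfully" record appears. A minimal sketch of that loop, assuming a hypothetical get_task_info(task_ref) accessor in place of the library's real PropertyCollector plumbing:

    import time

    def wait_for_task(get_task_info, task_ref, interval=0.5):
        # Poll a vCenter task handle until it finishes, mirroring the
        # "progress is N%" / "completed successfully" records in this log.
        while True:
            info = get_task_info(task_ref)      # object with .state, .progress, .error
            if info.state == 'success':
                return info.result              # e.g. the new VM ref for CreateVM_Task
            if info.state == 'error':
                raise RuntimeError(info.error)  # oslo.vmware raises translated exceptions
            time.sleep(interval)                # the real code polls via a loopingcall

The PortBindingFailed error just logged for instance 570fb8fe is expanded in the traceback that follows.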
[ 536.962016] env[61839]: ERROR nova.compute.manager [instance: 570fb8fe-391a-4f1b-be51-17979e9fb049] Traceback (most recent call last): [ 536.962016] env[61839]: ERROR nova.compute.manager [instance: 570fb8fe-391a-4f1b-be51-17979e9fb049] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 536.962016] env[61839]: ERROR nova.compute.manager [instance: 570fb8fe-391a-4f1b-be51-17979e9fb049] self.driver.spawn(context, instance, image_meta, [ 536.962016] env[61839]: ERROR nova.compute.manager [instance: 570fb8fe-391a-4f1b-be51-17979e9fb049] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 536.962016] env[61839]: ERROR nova.compute.manager [instance: 570fb8fe-391a-4f1b-be51-17979e9fb049] self._vmops.spawn(context, instance, image_meta, injected_files, [ 536.962016] env[61839]: ERROR nova.compute.manager [instance: 570fb8fe-391a-4f1b-be51-17979e9fb049] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 536.962016] env[61839]: ERROR nova.compute.manager [instance: 570fb8fe-391a-4f1b-be51-17979e9fb049] vm_ref = self.build_virtual_machine(instance, [ 536.962016] env[61839]: ERROR nova.compute.manager [instance: 570fb8fe-391a-4f1b-be51-17979e9fb049] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 536.962016] env[61839]: ERROR nova.compute.manager [instance: 570fb8fe-391a-4f1b-be51-17979e9fb049] vif_infos = vmwarevif.get_vif_info(self._session, [ 536.962016] env[61839]: ERROR nova.compute.manager [instance: 570fb8fe-391a-4f1b-be51-17979e9fb049] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 536.962425] env[61839]: ERROR nova.compute.manager [instance: 570fb8fe-391a-4f1b-be51-17979e9fb049] for vif in network_info: [ 536.962425] env[61839]: ERROR nova.compute.manager [instance: 570fb8fe-391a-4f1b-be51-17979e9fb049] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 536.962425] env[61839]: ERROR nova.compute.manager [instance: 570fb8fe-391a-4f1b-be51-17979e9fb049] return self._sync_wrapper(fn, *args, **kwargs) [ 536.962425] env[61839]: ERROR nova.compute.manager [instance: 570fb8fe-391a-4f1b-be51-17979e9fb049] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 536.962425] env[61839]: ERROR nova.compute.manager [instance: 570fb8fe-391a-4f1b-be51-17979e9fb049] self.wait() [ 536.962425] env[61839]: ERROR nova.compute.manager [instance: 570fb8fe-391a-4f1b-be51-17979e9fb049] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 536.962425] env[61839]: ERROR nova.compute.manager [instance: 570fb8fe-391a-4f1b-be51-17979e9fb049] self[:] = self._gt.wait() [ 536.962425] env[61839]: ERROR nova.compute.manager [instance: 570fb8fe-391a-4f1b-be51-17979e9fb049] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 536.962425] env[61839]: ERROR nova.compute.manager [instance: 570fb8fe-391a-4f1b-be51-17979e9fb049] return self._exit_event.wait() [ 536.962425] env[61839]: ERROR nova.compute.manager [instance: 570fb8fe-391a-4f1b-be51-17979e9fb049] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 536.962425] env[61839]: ERROR nova.compute.manager [instance: 570fb8fe-391a-4f1b-be51-17979e9fb049] result = hub.switch() [ 536.962425] env[61839]: ERROR nova.compute.manager [instance: 570fb8fe-391a-4f1b-be51-17979e9fb049] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
536.962425] env[61839]: ERROR nova.compute.manager [instance: 570fb8fe-391a-4f1b-be51-17979e9fb049] return self.greenlet.switch() [ 536.962749] env[61839]: ERROR nova.compute.manager [instance: 570fb8fe-391a-4f1b-be51-17979e9fb049] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 536.962749] env[61839]: ERROR nova.compute.manager [instance: 570fb8fe-391a-4f1b-be51-17979e9fb049] result = function(*args, **kwargs) [ 536.962749] env[61839]: ERROR nova.compute.manager [instance: 570fb8fe-391a-4f1b-be51-17979e9fb049] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 536.962749] env[61839]: ERROR nova.compute.manager [instance: 570fb8fe-391a-4f1b-be51-17979e9fb049] return func(*args, **kwargs) [ 536.962749] env[61839]: ERROR nova.compute.manager [instance: 570fb8fe-391a-4f1b-be51-17979e9fb049] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 536.962749] env[61839]: ERROR nova.compute.manager [instance: 570fb8fe-391a-4f1b-be51-17979e9fb049] raise e [ 536.962749] env[61839]: ERROR nova.compute.manager [instance: 570fb8fe-391a-4f1b-be51-17979e9fb049] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 536.962749] env[61839]: ERROR nova.compute.manager [instance: 570fb8fe-391a-4f1b-be51-17979e9fb049] nwinfo = self.network_api.allocate_for_instance( [ 536.962749] env[61839]: ERROR nova.compute.manager [instance: 570fb8fe-391a-4f1b-be51-17979e9fb049] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 536.962749] env[61839]: ERROR nova.compute.manager [instance: 570fb8fe-391a-4f1b-be51-17979e9fb049] created_port_ids = self._update_ports_for_instance( [ 536.962749] env[61839]: ERROR nova.compute.manager [instance: 570fb8fe-391a-4f1b-be51-17979e9fb049] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 536.962749] env[61839]: ERROR nova.compute.manager [instance: 570fb8fe-391a-4f1b-be51-17979e9fb049] with excutils.save_and_reraise_exception(): [ 536.962749] env[61839]: ERROR nova.compute.manager [instance: 570fb8fe-391a-4f1b-be51-17979e9fb049] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 536.963118] env[61839]: ERROR nova.compute.manager [instance: 570fb8fe-391a-4f1b-be51-17979e9fb049] self.force_reraise() [ 536.963118] env[61839]: ERROR nova.compute.manager [instance: 570fb8fe-391a-4f1b-be51-17979e9fb049] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 536.963118] env[61839]: ERROR nova.compute.manager [instance: 570fb8fe-391a-4f1b-be51-17979e9fb049] raise self.value [ 536.963118] env[61839]: ERROR nova.compute.manager [instance: 570fb8fe-391a-4f1b-be51-17979e9fb049] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 536.963118] env[61839]: ERROR nova.compute.manager [instance: 570fb8fe-391a-4f1b-be51-17979e9fb049] updated_port = self._update_port( [ 536.963118] env[61839]: ERROR nova.compute.manager [instance: 570fb8fe-391a-4f1b-be51-17979e9fb049] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 536.963118] env[61839]: ERROR nova.compute.manager [instance: 570fb8fe-391a-4f1b-be51-17979e9fb049] _ensure_no_port_binding_failure(port) [ 536.963118] env[61839]: ERROR nova.compute.manager [instance: 570fb8fe-391a-4f1b-be51-17979e9fb049] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 536.963118] env[61839]: ERROR nova.compute.manager [instance: 570fb8fe-391a-4f1b-be51-17979e9fb049] raise exception.PortBindingFailed(port_id=port['id']) [ 536.963118] env[61839]: ERROR nova.compute.manager [instance: 570fb8fe-391a-4f1b-be51-17979e9fb049] nova.exception.PortBindingFailed: Binding failed for port b7ee1667-a38f-429e-acb4-c7171559db2b, please check neutron logs for more information. [ 536.963118] env[61839]: ERROR nova.compute.manager [instance: 570fb8fe-391a-4f1b-be51-17979e9fb049] [ 536.963414] env[61839]: DEBUG nova.compute.utils [None req-93209ce3-de5a-4c91-b8b0-efd4bd89a6cc tempest-ServerDiagnosticsNegativeTest-1581315091 tempest-ServerDiagnosticsNegativeTest-1581315091-project-member] [instance: 570fb8fe-391a-4f1b-be51-17979e9fb049] Binding failed for port b7ee1667-a38f-429e-acb4-c7171559db2b, please check neutron logs for more information. {{(pid=61839) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 536.963414] env[61839]: DEBUG oslo_concurrency.lockutils [None req-4a74a28c-d68f-49fe-a9f7-0d900d2e4df2 tempest-ServersWithSpecificFlavorTestJSON-2009030967 tempest-ServersWithSpecificFlavorTestJSON-2009030967-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.693s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 536.964188] env[61839]: INFO nova.compute.claims [None req-4a74a28c-d68f-49fe-a9f7-0d900d2e4df2 tempest-ServersWithSpecificFlavorTestJSON-2009030967 tempest-ServersWithSpecificFlavorTestJSON-2009030967-project-member] [instance: d0ed5c81-e05a-41a3-9e45-ae0a2a235f16] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 536.967612] env[61839]: DEBUG nova.compute.manager [None req-93209ce3-de5a-4c91-b8b0-efd4bd89a6cc tempest-ServerDiagnosticsNegativeTest-1581315091 tempest-ServerDiagnosticsNegativeTest-1581315091-project-member] [instance: 570fb8fe-391a-4f1b-be51-17979e9fb049] Build of instance 570fb8fe-391a-4f1b-be51-17979e9fb049 was re-scheduled: Binding failed for port b7ee1667-a38f-429e-acb4-c7171559db2b, please check neutron logs for more information. 
{{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 536.968054] env[61839]: DEBUG nova.compute.manager [None req-93209ce3-de5a-4c91-b8b0-efd4bd89a6cc tempest-ServerDiagnosticsNegativeTest-1581315091 tempest-ServerDiagnosticsNegativeTest-1581315091-project-member] [instance: 570fb8fe-391a-4f1b-be51-17979e9fb049] Unplugging VIFs for instance {{(pid=61839) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 536.968367] env[61839]: DEBUG oslo_concurrency.lockutils [None req-93209ce3-de5a-4c91-b8b0-efd4bd89a6cc tempest-ServerDiagnosticsNegativeTest-1581315091 tempest-ServerDiagnosticsNegativeTest-1581315091-project-member] Acquiring lock "refresh_cache-570fb8fe-391a-4f1b-be51-17979e9fb049" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 536.968524] env[61839]: DEBUG oslo_concurrency.lockutils [None req-93209ce3-de5a-4c91-b8b0-efd4bd89a6cc tempest-ServerDiagnosticsNegativeTest-1581315091 tempest-ServerDiagnosticsNegativeTest-1581315091-project-member] Acquired lock "refresh_cache-570fb8fe-391a-4f1b-be51-17979e9fb049" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 536.968684] env[61839]: DEBUG nova.network.neutron [None req-93209ce3-de5a-4c91-b8b0-efd4bd89a6cc tempest-ServerDiagnosticsNegativeTest-1581315091 tempest-ServerDiagnosticsNegativeTest-1581315091-project-member] [instance: 570fb8fe-391a-4f1b-be51-17979e9fb049] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 537.062616] env[61839]: DEBUG oslo_concurrency.lockutils [None req-67c06703-2c83-4f84-94a3-1193ea8f10d4 tempest-ImagesNegativeTestJSON-313379827 tempest-ImagesNegativeTestJSON-313379827-project-member] Lock "be976871-706d-41e1-9423-55ff251a52e9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.115s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 537.062858] env[61839]: DEBUG oslo_concurrency.lockutils [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Lock "be976871-706d-41e1-9423-55ff251a52e9" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 11.530s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 537.063060] env[61839]: INFO nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] [instance: be976871-706d-41e1-9423-55ff251a52e9] During sync_power_state the instance has a pending task (spawning). Skip. [ 537.063255] env[61839]: DEBUG oslo_concurrency.lockutils [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Lock "be976871-706d-41e1-9423-55ff251a52e9" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 537.142102] env[61839]: DEBUG nova.compute.manager [None req-e2e7743f-119e-4c3a-a685-1f1242f17cae tempest-VolumesAssistedSnapshotsTest-1863052206 tempest-VolumesAssistedSnapshotsTest-1863052206-project-member] [instance: 9594f132-d558-4c75-872f-b1d1b7c08f66] Start spawning the instance on the hypervisor. 
{{(pid=61839) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 537.146639] env[61839]: INFO nova.compute.manager [None req-f8fd2f30-7a32-47b5-822d-e89a7c694abc tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] [instance: 41823a25-5ff2-4838-854d-5bada8e5daca] Took 17.27 seconds to build instance. [ 537.186473] env[61839]: DEBUG nova.virt.hardware [None req-e2e7743f-119e-4c3a-a685-1f1242f17cae tempest-VolumesAssistedSnapshotsTest-1863052206 tempest-VolumesAssistedSnapshotsTest-1863052206-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-18T16:51:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-18T16:50:57Z,direct_url=,disk_format='vmdk',id=e497cc62-282a-4a70-9770-22d80d8a1013,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a98e034276e44534a9feace637762da7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-18T16:50:58Z,virtual_size=,visibility=), allow threads: False {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 537.186473] env[61839]: DEBUG nova.virt.hardware [None req-e2e7743f-119e-4c3a-a685-1f1242f17cae tempest-VolumesAssistedSnapshotsTest-1863052206 tempest-VolumesAssistedSnapshotsTest-1863052206-project-member] Flavor limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 537.186473] env[61839]: DEBUG nova.virt.hardware [None req-e2e7743f-119e-4c3a-a685-1f1242f17cae tempest-VolumesAssistedSnapshotsTest-1863052206 tempest-VolumesAssistedSnapshotsTest-1863052206-project-member] Image limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 537.186765] env[61839]: DEBUG nova.virt.hardware [None req-e2e7743f-119e-4c3a-a685-1f1242f17cae tempest-VolumesAssistedSnapshotsTest-1863052206 tempest-VolumesAssistedSnapshotsTest-1863052206-project-member] Flavor pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 537.186765] env[61839]: DEBUG nova.virt.hardware [None req-e2e7743f-119e-4c3a-a685-1f1242f17cae tempest-VolumesAssistedSnapshotsTest-1863052206 tempest-VolumesAssistedSnapshotsTest-1863052206-project-member] Image pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 537.186765] env[61839]: DEBUG nova.virt.hardware [None req-e2e7743f-119e-4c3a-a685-1f1242f17cae tempest-VolumesAssistedSnapshotsTest-1863052206 tempest-VolumesAssistedSnapshotsTest-1863052206-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 537.186765] env[61839]: DEBUG nova.virt.hardware [None req-e2e7743f-119e-4c3a-a685-1f1242f17cae tempest-VolumesAssistedSnapshotsTest-1863052206 tempest-VolumesAssistedSnapshotsTest-1863052206-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 
537.186886] env[61839]: DEBUG nova.virt.hardware [None req-e2e7743f-119e-4c3a-a685-1f1242f17cae tempest-VolumesAssistedSnapshotsTest-1863052206 tempest-VolumesAssistedSnapshotsTest-1863052206-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 537.187071] env[61839]: DEBUG nova.virt.hardware [None req-e2e7743f-119e-4c3a-a685-1f1242f17cae tempest-VolumesAssistedSnapshotsTest-1863052206 tempest-VolumesAssistedSnapshotsTest-1863052206-project-member] Got 1 possible topologies {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 537.187200] env[61839]: DEBUG nova.virt.hardware [None req-e2e7743f-119e-4c3a-a685-1f1242f17cae tempest-VolumesAssistedSnapshotsTest-1863052206 tempest-VolumesAssistedSnapshotsTest-1863052206-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 537.187371] env[61839]: DEBUG nova.virt.hardware [None req-e2e7743f-119e-4c3a-a685-1f1242f17cae tempest-VolumesAssistedSnapshotsTest-1863052206 tempest-VolumesAssistedSnapshotsTest-1863052206-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 537.188250] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fae058c-e561-407a-a764-7c14753efe23 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 537.196826] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd80fa7b-aa43-4dfa-896e-607a1f0edb34 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 537.404432] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314239, 'name': CreateVM_Task, 'duration_secs': 0.497681} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 537.404432] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 211e8267-3c33-42c8-852f-1c20d7987453] Created VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 537.404432] env[61839]: DEBUG oslo_concurrency.lockutils [None req-540fdcb6-3f39-4f2c-bc6d-6e0b40f83f5f tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 537.404432] env[61839]: DEBUG oslo_concurrency.lockutils [None req-540fdcb6-3f39-4f2c-bc6d-6e0b40f83f5f tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 537.404432] env[61839]: DEBUG oslo_concurrency.lockutils [None req-540fdcb6-3f39-4f2c-bc6d-6e0b40f83f5f tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 537.404432] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-53887d66-5b07-47e5-8806-a0c91a6b6337 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 537.409023] env[61839]: DEBUG oslo_vmware.api [None req-540fdcb6-3f39-4f2c-bc6d-6e0b40f83f5f tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Waiting for the task: (returnval){ [ 537.409023] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52b380da-8bb5-a0b4-ac64-17a491542ec5" [ 537.409023] env[61839]: _type = "Task" [ 537.409023] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 537.429071] env[61839]: DEBUG oslo_vmware.api [None req-540fdcb6-3f39-4f2c-bc6d-6e0b40f83f5f tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52b380da-8bb5-a0b4-ac64-17a491542ec5, 'name': SearchDatastore_Task, 'duration_secs': 0.008403} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 537.429071] env[61839]: DEBUG oslo_concurrency.lockutils [None req-540fdcb6-3f39-4f2c-bc6d-6e0b40f83f5f tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 537.429071] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-540fdcb6-3f39-4f2c-bc6d-6e0b40f83f5f tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] [instance: 211e8267-3c33-42c8-852f-1c20d7987453] Processing image e497cc62-282a-4a70-9770-22d80d8a1013 {{(pid=61839) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 537.429071] env[61839]: DEBUG oslo_concurrency.lockutils [None req-540fdcb6-3f39-4f2c-bc6d-6e0b40f83f5f tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 537.429326] env[61839]: DEBUG oslo_concurrency.lockutils [None req-540fdcb6-3f39-4f2c-bc6d-6e0b40f83f5f tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 537.429326] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-540fdcb6-3f39-4f2c-bc6d-6e0b40f83f5f tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 537.429326] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e9a3774e-1206-4f40-9f2e-f8abbbf95954 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 537.435969] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-540fdcb6-3f39-4f2c-bc6d-6e0b40f83f5f tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 537.436223] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-540fdcb6-3f39-4f2c-bc6d-6e0b40f83f5f tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61839) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 537.437035] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0e653372-6250-4e85-a55a-1c7bb4da11aa {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 537.445872] env[61839]: DEBUG oslo_vmware.api [None req-540fdcb6-3f39-4f2c-bc6d-6e0b40f83f5f tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Waiting for the task: (returnval){ [ 537.445872] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52a8c25f-72c9-369d-9a46-1953c0852bff" [ 537.445872] env[61839]: _type = "Task" [ 537.445872] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 537.454103] env[61839]: DEBUG oslo_vmware.api [None req-540fdcb6-3f39-4f2c-bc6d-6e0b40f83f5f tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52a8c25f-72c9-369d-9a46-1953c0852bff, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 537.523012] env[61839]: DEBUG nova.network.neutron [None req-93209ce3-de5a-4c91-b8b0-efd4bd89a6cc tempest-ServerDiagnosticsNegativeTest-1581315091 tempest-ServerDiagnosticsNegativeTest-1581315091-project-member] [instance: 570fb8fe-391a-4f1b-be51-17979e9fb049] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 537.654578] env[61839]: DEBUG oslo_concurrency.lockutils [None req-f8fd2f30-7a32-47b5-822d-e89a7c694abc tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] Lock "41823a25-5ff2-4838-854d-5bada8e5daca" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.786s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 537.654715] env[61839]: DEBUG oslo_concurrency.lockutils [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Lock "41823a25-5ff2-4838-854d-5bada8e5daca" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 12.121s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 537.655725] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e7d331c-8588-4a71-b97d-bf987feb02b1 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 537.754208] env[61839]: DEBUG nova.network.neutron [None req-93209ce3-de5a-4c91-b8b0-efd4bd89a6cc tempest-ServerDiagnosticsNegativeTest-1581315091 tempest-ServerDiagnosticsNegativeTest-1581315091-project-member] [instance: 570fb8fe-391a-4f1b-be51-17979e9fb049] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 537.835898] env[61839]: DEBUG oslo_concurrency.lockutils [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] Acquiring lock "ec27ab37-2351-4ad1-b41f-8de9bfab8b9d" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 537.839284] env[61839]: DEBUG oslo_concurrency.lockutils [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] Lock "ec27ab37-2351-4ad1-b41f-8de9bfab8b9d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 537.864772] env[61839]: DEBUG oslo_concurrency.lockutils [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] Acquiring lock "29dcaaa2-04fe-4835-acc9-41c433e6165f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 537.864772] env[61839]: DEBUG oslo_concurrency.lockutils [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] Lock "29dcaaa2-04fe-4835-acc9-41c433e6165f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 537.897924] env[61839]: DEBUG oslo_concurrency.lockutils [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] Acquiring lock "89843511-d201-431b-918d-e789e38e4f68" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 537.898161] env[61839]: DEBUG oslo_concurrency.lockutils [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] Lock "89843511-d201-431b-918d-e789e38e4f68" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 537.957477] env[61839]: DEBUG oslo_vmware.api [None req-540fdcb6-3f39-4f2c-bc6d-6e0b40f83f5f tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52a8c25f-72c9-369d-9a46-1953c0852bff, 'name': SearchDatastore_Task, 'duration_secs': 0.008373} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 537.958069] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-85cf3f1f-2b96-4cf9-aa9f-95187f625498 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 537.963322] env[61839]: DEBUG oslo_vmware.api [None req-540fdcb6-3f39-4f2c-bc6d-6e0b40f83f5f tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Waiting for the task: (returnval){ [ 537.963322] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52d5d53f-8417-4e70-5057-2d092f629f1a" [ 537.963322] env[61839]: _type = "Task" [ 537.963322] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 537.971421] env[61839]: DEBUG oslo_vmware.api [None req-540fdcb6-3f39-4f2c-bc6d-6e0b40f83f5f tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52d5d53f-8417-4e70-5057-2d092f629f1a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 538.171491] env[61839]: DEBUG oslo_concurrency.lockutils [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Lock "41823a25-5ff2-4838-854d-5bada8e5daca" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.517s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 538.190960] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2482a49-1eff-4859-86ca-39b5cd180910 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 538.201529] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47951bbd-fa5e-4f0a-bded-e32f0d560e7e {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 538.236155] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-358113aa-0a83-4b32-b925-d3e480a73f89 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 538.243867] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c7a5a89-00a5-494d-a134-1b2fa29eb4d1 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 538.257616] env[61839]: DEBUG oslo_concurrency.lockutils [None req-93209ce3-de5a-4c91-b8b0-efd4bd89a6cc tempest-ServerDiagnosticsNegativeTest-1581315091 tempest-ServerDiagnosticsNegativeTest-1581315091-project-member] Releasing lock "refresh_cache-570fb8fe-391a-4f1b-be51-17979e9fb049" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 538.257837] env[61839]: DEBUG nova.compute.manager [None req-93209ce3-de5a-4c91-b8b0-efd4bd89a6cc tempest-ServerDiagnosticsNegativeTest-1581315091 tempest-ServerDiagnosticsNegativeTest-1581315091-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61839) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 538.257998] env[61839]: DEBUG nova.compute.manager [None req-93209ce3-de5a-4c91-b8b0-efd4bd89a6cc tempest-ServerDiagnosticsNegativeTest-1581315091 tempest-ServerDiagnosticsNegativeTest-1581315091-project-member] [instance: 570fb8fe-391a-4f1b-be51-17979e9fb049] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 538.258178] env[61839]: DEBUG nova.network.neutron [None req-93209ce3-de5a-4c91-b8b0-efd4bd89a6cc tempest-ServerDiagnosticsNegativeTest-1581315091 tempest-ServerDiagnosticsNegativeTest-1581315091-project-member] [instance: 570fb8fe-391a-4f1b-be51-17979e9fb049] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 538.260036] env[61839]: DEBUG nova.compute.provider_tree [None req-4a74a28c-d68f-49fe-a9f7-0d900d2e4df2 tempest-ServersWithSpecificFlavorTestJSON-2009030967 tempest-ServersWithSpecificFlavorTestJSON-2009030967-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 538.290251] env[61839]: DEBUG nova.network.neutron [None req-93209ce3-de5a-4c91-b8b0-efd4bd89a6cc tempest-ServerDiagnosticsNegativeTest-1581315091 tempest-ServerDiagnosticsNegativeTest-1581315091-project-member] [instance: 570fb8fe-391a-4f1b-be51-17979e9fb049] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 538.345273] env[61839]: DEBUG nova.compute.manager [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] [instance: ec27ab37-2351-4ad1-b41f-8de9bfab8b9d] Starting instance... {{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 538.367172] env[61839]: DEBUG nova.compute.manager [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] [instance: 29dcaaa2-04fe-4835-acc9-41c433e6165f] Starting instance... {{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 538.400376] env[61839]: DEBUG nova.compute.manager [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] [instance: 89843511-d201-431b-918d-e789e38e4f68] Starting instance... {{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 538.479198] env[61839]: DEBUG oslo_vmware.api [None req-540fdcb6-3f39-4f2c-bc6d-6e0b40f83f5f tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52d5d53f-8417-4e70-5057-2d092f629f1a, 'name': SearchDatastore_Task, 'duration_secs': 0.008179} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 538.480491] env[61839]: DEBUG oslo_concurrency.lockutils [None req-540fdcb6-3f39-4f2c-bc6d-6e0b40f83f5f tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 538.480573] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-540fdcb6-3f39-4f2c-bc6d-6e0b40f83f5f tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk to [datastore1] 211e8267-3c33-42c8-852f-1c20d7987453/211e8267-3c33-42c8-852f-1c20d7987453.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 538.481029] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e54802b9-83ca-4204-96fc-7a12ba5d1d00 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 538.490384] env[61839]: DEBUG oslo_vmware.api [None req-540fdcb6-3f39-4f2c-bc6d-6e0b40f83f5f tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Waiting for the task: (returnval){ [ 538.490384] env[61839]: value = "task-1314240" [ 538.490384] env[61839]: _type = "Task" [ 538.490384] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 538.502261] env[61839]: DEBUG oslo_vmware.api [None req-540fdcb6-3f39-4f2c-bc6d-6e0b40f83f5f tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Task: {'id': task-1314240, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 538.763781] env[61839]: DEBUG nova.scheduler.client.report [None req-4a74a28c-d68f-49fe-a9f7-0d900d2e4df2 tempest-ServersWithSpecificFlavorTestJSON-2009030967 tempest-ServersWithSpecificFlavorTestJSON-2009030967-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 538.794206] env[61839]: DEBUG nova.network.neutron [None req-93209ce3-de5a-4c91-b8b0-efd4bd89a6cc tempest-ServerDiagnosticsNegativeTest-1581315091 tempest-ServerDiagnosticsNegativeTest-1581315091-project-member] [instance: 570fb8fe-391a-4f1b-be51-17979e9fb049] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 538.886543] env[61839]: DEBUG oslo_concurrency.lockutils [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 538.904302] env[61839]: DEBUG oslo_concurrency.lockutils [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 538.931093] env[61839]: DEBUG oslo_concurrency.lockutils [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 539.007100] env[61839]: DEBUG oslo_vmware.api [None req-540fdcb6-3f39-4f2c-bc6d-6e0b40f83f5f tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Task: {'id': task-1314240, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.465827} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 539.007879] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-540fdcb6-3f39-4f2c-bc6d-6e0b40f83f5f tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk to [datastore1] 211e8267-3c33-42c8-852f-1c20d7987453/211e8267-3c33-42c8-852f-1c20d7987453.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 539.008603] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-540fdcb6-3f39-4f2c-bc6d-6e0b40f83f5f tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] [instance: 211e8267-3c33-42c8-852f-1c20d7987453] Extending root virtual disk to 1048576 {{(pid=61839) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 539.008979] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f5c42d13-05c0-4ac0-91c7-8b583533301e {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 539.020912] env[61839]: DEBUG oslo_concurrency.lockutils [None req-baec407e-7ec5-474d-acf6-1dc7891e7887 tempest-ImagesOneServerNegativeTestJSON-734658384 tempest-ImagesOneServerNegativeTestJSON-734658384-project-member] Acquiring lock "0b78efda-51c7-4d51-be31-005ff0d44ede" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 539.020912] env[61839]: DEBUG oslo_concurrency.lockutils [None req-baec407e-7ec5-474d-acf6-1dc7891e7887 tempest-ImagesOneServerNegativeTestJSON-734658384 tempest-ImagesOneServerNegativeTestJSON-734658384-project-member] Lock "0b78efda-51c7-4d51-be31-005ff0d44ede" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 539.020912] env[61839]: DEBUG oslo_vmware.api [None req-540fdcb6-3f39-4f2c-bc6d-6e0b40f83f5f tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Waiting for the task: (returnval){ [ 539.020912] env[61839]: value = "task-1314241" [ 539.020912] env[61839]: _type = "Task" [ 539.020912] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 539.032967] env[61839]: DEBUG oslo_vmware.api [None req-540fdcb6-3f39-4f2c-bc6d-6e0b40f83f5f tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Task: {'id': task-1314241, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 539.271095] env[61839]: DEBUG oslo_concurrency.lockutils [None req-4a74a28c-d68f-49fe-a9f7-0d900d2e4df2 tempest-ServersWithSpecificFlavorTestJSON-2009030967 tempest-ServersWithSpecificFlavorTestJSON-2009030967-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.309s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 539.273880] env[61839]: DEBUG nova.compute.manager [None req-4a74a28c-d68f-49fe-a9f7-0d900d2e4df2 tempest-ServersWithSpecificFlavorTestJSON-2009030967 tempest-ServersWithSpecificFlavorTestJSON-2009030967-project-member] [instance: d0ed5c81-e05a-41a3-9e45-ae0a2a235f16] Start building networks asynchronously for instance. {{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 539.289025] env[61839]: DEBUG oslo_concurrency.lockutils [None req-285e2342-47a9-4a7d-855a-b08b1b98031b tempest-MigrationsAdminTest-1877210458 tempest-MigrationsAdminTest-1877210458-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.667s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 539.289025] env[61839]: INFO nova.compute.claims [None req-285e2342-47a9-4a7d-855a-b08b1b98031b tempest-MigrationsAdminTest-1877210458 tempest-MigrationsAdminTest-1877210458-project-member] [instance: 248ced8d-af78-4400-b9eb-449851b1bfa3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 539.297146] env[61839]: INFO nova.compute.manager [None req-93209ce3-de5a-4c91-b8b0-efd4bd89a6cc tempest-ServerDiagnosticsNegativeTest-1581315091 tempest-ServerDiagnosticsNegativeTest-1581315091-project-member] [instance: 570fb8fe-391a-4f1b-be51-17979e9fb049] Took 1.04 seconds to deallocate network for instance. [ 539.470898] env[61839]: ERROR nova.compute.manager [None req-e2e7743f-119e-4c3a-a685-1f1242f17cae tempest-VolumesAssistedSnapshotsTest-1863052206 tempest-VolumesAssistedSnapshotsTest-1863052206-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 9c3b8ee6-c7b3-473b-99ac-0212bfa57f3e, please check neutron logs for more information. 
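The traceback below follows the same path as the earlier failure for instance 570fb8fe: Neutron reports the bind result on the port, Nova's _ensure_no_port_binding_failure converts a failed binding into PortBindingFailed, and oslo.utils' save_and_reraise_exception preserves the original error while cleanup runs. A condensed sketch of that pattern (the binding:vif_type attribute is Neutron's; the surrounding helpers are simplified stand-ins):

    from oslo_utils import excutils

    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__("Binding failed for port %s, please check "
                             "neutron logs for more information." % port_id)

    def _ensure_no_port_binding_failure(port):
        # Neutron marks a failed binding by setting binding:vif_type to
        # 'binding_failed'; Nova translates that into PortBindingFailed.
        if port.get('binding:vif_type') == 'binding_failed':
            raise PortBindingFailed(port['id'])

    def update_ports_for_instance(client, ports, cleanup):
        created = []
        for port in ports:
            try:
                updated = client.update_port(port)      # Neutron API call
                _ensure_no_port_binding_failure(updated)
                created.append(updated['id'])
            except Exception:
                # Cleanup runs inside the context manager; the original
                # exception is re-raised when the 'with' block exits,
                # which is the force_reraise() frame in the traceback.
                with excutils.save_and_reraise_exception():
                    cleanup(client, created)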
[ 539.470898] env[61839]: ERROR nova.compute.manager Traceback (most recent call last): [ 539.470898] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 539.470898] env[61839]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 539.470898] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 539.470898] env[61839]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 539.470898] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 539.470898] env[61839]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 539.470898] env[61839]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 539.470898] env[61839]: ERROR nova.compute.manager self.force_reraise() [ 539.470898] env[61839]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 539.470898] env[61839]: ERROR nova.compute.manager raise self.value [ 539.470898] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 539.470898] env[61839]: ERROR nova.compute.manager updated_port = self._update_port( [ 539.470898] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 539.470898] env[61839]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 539.472043] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 539.472043] env[61839]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 539.472043] env[61839]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 9c3b8ee6-c7b3-473b-99ac-0212bfa57f3e, please check neutron logs for more information. 
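The Acquiring/acquired/released records that bracket this failure (for example "compute_resources" held 1.840s above, and the per-instance build locks below) come from oslo.concurrency's lockutils wrapper, which times how long each caller waited for a named lock and how long it held it. A short sketch of the two forms Nova uses, with hypothetical bodies:

    from oslo_concurrency import lockutils

    # Decorator form: serializes resource-tracker claims under one named
    # lock, producing the waited/held timings recorded in this log.
    @lockutils.synchronized('compute_resources')
    def instance_claim(tracker, instance):
        return tracker.claim(instance)            # hypothetical bookkeeping

    # Context-manager form, as used for the per-instance network cache:
    def refresh_cache(instance_uuid, rebuild):
        with lockutils.lock('refresh_cache-%s' % instance_uuid):
            return rebuild(instance_uuid)         # hypothetical rebuild helper

The hub-level traceback for the same port-binding failure continues below.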
[ 539.472043] env[61839]: ERROR nova.compute.manager [ 539.472043] env[61839]: Traceback (most recent call last): [ 539.472043] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 539.472043] env[61839]: listener.cb(fileno) [ 539.472043] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 539.472043] env[61839]: result = function(*args, **kwargs) [ 539.472043] env[61839]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 539.472043] env[61839]: return func(*args, **kwargs) [ 539.472043] env[61839]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 539.472043] env[61839]: raise e [ 539.472043] env[61839]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 539.472043] env[61839]: nwinfo = self.network_api.allocate_for_instance( [ 539.472043] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 539.472043] env[61839]: created_port_ids = self._update_ports_for_instance( [ 539.472043] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 539.472043] env[61839]: with excutils.save_and_reraise_exception(): [ 539.472043] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 539.472043] env[61839]: self.force_reraise() [ 539.472043] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 539.472043] env[61839]: raise self.value [ 539.472043] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 539.472043] env[61839]: updated_port = self._update_port( [ 539.472043] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 539.472043] env[61839]: _ensure_no_port_binding_failure(port) [ 539.472043] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 539.472043] env[61839]: raise exception.PortBindingFailed(port_id=port['id']) [ 539.473069] env[61839]: nova.exception.PortBindingFailed: Binding failed for port 9c3b8ee6-c7b3-473b-99ac-0212bfa57f3e, please check neutron logs for more information. [ 539.473069] env[61839]: Removing descriptor: 16 [ 539.473069] env[61839]: ERROR nova.compute.manager [None req-e2e7743f-119e-4c3a-a685-1f1242f17cae tempest-VolumesAssistedSnapshotsTest-1863052206 tempest-VolumesAssistedSnapshotsTest-1863052206-project-member] [instance: 9594f132-d558-4c75-872f-b1d1b7c08f66] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 9c3b8ee6-c7b3-473b-99ac-0212bfa57f3e, please check neutron logs for more information. 
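The spawn-side traceback that follows shows why the network failure surfaces inside build_virtual_machine: Nova allocates ports in a separate greenthread and hands spawn a lazy network_info wrapper, so the PortBindingFailed raised in _allocate_network_async only re-surfaces when get_vif_info first iterates the result. A minimal sketch of that wrapper idea, assuming eventlet (class and method names hypothetical):

    import eventlet

    class NetworkInfoAsync:
        # Start port allocation immediately, but let callers treat the
        # result like a list; the first use blocks on the greenthread.
        def __init__(self, allocate, *args):
            self._gt = eventlet.spawn(allocate, *args)
            self._done = False
            self._result = None

        def wait(self):
            if not self._done:
                # GreenThread.wait() re-raises any exception from
                # allocate(), which is how PortBindingFailed reappears
                # in the __iter__ -> wait() frames of the traceback below.
                self._result = self._gt.wait()
                self._done = True
            return self._result

        def __iter__(self):
            return iter(self.wait())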
[ 539.473069] env[61839]: ERROR nova.compute.manager [instance: 9594f132-d558-4c75-872f-b1d1b7c08f66] Traceback (most recent call last): [ 539.473069] env[61839]: ERROR nova.compute.manager [instance: 9594f132-d558-4c75-872f-b1d1b7c08f66] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 539.473069] env[61839]: ERROR nova.compute.manager [instance: 9594f132-d558-4c75-872f-b1d1b7c08f66] yield resources [ 539.473069] env[61839]: ERROR nova.compute.manager [instance: 9594f132-d558-4c75-872f-b1d1b7c08f66] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 539.473069] env[61839]: ERROR nova.compute.manager [instance: 9594f132-d558-4c75-872f-b1d1b7c08f66] self.driver.spawn(context, instance, image_meta, [ 539.473069] env[61839]: ERROR nova.compute.manager [instance: 9594f132-d558-4c75-872f-b1d1b7c08f66] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 539.473069] env[61839]: ERROR nova.compute.manager [instance: 9594f132-d558-4c75-872f-b1d1b7c08f66] self._vmops.spawn(context, instance, image_meta, injected_files, [ 539.473069] env[61839]: ERROR nova.compute.manager [instance: 9594f132-d558-4c75-872f-b1d1b7c08f66] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 539.473069] env[61839]: ERROR nova.compute.manager [instance: 9594f132-d558-4c75-872f-b1d1b7c08f66] vm_ref = self.build_virtual_machine(instance, [ 539.473400] env[61839]: ERROR nova.compute.manager [instance: 9594f132-d558-4c75-872f-b1d1b7c08f66] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 539.473400] env[61839]: ERROR nova.compute.manager [instance: 9594f132-d558-4c75-872f-b1d1b7c08f66] vif_infos = vmwarevif.get_vif_info(self._session, [ 539.473400] env[61839]: ERROR nova.compute.manager [instance: 9594f132-d558-4c75-872f-b1d1b7c08f66] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 539.473400] env[61839]: ERROR nova.compute.manager [instance: 9594f132-d558-4c75-872f-b1d1b7c08f66] for vif in network_info: [ 539.473400] env[61839]: ERROR nova.compute.manager [instance: 9594f132-d558-4c75-872f-b1d1b7c08f66] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 539.473400] env[61839]: ERROR nova.compute.manager [instance: 9594f132-d558-4c75-872f-b1d1b7c08f66] return self._sync_wrapper(fn, *args, **kwargs) [ 539.473400] env[61839]: ERROR nova.compute.manager [instance: 9594f132-d558-4c75-872f-b1d1b7c08f66] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 539.473400] env[61839]: ERROR nova.compute.manager [instance: 9594f132-d558-4c75-872f-b1d1b7c08f66] self.wait() [ 539.473400] env[61839]: ERROR nova.compute.manager [instance: 9594f132-d558-4c75-872f-b1d1b7c08f66] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 539.473400] env[61839]: ERROR nova.compute.manager [instance: 9594f132-d558-4c75-872f-b1d1b7c08f66] self[:] = self._gt.wait() [ 539.473400] env[61839]: ERROR nova.compute.manager [instance: 9594f132-d558-4c75-872f-b1d1b7c08f66] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 539.473400] env[61839]: ERROR nova.compute.manager [instance: 9594f132-d558-4c75-872f-b1d1b7c08f66] return self._exit_event.wait() [ 539.473400] env[61839]: ERROR nova.compute.manager [instance: 9594f132-d558-4c75-872f-b1d1b7c08f66] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 539.473723] env[61839]: ERROR 
nova.compute.manager [instance: 9594f132-d558-4c75-872f-b1d1b7c08f66] result = hub.switch() [ 539.473723] env[61839]: ERROR nova.compute.manager [instance: 9594f132-d558-4c75-872f-b1d1b7c08f66] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 539.473723] env[61839]: ERROR nova.compute.manager [instance: 9594f132-d558-4c75-872f-b1d1b7c08f66] return self.greenlet.switch() [ 539.473723] env[61839]: ERROR nova.compute.manager [instance: 9594f132-d558-4c75-872f-b1d1b7c08f66] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 539.473723] env[61839]: ERROR nova.compute.manager [instance: 9594f132-d558-4c75-872f-b1d1b7c08f66] result = function(*args, **kwargs) [ 539.473723] env[61839]: ERROR nova.compute.manager [instance: 9594f132-d558-4c75-872f-b1d1b7c08f66] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 539.473723] env[61839]: ERROR nova.compute.manager [instance: 9594f132-d558-4c75-872f-b1d1b7c08f66] return func(*args, **kwargs) [ 539.473723] env[61839]: ERROR nova.compute.manager [instance: 9594f132-d558-4c75-872f-b1d1b7c08f66] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 539.473723] env[61839]: ERROR nova.compute.manager [instance: 9594f132-d558-4c75-872f-b1d1b7c08f66] raise e [ 539.473723] env[61839]: ERROR nova.compute.manager [instance: 9594f132-d558-4c75-872f-b1d1b7c08f66] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 539.473723] env[61839]: ERROR nova.compute.manager [instance: 9594f132-d558-4c75-872f-b1d1b7c08f66] nwinfo = self.network_api.allocate_for_instance( [ 539.473723] env[61839]: ERROR nova.compute.manager [instance: 9594f132-d558-4c75-872f-b1d1b7c08f66] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 539.473723] env[61839]: ERROR nova.compute.manager [instance: 9594f132-d558-4c75-872f-b1d1b7c08f66] created_port_ids = self._update_ports_for_instance( [ 539.474086] env[61839]: ERROR nova.compute.manager [instance: 9594f132-d558-4c75-872f-b1d1b7c08f66] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 539.474086] env[61839]: ERROR nova.compute.manager [instance: 9594f132-d558-4c75-872f-b1d1b7c08f66] with excutils.save_and_reraise_exception(): [ 539.474086] env[61839]: ERROR nova.compute.manager [instance: 9594f132-d558-4c75-872f-b1d1b7c08f66] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 539.474086] env[61839]: ERROR nova.compute.manager [instance: 9594f132-d558-4c75-872f-b1d1b7c08f66] self.force_reraise() [ 539.474086] env[61839]: ERROR nova.compute.manager [instance: 9594f132-d558-4c75-872f-b1d1b7c08f66] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 539.474086] env[61839]: ERROR nova.compute.manager [instance: 9594f132-d558-4c75-872f-b1d1b7c08f66] raise self.value [ 539.474086] env[61839]: ERROR nova.compute.manager [instance: 9594f132-d558-4c75-872f-b1d1b7c08f66] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 539.474086] env[61839]: ERROR nova.compute.manager [instance: 9594f132-d558-4c75-872f-b1d1b7c08f66] updated_port = self._update_port( [ 539.474086] env[61839]: ERROR nova.compute.manager [instance: 9594f132-d558-4c75-872f-b1d1b7c08f66] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 539.474086] 
env[61839]: ERROR nova.compute.manager [instance: 9594f132-d558-4c75-872f-b1d1b7c08f66] _ensure_no_port_binding_failure(port) [ 539.474086] env[61839]: ERROR nova.compute.manager [instance: 9594f132-d558-4c75-872f-b1d1b7c08f66] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 539.474086] env[61839]: ERROR nova.compute.manager [instance: 9594f132-d558-4c75-872f-b1d1b7c08f66] raise exception.PortBindingFailed(port_id=port['id']) [ 539.474406] env[61839]: ERROR nova.compute.manager [instance: 9594f132-d558-4c75-872f-b1d1b7c08f66] nova.exception.PortBindingFailed: Binding failed for port 9c3b8ee6-c7b3-473b-99ac-0212bfa57f3e, please check neutron logs for more information. [ 539.474406] env[61839]: ERROR nova.compute.manager [instance: 9594f132-d558-4c75-872f-b1d1b7c08f66] [ 539.474406] env[61839]: INFO nova.compute.manager [None req-e2e7743f-119e-4c3a-a685-1f1242f17cae tempest-VolumesAssistedSnapshotsTest-1863052206 tempest-VolumesAssistedSnapshotsTest-1863052206-project-member] [instance: 9594f132-d558-4c75-872f-b1d1b7c08f66] Terminating instance [ 539.474406] env[61839]: DEBUG oslo_concurrency.lockutils [None req-e2e7743f-119e-4c3a-a685-1f1242f17cae tempest-VolumesAssistedSnapshotsTest-1863052206 tempest-VolumesAssistedSnapshotsTest-1863052206-project-member] Acquiring lock "refresh_cache-9594f132-d558-4c75-872f-b1d1b7c08f66" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 539.474512] env[61839]: DEBUG oslo_concurrency.lockutils [None req-e2e7743f-119e-4c3a-a685-1f1242f17cae tempest-VolumesAssistedSnapshotsTest-1863052206 tempest-VolumesAssistedSnapshotsTest-1863052206-project-member] Acquired lock "refresh_cache-9594f132-d558-4c75-872f-b1d1b7c08f66" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 539.475330] env[61839]: DEBUG nova.network.neutron [None req-e2e7743f-119e-4c3a-a685-1f1242f17cae tempest-VolumesAssistedSnapshotsTest-1863052206 tempest-VolumesAssistedSnapshotsTest-1863052206-project-member] [instance: 9594f132-d558-4c75-872f-b1d1b7c08f66] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 539.533021] env[61839]: DEBUG oslo_vmware.api [None req-540fdcb6-3f39-4f2c-bc6d-6e0b40f83f5f tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Task: {'id': task-1314241, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066391} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 539.533021] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-540fdcb6-3f39-4f2c-bc6d-6e0b40f83f5f tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] [instance: 211e8267-3c33-42c8-852f-1c20d7987453] Extended root virtual disk {{(pid=61839) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 539.533021] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-074401a0-fc60-460f-b000-ce86ce3cb970 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 539.554067] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-540fdcb6-3f39-4f2c-bc6d-6e0b40f83f5f tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] [instance: 211e8267-3c33-42c8-852f-1c20d7987453] Reconfiguring VM instance instance-00000004 to attach disk [datastore1] 211e8267-3c33-42c8-852f-1c20d7987453/211e8267-3c33-42c8-852f-1c20d7987453.vmdk or device None with type sparse {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 539.554876] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-aedd2b17-753c-4d7e-bb9c-8cdaca3d606d {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 539.575904] env[61839]: DEBUG oslo_vmware.api [None req-540fdcb6-3f39-4f2c-bc6d-6e0b40f83f5f tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Waiting for the task: (returnval){ [ 539.575904] env[61839]: value = "task-1314242" [ 539.575904] env[61839]: _type = "Task" [ 539.575904] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 539.584475] env[61839]: DEBUG oslo_vmware.api [None req-540fdcb6-3f39-4f2c-bc6d-6e0b40f83f5f tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Task: {'id': task-1314242, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 539.785280] env[61839]: DEBUG nova.compute.utils [None req-4a74a28c-d68f-49fe-a9f7-0d900d2e4df2 tempest-ServersWithSpecificFlavorTestJSON-2009030967 tempest-ServersWithSpecificFlavorTestJSON-2009030967-project-member] Using /dev/sd instead of None {{(pid=61839) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 539.786688] env[61839]: DEBUG nova.compute.manager [None req-4a74a28c-d68f-49fe-a9f7-0d900d2e4df2 tempest-ServersWithSpecificFlavorTestJSON-2009030967 tempest-ServersWithSpecificFlavorTestJSON-2009030967-project-member] [instance: d0ed5c81-e05a-41a3-9e45-ae0a2a235f16] Allocating IP information in the background. 
{{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 539.786864] env[61839]: DEBUG nova.network.neutron [None req-4a74a28c-d68f-49fe-a9f7-0d900d2e4df2 tempest-ServersWithSpecificFlavorTestJSON-2009030967 tempest-ServersWithSpecificFlavorTestJSON-2009030967-project-member] [instance: d0ed5c81-e05a-41a3-9e45-ae0a2a235f16] allocate_for_instance() {{(pid=61839) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 539.872859] env[61839]: DEBUG nova.policy [None req-4a74a28c-d68f-49fe-a9f7-0d900d2e4df2 tempest-ServersWithSpecificFlavorTestJSON-2009030967 tempest-ServersWithSpecificFlavorTestJSON-2009030967-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '930bae29e95443f297b6eb0b5f6083ce', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '10af9c3ee00140a586ea14c289f4d138', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61839) authorize /opt/stack/nova/nova/policy.py:201}} [ 540.051976] env[61839]: DEBUG nova.network.neutron [None req-e2e7743f-119e-4c3a-a685-1f1242f17cae tempest-VolumesAssistedSnapshotsTest-1863052206 tempest-VolumesAssistedSnapshotsTest-1863052206-project-member] [instance: 9594f132-d558-4c75-872f-b1d1b7c08f66] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 540.089092] env[61839]: DEBUG oslo_vmware.api [None req-540fdcb6-3f39-4f2c-bc6d-6e0b40f83f5f tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Task: {'id': task-1314242, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 540.290928] env[61839]: DEBUG nova.compute.manager [None req-4a74a28c-d68f-49fe-a9f7-0d900d2e4df2 tempest-ServersWithSpecificFlavorTestJSON-2009030967 tempest-ServersWithSpecificFlavorTestJSON-2009030967-project-member] [instance: d0ed5c81-e05a-41a3-9e45-ae0a2a235f16] Start building block device mappings for instance. 
{{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 540.339895] env[61839]: INFO nova.scheduler.client.report [None req-93209ce3-de5a-4c91-b8b0-efd4bd89a6cc tempest-ServerDiagnosticsNegativeTest-1581315091 tempest-ServerDiagnosticsNegativeTest-1581315091-project-member] Deleted allocations for instance 570fb8fe-391a-4f1b-be51-17979e9fb049 [ 540.536640] env[61839]: DEBUG nova.network.neutron [None req-e2e7743f-119e-4c3a-a685-1f1242f17cae tempest-VolumesAssistedSnapshotsTest-1863052206 tempest-VolumesAssistedSnapshotsTest-1863052206-project-member] [instance: 9594f132-d558-4c75-872f-b1d1b7c08f66] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 540.559873] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f536684c-5dc4-4acc-8e6c-c6645b0ee3f0 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 540.575022] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90757488-ba75-4fc2-abf1-1f1b8dbad94a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 540.616913] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbe9e0ec-f599-4572-a327-8d1a18136265 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 540.621955] env[61839]: DEBUG oslo_vmware.api [None req-540fdcb6-3f39-4f2c-bc6d-6e0b40f83f5f tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Task: {'id': task-1314242, 'name': ReconfigVM_Task, 'duration_secs': 0.993597} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 540.622723] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-540fdcb6-3f39-4f2c-bc6d-6e0b40f83f5f tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] [instance: 211e8267-3c33-42c8-852f-1c20d7987453] Reconfigured VM instance instance-00000004 to attach disk [datastore1] 211e8267-3c33-42c8-852f-1c20d7987453/211e8267-3c33-42c8-852f-1c20d7987453.vmdk or device None with type sparse {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 540.623420] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-44913b5a-f576-42d8-a0f9-ff8f277c0328 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 540.628529] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-350c27ea-19ac-49ac-9e23-789f3e0435cd {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 540.633883] env[61839]: DEBUG oslo_vmware.api [None req-540fdcb6-3f39-4f2c-bc6d-6e0b40f83f5f tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Waiting for the task: (returnval){ [ 540.633883] env[61839]: value = "task-1314243" [ 540.633883] env[61839]: _type = "Task" [ 540.633883] env[61839]: } to complete. 
{{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 540.645870] env[61839]: DEBUG nova.compute.provider_tree [None req-285e2342-47a9-4a7d-855a-b08b1b98031b tempest-MigrationsAdminTest-1877210458 tempest-MigrationsAdminTest-1877210458-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 540.652673] env[61839]: DEBUG oslo_vmware.api [None req-540fdcb6-3f39-4f2c-bc6d-6e0b40f83f5f tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Task: {'id': task-1314243, 'name': Rename_Task} progress is 14%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 540.720348] env[61839]: DEBUG nova.network.neutron [None req-4a74a28c-d68f-49fe-a9f7-0d900d2e4df2 tempest-ServersWithSpecificFlavorTestJSON-2009030967 tempest-ServersWithSpecificFlavorTestJSON-2009030967-project-member] [instance: d0ed5c81-e05a-41a3-9e45-ae0a2a235f16] Successfully created port: 1e5d8b9c-cb09-4618-9e5b-c0f9a0e2583c {{(pid=61839) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 540.851762] env[61839]: DEBUG oslo_concurrency.lockutils [None req-93209ce3-de5a-4c91-b8b0-efd4bd89a6cc tempest-ServerDiagnosticsNegativeTest-1581315091 tempest-ServerDiagnosticsNegativeTest-1581315091-project-member] Lock "570fb8fe-391a-4f1b-be51-17979e9fb049" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 23.437s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 540.853251] env[61839]: DEBUG oslo_concurrency.lockutils [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Lock "570fb8fe-391a-4f1b-be51-17979e9fb049" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 15.320s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 540.853454] env[61839]: INFO nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] [instance: 570fb8fe-391a-4f1b-be51-17979e9fb049] During sync_power_state the instance has a pending task (spawning). Skip. [ 540.853628] env[61839]: DEBUG oslo_concurrency.lockutils [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Lock "570fb8fe-391a-4f1b-be51-17979e9fb049" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 541.042183] env[61839]: DEBUG oslo_concurrency.lockutils [None req-e2e7743f-119e-4c3a-a685-1f1242f17cae tempest-VolumesAssistedSnapshotsTest-1863052206 tempest-VolumesAssistedSnapshotsTest-1863052206-project-member] Releasing lock "refresh_cache-9594f132-d558-4c75-872f-b1d1b7c08f66" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 541.044215] env[61839]: DEBUG nova.compute.manager [None req-e2e7743f-119e-4c3a-a685-1f1242f17cae tempest-VolumesAssistedSnapshotsTest-1863052206 tempest-VolumesAssistedSnapshotsTest-1863052206-project-member] [instance: 9594f132-d558-4c75-872f-b1d1b7c08f66] Start destroying the instance on the hypervisor. 
{{(pid=61839) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 541.044215] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-e2e7743f-119e-4c3a-a685-1f1242f17cae tempest-VolumesAssistedSnapshotsTest-1863052206 tempest-VolumesAssistedSnapshotsTest-1863052206-project-member] [instance: 9594f132-d558-4c75-872f-b1d1b7c08f66] Destroying instance {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 541.044215] env[61839]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5b891617-ccc1-4ce4-851d-4c84be39fc0b {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 541.056336] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-913f2690-b45c-4490-996b-ff102a71def5 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 541.083020] env[61839]: WARNING nova.virt.vmwareapi.vmops [None req-e2e7743f-119e-4c3a-a685-1f1242f17cae tempest-VolumesAssistedSnapshotsTest-1863052206 tempest-VolumesAssistedSnapshotsTest-1863052206-project-member] [instance: 9594f132-d558-4c75-872f-b1d1b7c08f66] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 9594f132-d558-4c75-872f-b1d1b7c08f66 could not be found. [ 541.083020] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-e2e7743f-119e-4c3a-a685-1f1242f17cae tempest-VolumesAssistedSnapshotsTest-1863052206 tempest-VolumesAssistedSnapshotsTest-1863052206-project-member] [instance: 9594f132-d558-4c75-872f-b1d1b7c08f66] Instance destroyed {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 541.083020] env[61839]: INFO nova.compute.manager [None req-e2e7743f-119e-4c3a-a685-1f1242f17cae tempest-VolumesAssistedSnapshotsTest-1863052206 tempest-VolumesAssistedSnapshotsTest-1863052206-project-member] [instance: 9594f132-d558-4c75-872f-b1d1b7c08f66] Took 0.04 seconds to destroy the instance on the hypervisor. [ 541.083020] env[61839]: DEBUG oslo.service.loopingcall [None req-e2e7743f-119e-4c3a-a685-1f1242f17cae tempest-VolumesAssistedSnapshotsTest-1863052206 tempest-VolumesAssistedSnapshotsTest-1863052206-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 541.083020] env[61839]: DEBUG nova.compute.manager [-] [instance: 9594f132-d558-4c75-872f-b1d1b7c08f66] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 541.083020] env[61839]: DEBUG nova.network.neutron [-] [instance: 9594f132-d558-4c75-872f-b1d1b7c08f66] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 541.131588] env[61839]: DEBUG nova.network.neutron [-] [instance: 9594f132-d558-4c75-872f-b1d1b7c08f66] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 541.147051] env[61839]: DEBUG oslo_vmware.api [None req-540fdcb6-3f39-4f2c-bc6d-6e0b40f83f5f tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Task: {'id': task-1314243, 'name': Rename_Task, 'duration_secs': 0.145684} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 541.147928] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-540fdcb6-3f39-4f2c-bc6d-6e0b40f83f5f tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] [instance: 211e8267-3c33-42c8-852f-1c20d7987453] Powering on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 541.148753] env[61839]: DEBUG nova.scheduler.client.report [None req-285e2342-47a9-4a7d-855a-b08b1b98031b tempest-MigrationsAdminTest-1877210458 tempest-MigrationsAdminTest-1877210458-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 541.152552] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2dc12eeb-1a97-4e1b-9f06-76f237b6338e {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 541.159842] env[61839]: DEBUG oslo_vmware.api [None req-540fdcb6-3f39-4f2c-bc6d-6e0b40f83f5f tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Waiting for the task: (returnval){ [ 541.159842] env[61839]: value = "task-1314244" [ 541.159842] env[61839]: _type = "Task" [ 541.159842] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 541.172185] env[61839]: DEBUG oslo_vmware.api [None req-540fdcb6-3f39-4f2c-bc6d-6e0b40f83f5f tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Task: {'id': task-1314244, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 541.310492] env[61839]: DEBUG nova.compute.manager [None req-4a74a28c-d68f-49fe-a9f7-0d900d2e4df2 tempest-ServersWithSpecificFlavorTestJSON-2009030967 tempest-ServersWithSpecificFlavorTestJSON-2009030967-project-member] [instance: d0ed5c81-e05a-41a3-9e45-ae0a2a235f16] Start spawning the instance on the hypervisor. 
{{(pid=61839) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 541.342345] env[61839]: DEBUG nova.virt.hardware [None req-4a74a28c-d68f-49fe-a9f7-0d900d2e4df2 tempest-ServersWithSpecificFlavorTestJSON-2009030967 tempest-ServersWithSpecificFlavorTestJSON-2009030967-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-18T16:53:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='504058166',id=25,is_public=True,memory_mb=192,name='tempest-flavor_with_ephemeral_0-1076769309',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-18T16:50:57Z,direct_url=,disk_format='vmdk',id=e497cc62-282a-4a70-9770-22d80d8a1013,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a98e034276e44534a9feace637762da7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-18T16:50:58Z,virtual_size=,visibility=), allow threads: False {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 541.342568] env[61839]: DEBUG nova.virt.hardware [None req-4a74a28c-d68f-49fe-a9f7-0d900d2e4df2 tempest-ServersWithSpecificFlavorTestJSON-2009030967 tempest-ServersWithSpecificFlavorTestJSON-2009030967-project-member] Flavor limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 541.342734] env[61839]: DEBUG nova.virt.hardware [None req-4a74a28c-d68f-49fe-a9f7-0d900d2e4df2 tempest-ServersWithSpecificFlavorTestJSON-2009030967 tempest-ServersWithSpecificFlavorTestJSON-2009030967-project-member] Image limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 541.342926] env[61839]: DEBUG nova.virt.hardware [None req-4a74a28c-d68f-49fe-a9f7-0d900d2e4df2 tempest-ServersWithSpecificFlavorTestJSON-2009030967 tempest-ServersWithSpecificFlavorTestJSON-2009030967-project-member] Flavor pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 541.343205] env[61839]: DEBUG nova.virt.hardware [None req-4a74a28c-d68f-49fe-a9f7-0d900d2e4df2 tempest-ServersWithSpecificFlavorTestJSON-2009030967 tempest-ServersWithSpecificFlavorTestJSON-2009030967-project-member] Image pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 541.343392] env[61839]: DEBUG nova.virt.hardware [None req-4a74a28c-d68f-49fe-a9f7-0d900d2e4df2 tempest-ServersWithSpecificFlavorTestJSON-2009030967 tempest-ServersWithSpecificFlavorTestJSON-2009030967-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 541.343627] env[61839]: DEBUG nova.virt.hardware [None req-4a74a28c-d68f-49fe-a9f7-0d900d2e4df2 tempest-ServersWithSpecificFlavorTestJSON-2009030967 tempest-ServersWithSpecificFlavorTestJSON-2009030967-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 541.343798] env[61839]: DEBUG nova.virt.hardware [None req-4a74a28c-d68f-49fe-a9f7-0d900d2e4df2 tempest-ServersWithSpecificFlavorTestJSON-2009030967 
tempest-ServersWithSpecificFlavorTestJSON-2009030967-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 541.343977] env[61839]: DEBUG nova.virt.hardware [None req-4a74a28c-d68f-49fe-a9f7-0d900d2e4df2 tempest-ServersWithSpecificFlavorTestJSON-2009030967 tempest-ServersWithSpecificFlavorTestJSON-2009030967-project-member] Got 1 possible topologies {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 541.344157] env[61839]: DEBUG nova.virt.hardware [None req-4a74a28c-d68f-49fe-a9f7-0d900d2e4df2 tempest-ServersWithSpecificFlavorTestJSON-2009030967 tempest-ServersWithSpecificFlavorTestJSON-2009030967-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 541.344421] env[61839]: DEBUG nova.virt.hardware [None req-4a74a28c-d68f-49fe-a9f7-0d900d2e4df2 tempest-ServersWithSpecificFlavorTestJSON-2009030967 tempest-ServersWithSpecificFlavorTestJSON-2009030967-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 541.345623] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fd1d1c5-a3a4-412c-aafc-8c3791a83fb2 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 541.353661] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a71aadc0-eab6-4069-8a86-252ad1308d2f {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 541.362616] env[61839]: DEBUG nova.compute.manager [None req-baec407e-7ec5-474d-acf6-1dc7891e7887 tempest-ImagesOneServerNegativeTestJSON-734658384 tempest-ImagesOneServerNegativeTestJSON-734658384-project-member] [instance: 0b78efda-51c7-4d51-be31-005ff0d44ede] Starting instance... {{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 541.509471] env[61839]: DEBUG nova.compute.manager [req-29d88a8a-c8d9-4ecb-abd4-ee863689b8dd req-943a08d4-78e6-4eae-afea-30ec30bfc48e service nova] [instance: 9594f132-d558-4c75-872f-b1d1b7c08f66] Received event network-changed-9c3b8ee6-c7b3-473b-99ac-0212bfa57f3e {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 541.509737] env[61839]: DEBUG nova.compute.manager [req-29d88a8a-c8d9-4ecb-abd4-ee863689b8dd req-943a08d4-78e6-4eae-afea-30ec30bfc48e service nova] [instance: 9594f132-d558-4c75-872f-b1d1b7c08f66] Refreshing instance network info cache due to event network-changed-9c3b8ee6-c7b3-473b-99ac-0212bfa57f3e. 
{{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 541.513320] env[61839]: DEBUG oslo_concurrency.lockutils [req-29d88a8a-c8d9-4ecb-abd4-ee863689b8dd req-943a08d4-78e6-4eae-afea-30ec30bfc48e service nova] Acquiring lock "refresh_cache-9594f132-d558-4c75-872f-b1d1b7c08f66" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 541.515017] env[61839]: DEBUG oslo_concurrency.lockutils [req-29d88a8a-c8d9-4ecb-abd4-ee863689b8dd req-943a08d4-78e6-4eae-afea-30ec30bfc48e service nova] Acquired lock "refresh_cache-9594f132-d558-4c75-872f-b1d1b7c08f66" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 541.515017] env[61839]: DEBUG nova.network.neutron [req-29d88a8a-c8d9-4ecb-abd4-ee863689b8dd req-943a08d4-78e6-4eae-afea-30ec30bfc48e service nova] [instance: 9594f132-d558-4c75-872f-b1d1b7c08f66] Refreshing network info cache for port 9c3b8ee6-c7b3-473b-99ac-0212bfa57f3e {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 541.637483] env[61839]: DEBUG nova.network.neutron [-] [instance: 9594f132-d558-4c75-872f-b1d1b7c08f66] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 541.656173] env[61839]: DEBUG oslo_concurrency.lockutils [None req-285e2342-47a9-4a7d-855a-b08b1b98031b tempest-MigrationsAdminTest-1877210458 tempest-MigrationsAdminTest-1877210458-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.372s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 541.656380] env[61839]: DEBUG nova.compute.manager [None req-285e2342-47a9-4a7d-855a-b08b1b98031b tempest-MigrationsAdminTest-1877210458 tempest-MigrationsAdminTest-1877210458-project-member] [instance: 248ced8d-af78-4400-b9eb-449851b1bfa3] Start building networks asynchronously for instance. {{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 541.658949] env[61839]: DEBUG oslo_concurrency.lockutils [None req-2194692e-8fa8-43a0-9f32-21d13bbfb66f tempest-ServersTestFqdnHostnames-1400549167 tempest-ServersTestFqdnHostnames-1400549167-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 7.672s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 541.676155] env[61839]: DEBUG oslo_vmware.api [None req-540fdcb6-3f39-4f2c-bc6d-6e0b40f83f5f tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Task: {'id': task-1314244, 'name': PowerOnVM_Task} progress is 64%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 541.899914] env[61839]: DEBUG oslo_concurrency.lockutils [None req-baec407e-7ec5-474d-acf6-1dc7891e7887 tempest-ImagesOneServerNegativeTestJSON-734658384 tempest-ImagesOneServerNegativeTestJSON-734658384-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 542.051354] env[61839]: DEBUG nova.network.neutron [req-29d88a8a-c8d9-4ecb-abd4-ee863689b8dd req-943a08d4-78e6-4eae-afea-30ec30bfc48e service nova] [instance: 9594f132-d558-4c75-872f-b1d1b7c08f66] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 542.139933] env[61839]: INFO nova.compute.manager [-] [instance: 9594f132-d558-4c75-872f-b1d1b7c08f66] Took 1.06 seconds to deallocate network for instance. [ 542.143501] env[61839]: DEBUG nova.compute.claims [None req-e2e7743f-119e-4c3a-a685-1f1242f17cae tempest-VolumesAssistedSnapshotsTest-1863052206 tempest-VolumesAssistedSnapshotsTest-1863052206-project-member] [instance: 9594f132-d558-4c75-872f-b1d1b7c08f66] Aborting claim: {{(pid=61839) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 542.143921] env[61839]: DEBUG oslo_concurrency.lockutils [None req-e2e7743f-119e-4c3a-a685-1f1242f17cae tempest-VolumesAssistedSnapshotsTest-1863052206 tempest-VolumesAssistedSnapshotsTest-1863052206-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 542.166816] env[61839]: DEBUG nova.compute.utils [None req-285e2342-47a9-4a7d-855a-b08b1b98031b tempest-MigrationsAdminTest-1877210458 tempest-MigrationsAdminTest-1877210458-project-member] Using /dev/sd instead of None {{(pid=61839) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 542.173938] env[61839]: DEBUG nova.compute.manager [None req-285e2342-47a9-4a7d-855a-b08b1b98031b tempest-MigrationsAdminTest-1877210458 tempest-MigrationsAdminTest-1877210458-project-member] [instance: 248ced8d-af78-4400-b9eb-449851b1bfa3] Allocating IP information in the background. {{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 542.174342] env[61839]: DEBUG nova.network.neutron [None req-285e2342-47a9-4a7d-855a-b08b1b98031b tempest-MigrationsAdminTest-1877210458 tempest-MigrationsAdminTest-1877210458-project-member] [instance: 248ced8d-af78-4400-b9eb-449851b1bfa3] allocate_for_instance() {{(pid=61839) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 542.190162] env[61839]: DEBUG oslo_vmware.api [None req-540fdcb6-3f39-4f2c-bc6d-6e0b40f83f5f tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Task: {'id': task-1314244, 'name': PowerOnVM_Task, 'duration_secs': 0.839474} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 542.190728] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-540fdcb6-3f39-4f2c-bc6d-6e0b40f83f5f tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] [instance: 211e8267-3c33-42c8-852f-1c20d7987453] Powered on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 542.191223] env[61839]: DEBUG nova.compute.manager [None req-540fdcb6-3f39-4f2c-bc6d-6e0b40f83f5f tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] [instance: 211e8267-3c33-42c8-852f-1c20d7987453] Checking state {{(pid=61839) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 542.195159] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c75a6fee-8d59-4a79-bf74-a11abb9d588f {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 542.312699] env[61839]: DEBUG nova.network.neutron [req-29d88a8a-c8d9-4ecb-abd4-ee863689b8dd req-943a08d4-78e6-4eae-afea-30ec30bfc48e service nova] [instance: 9594f132-d558-4c75-872f-b1d1b7c08f66] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 542.351397] env[61839]: DEBUG nova.policy [None req-285e2342-47a9-4a7d-855a-b08b1b98031b tempest-MigrationsAdminTest-1877210458 tempest-MigrationsAdminTest-1877210458-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '83647c05134346c3a025d32237bbc0ff', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'cd21bf55ac954e8ca9e24a6eb3069e91', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61839) authorize /opt/stack/nova/nova/policy.py:201}} [ 542.378355] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b692743-24f7-40d9-b75f-62cce9e71539 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 542.386188] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b385e11-b8f1-4a5b-9ae9-c6a3d77d8462 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 542.423767] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5983e9ab-c426-4b2b-a1a5-842cfe6bee82 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 542.431413] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41dd6f84-93fa-42b9-ad4d-cd741598237c {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 542.448256] env[61839]: DEBUG nova.compute.provider_tree [None req-2194692e-8fa8-43a0-9f32-21d13bbfb66f tempest-ServersTestFqdnHostnames-1400549167 tempest-ServersTestFqdnHostnames-1400549167-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) 
update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 542.671745] env[61839]: DEBUG nova.compute.manager [None req-285e2342-47a9-4a7d-855a-b08b1b98031b tempest-MigrationsAdminTest-1877210458 tempest-MigrationsAdminTest-1877210458-project-member] [instance: 248ced8d-af78-4400-b9eb-449851b1bfa3] Start building block device mappings for instance. {{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 542.714105] env[61839]: DEBUG oslo_concurrency.lockutils [None req-540fdcb6-3f39-4f2c-bc6d-6e0b40f83f5f tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 542.818327] env[61839]: DEBUG oslo_concurrency.lockutils [req-29d88a8a-c8d9-4ecb-abd4-ee863689b8dd req-943a08d4-78e6-4eae-afea-30ec30bfc48e service nova] Releasing lock "refresh_cache-9594f132-d558-4c75-872f-b1d1b7c08f66" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 542.953368] env[61839]: DEBUG nova.scheduler.client.report [None req-2194692e-8fa8-43a0-9f32-21d13bbfb66f tempest-ServersTestFqdnHostnames-1400549167 tempest-ServersTestFqdnHostnames-1400549167-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 542.996132] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a1353c68-52e0-45ee-a66a-5ba6b6d870a4 tempest-InstanceActionsTestJSON-1284580064 tempest-InstanceActionsTestJSON-1284580064-project-member] Acquiring lock "ae1917f8-29af-43cc-8397-3b9072acee6c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 542.996765] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a1353c68-52e0-45ee-a66a-5ba6b6d870a4 tempest-InstanceActionsTestJSON-1284580064 tempest-InstanceActionsTestJSON-1284580064-project-member] Lock "ae1917f8-29af-43cc-8397-3b9072acee6c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 543.400808] env[61839]: DEBUG nova.network.neutron [None req-285e2342-47a9-4a7d-855a-b08b1b98031b tempest-MigrationsAdminTest-1877210458 tempest-MigrationsAdminTest-1877210458-project-member] [instance: 248ced8d-af78-4400-b9eb-449851b1bfa3] Successfully created port: 52531475-8b0c-433d-85e9-c0b56de31422 {{(pid=61839) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 543.463683] env[61839]: DEBUG oslo_concurrency.lockutils [None req-2194692e-8fa8-43a0-9f32-21d13bbfb66f tempest-ServersTestFqdnHostnames-1400549167 tempest-ServersTestFqdnHostnames-1400549167-project-member] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.804s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 543.463773] env[61839]: ERROR nova.compute.manager [None req-2194692e-8fa8-43a0-9f32-21d13bbfb66f tempest-ServersTestFqdnHostnames-1400549167 tempest-ServersTestFqdnHostnames-1400549167-project-member] [instance: 6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 16fa8de1-60b8-4e77-bc49-e40967dff057, please check neutron logs for more information. [ 543.463773] env[61839]: ERROR nova.compute.manager [instance: 6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5] Traceback (most recent call last): [ 543.463773] env[61839]: ERROR nova.compute.manager [instance: 6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 543.463773] env[61839]: ERROR nova.compute.manager [instance: 6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5] self.driver.spawn(context, instance, image_meta, [ 543.463773] env[61839]: ERROR nova.compute.manager [instance: 6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 543.463773] env[61839]: ERROR nova.compute.manager [instance: 6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5] self._vmops.spawn(context, instance, image_meta, injected_files, [ 543.463773] env[61839]: ERROR nova.compute.manager [instance: 6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 543.463773] env[61839]: ERROR nova.compute.manager [instance: 6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5] vm_ref = self.build_virtual_machine(instance, [ 543.463773] env[61839]: ERROR nova.compute.manager [instance: 6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 543.463773] env[61839]: ERROR nova.compute.manager [instance: 6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5] vif_infos = vmwarevif.get_vif_info(self._session, [ 543.463773] env[61839]: ERROR nova.compute.manager [instance: 6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 543.464087] env[61839]: ERROR nova.compute.manager [instance: 6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5] for vif in network_info: [ 543.464087] env[61839]: ERROR nova.compute.manager [instance: 6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 543.464087] env[61839]: ERROR nova.compute.manager [instance: 6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5] return self._sync_wrapper(fn, *args, **kwargs) [ 543.464087] env[61839]: ERROR nova.compute.manager [instance: 6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 543.464087] env[61839]: ERROR nova.compute.manager [instance: 6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5] self.wait() [ 543.464087] env[61839]: ERROR nova.compute.manager [instance: 6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 543.464087] env[61839]: ERROR nova.compute.manager [instance: 6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5] self[:] = self._gt.wait() [ 543.464087] env[61839]: ERROR nova.compute.manager [instance: 6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 543.464087] env[61839]: ERROR 
nova.compute.manager [instance: 6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5] return self._exit_event.wait() [ 543.464087] env[61839]: ERROR nova.compute.manager [instance: 6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 543.464087] env[61839]: ERROR nova.compute.manager [instance: 6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5] result = hub.switch() [ 543.464087] env[61839]: ERROR nova.compute.manager [instance: 6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 543.464087] env[61839]: ERROR nova.compute.manager [instance: 6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5] return self.greenlet.switch() [ 543.464653] env[61839]: ERROR nova.compute.manager [instance: 6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 543.464653] env[61839]: ERROR nova.compute.manager [instance: 6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5] result = function(*args, **kwargs) [ 543.464653] env[61839]: ERROR nova.compute.manager [instance: 6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 543.464653] env[61839]: ERROR nova.compute.manager [instance: 6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5] return func(*args, **kwargs) [ 543.464653] env[61839]: ERROR nova.compute.manager [instance: 6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 543.464653] env[61839]: ERROR nova.compute.manager [instance: 6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5] raise e [ 543.464653] env[61839]: ERROR nova.compute.manager [instance: 6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 543.464653] env[61839]: ERROR nova.compute.manager [instance: 6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5] nwinfo = self.network_api.allocate_for_instance( [ 543.464653] env[61839]: ERROR nova.compute.manager [instance: 6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 543.464653] env[61839]: ERROR nova.compute.manager [instance: 6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5] created_port_ids = self._update_ports_for_instance( [ 543.464653] env[61839]: ERROR nova.compute.manager [instance: 6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 543.464653] env[61839]: ERROR nova.compute.manager [instance: 6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5] with excutils.save_and_reraise_exception(): [ 543.464653] env[61839]: ERROR nova.compute.manager [instance: 6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 543.465440] env[61839]: ERROR nova.compute.manager [instance: 6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5] self.force_reraise() [ 543.465440] env[61839]: ERROR nova.compute.manager [instance: 6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 543.465440] env[61839]: ERROR nova.compute.manager [instance: 6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5] raise self.value [ 543.465440] env[61839]: ERROR nova.compute.manager [instance: 6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance 
[ 543.465440] env[61839]: ERROR nova.compute.manager [instance: 6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5] updated_port = self._update_port( [ 543.465440] env[61839]: ERROR nova.compute.manager [instance: 6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 543.465440] env[61839]: ERROR nova.compute.manager [instance: 6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5] _ensure_no_port_binding_failure(port) [ 543.465440] env[61839]: ERROR nova.compute.manager [instance: 6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 543.465440] env[61839]: ERROR nova.compute.manager [instance: 6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5] raise exception.PortBindingFailed(port_id=port['id']) [ 543.465440] env[61839]: ERROR nova.compute.manager [instance: 6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5] nova.exception.PortBindingFailed: Binding failed for port 16fa8de1-60b8-4e77-bc49-e40967dff057, please check neutron logs for more information. [ 543.465440] env[61839]: ERROR nova.compute.manager [instance: 6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5] [ 543.465721] env[61839]: DEBUG nova.compute.utils [None req-2194692e-8fa8-43a0-9f32-21d13bbfb66f tempest-ServersTestFqdnHostnames-1400549167 tempest-ServersTestFqdnHostnames-1400549167-project-member] [instance: 6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5] Binding failed for port 16fa8de1-60b8-4e77-bc49-e40967dff057, please check neutron logs for more information. {{(pid=61839) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 543.465721] env[61839]: DEBUG oslo_concurrency.lockutils [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 8.259s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 543.471600] env[61839]: DEBUG nova.compute.manager [None req-2194692e-8fa8-43a0-9f32-21d13bbfb66f tempest-ServersTestFqdnHostnames-1400549167 tempest-ServersTestFqdnHostnames-1400549167-project-member] [instance: 6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5] Build of instance 6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5 was re-scheduled: Binding failed for port 16fa8de1-60b8-4e77-bc49-e40967dff057, please check neutron logs for more information. 
{{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 543.471600] env[61839]: DEBUG nova.compute.manager [None req-2194692e-8fa8-43a0-9f32-21d13bbfb66f tempest-ServersTestFqdnHostnames-1400549167 tempest-ServersTestFqdnHostnames-1400549167-project-member] [instance: 6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5] Unplugging VIFs for instance {{(pid=61839) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 543.471600] env[61839]: DEBUG oslo_concurrency.lockutils [None req-2194692e-8fa8-43a0-9f32-21d13bbfb66f tempest-ServersTestFqdnHostnames-1400549167 tempest-ServersTestFqdnHostnames-1400549167-project-member] Acquiring lock "refresh_cache-6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 543.471600] env[61839]: DEBUG oslo_concurrency.lockutils [None req-2194692e-8fa8-43a0-9f32-21d13bbfb66f tempest-ServersTestFqdnHostnames-1400549167 tempest-ServersTestFqdnHostnames-1400549167-project-member] Acquired lock "refresh_cache-6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 543.471793] env[61839]: DEBUG nova.network.neutron [None req-2194692e-8fa8-43a0-9f32-21d13bbfb66f tempest-ServersTestFqdnHostnames-1400549167 tempest-ServersTestFqdnHostnames-1400549167-project-member] [instance: 6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 543.515938] env[61839]: ERROR nova.compute.manager [None req-4a74a28c-d68f-49fe-a9f7-0d900d2e4df2 tempest-ServersWithSpecificFlavorTestJSON-2009030967 tempest-ServersWithSpecificFlavorTestJSON-2009030967-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 1e5d8b9c-cb09-4618-9e5b-c0f9a0e2583c, please check neutron logs for more information. 
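When a build fails like this, the failed binding remains queryable on the Neutron port itself, which is what the "check neutron logs" hint points at. A diagnostic sketch using openstacksdk; the cloud name is an assumption for whatever clouds.yaml entry matches this deployment:

import openstack

conn = openstack.connect(cloud='devstack')  # assumed clouds.yaml entry
port = conn.network.get_port('1e5d8b9c-cb09-4618-9e5b-c0f9a0e2583c')
print(port.binding_vif_type)  # 'binding_failed' when no mechanism driver bound it
print(port.binding_host_id)   # the compute host Neutron tried to bind the port to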
[ 543.515938] env[61839]: ERROR nova.compute.manager Traceback (most recent call last): [ 543.515938] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 543.515938] env[61839]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 543.515938] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 543.515938] env[61839]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 543.515938] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 543.515938] env[61839]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 543.515938] env[61839]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 543.515938] env[61839]: ERROR nova.compute.manager self.force_reraise() [ 543.515938] env[61839]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 543.515938] env[61839]: ERROR nova.compute.manager raise self.value [ 543.515938] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 543.515938] env[61839]: ERROR nova.compute.manager updated_port = self._update_port( [ 543.515938] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 543.515938] env[61839]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 543.516489] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 543.516489] env[61839]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 543.516489] env[61839]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 1e5d8b9c-cb09-4618-9e5b-c0f9a0e2583c, please check neutron logs for more information. 
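
That traceback bottoms out in nova/network/neutron.py's _ensure_no_port_binding_failure (line 294 in this tree). Neutron reports a failed binding in the port body rather than as an HTTP error, so Nova has to inspect the port it gets back. A self-contained sketch of that check; the 'binding:vif_type' == 'binding_failed' sentinel is an assumption about the implementation, consistent with the raise site shown above:

    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(
                'Binding failed for port %s, please check neutron logs '
                'for more information.' % port_id)

    def _ensure_no_port_binding_failure(port):
        # Assumption: a port whose binding failed comes back from the
        # Neutron API with a sentinel binding:vif_type value.
        if port.get('binding:vif_type') == 'binding_failed':
            raise PortBindingFailed(port_id=port['id'])

    # Port dict shape as suggested by the log; the IDs are stand-ins.
    _ensure_no_port_binding_failure(
        {'id': 'demo-port', 'binding:vif_type': 'ovs'})             # passes
    _ensure_no_port_binding_failure(
        {'id': 'demo-port', 'binding:vif_type': 'binding_failed'})  # raises
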
[ 543.516489] env[61839]: ERROR nova.compute.manager [ 543.516489] env[61839]: Traceback (most recent call last): [ 543.516489] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 543.516489] env[61839]: listener.cb(fileno) [ 543.516489] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 543.516489] env[61839]: result = function(*args, **kwargs) [ 543.516489] env[61839]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 543.516489] env[61839]: return func(*args, **kwargs) [ 543.516489] env[61839]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 543.516489] env[61839]: raise e [ 543.516489] env[61839]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 543.516489] env[61839]: nwinfo = self.network_api.allocate_for_instance( [ 543.516489] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 543.516489] env[61839]: created_port_ids = self._update_ports_for_instance( [ 543.516489] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 543.516489] env[61839]: with excutils.save_and_reraise_exception(): [ 543.516489] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 543.516489] env[61839]: self.force_reraise() [ 543.516489] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 543.516489] env[61839]: raise self.value [ 543.516489] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 543.516489] env[61839]: updated_port = self._update_port( [ 543.516489] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 543.516489] env[61839]: _ensure_no_port_binding_failure(port) [ 543.516489] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 543.516489] env[61839]: raise exception.PortBindingFailed(port_id=port['id']) [ 543.517265] env[61839]: nova.exception.PortBindingFailed: Binding failed for port 1e5d8b9c-cb09-4618-9e5b-c0f9a0e2583c, please check neutron logs for more information. [ 543.517265] env[61839]: Removing descriptor: 16 [ 543.520308] env[61839]: ERROR nova.compute.manager [None req-4a74a28c-d68f-49fe-a9f7-0d900d2e4df2 tempest-ServersWithSpecificFlavorTestJSON-2009030967 tempest-ServersWithSpecificFlavorTestJSON-2009030967-project-member] [instance: d0ed5c81-e05a-41a3-9e45-ae0a2a235f16] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 1e5d8b9c-cb09-4618-9e5b-c0f9a0e2583c, please check neutron logs for more information. 
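
The instance-level traceback that follows shows the second way the same exception surfaces: network allocation runs in a background greenthread (_allocate_network_async), and the vmwareapi spawn path only blocks on it the first time it iterates network_info (model.py's _sync_wrapper calling wait()), at which point the greenthread's exception is re-raised inside driver.spawn(). A rough sketch of that deferred-result pattern using eventlet, which this deployment runs on (class and function names here are illustrative, not Nova's):

    import eventlet

    class AsyncNetworkInfo(list):
        """A list filled in by a greenthread; iteration waits first."""

        def __init__(self, func, *args):
            super().__init__()
            self._gt = eventlet.spawn(func, *args)

        def wait(self):
            # GreenThread.wait() returns the function's result, or
            # re-raises whatever it raised -- e.g. PortBindingFailed.
            self[:] = self._gt.wait()
            return self

        def __iter__(self):
            self.wait()
            return super().__iter__()

    def allocate(n):
        return ['vif-%d' % i for i in range(n)]

    for vif in AsyncNetworkInfo(allocate, 2):  # blocks until allocated
        print(vif)
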
[ 543.520308] env[61839]: ERROR nova.compute.manager [instance: d0ed5c81-e05a-41a3-9e45-ae0a2a235f16] Traceback (most recent call last): [ 543.520308] env[61839]: ERROR nova.compute.manager [instance: d0ed5c81-e05a-41a3-9e45-ae0a2a235f16] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 543.520308] env[61839]: ERROR nova.compute.manager [instance: d0ed5c81-e05a-41a3-9e45-ae0a2a235f16] yield resources [ 543.520308] env[61839]: ERROR nova.compute.manager [instance: d0ed5c81-e05a-41a3-9e45-ae0a2a235f16] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 543.520308] env[61839]: ERROR nova.compute.manager [instance: d0ed5c81-e05a-41a3-9e45-ae0a2a235f16] self.driver.spawn(context, instance, image_meta, [ 543.520308] env[61839]: ERROR nova.compute.manager [instance: d0ed5c81-e05a-41a3-9e45-ae0a2a235f16] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 543.520308] env[61839]: ERROR nova.compute.manager [instance: d0ed5c81-e05a-41a3-9e45-ae0a2a235f16] self._vmops.spawn(context, instance, image_meta, injected_files, [ 543.520308] env[61839]: ERROR nova.compute.manager [instance: d0ed5c81-e05a-41a3-9e45-ae0a2a235f16] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 543.520308] env[61839]: ERROR nova.compute.manager [instance: d0ed5c81-e05a-41a3-9e45-ae0a2a235f16] vm_ref = self.build_virtual_machine(instance, [ 543.520308] env[61839]: ERROR nova.compute.manager [instance: d0ed5c81-e05a-41a3-9e45-ae0a2a235f16] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 543.520659] env[61839]: ERROR nova.compute.manager [instance: d0ed5c81-e05a-41a3-9e45-ae0a2a235f16] vif_infos = vmwarevif.get_vif_info(self._session, [ 543.520659] env[61839]: ERROR nova.compute.manager [instance: d0ed5c81-e05a-41a3-9e45-ae0a2a235f16] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 543.520659] env[61839]: ERROR nova.compute.manager [instance: d0ed5c81-e05a-41a3-9e45-ae0a2a235f16] for vif in network_info: [ 543.520659] env[61839]: ERROR nova.compute.manager [instance: d0ed5c81-e05a-41a3-9e45-ae0a2a235f16] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 543.520659] env[61839]: ERROR nova.compute.manager [instance: d0ed5c81-e05a-41a3-9e45-ae0a2a235f16] return self._sync_wrapper(fn, *args, **kwargs) [ 543.520659] env[61839]: ERROR nova.compute.manager [instance: d0ed5c81-e05a-41a3-9e45-ae0a2a235f16] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 543.520659] env[61839]: ERROR nova.compute.manager [instance: d0ed5c81-e05a-41a3-9e45-ae0a2a235f16] self.wait() [ 543.520659] env[61839]: ERROR nova.compute.manager [instance: d0ed5c81-e05a-41a3-9e45-ae0a2a235f16] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 543.520659] env[61839]: ERROR nova.compute.manager [instance: d0ed5c81-e05a-41a3-9e45-ae0a2a235f16] self[:] = self._gt.wait() [ 543.520659] env[61839]: ERROR nova.compute.manager [instance: d0ed5c81-e05a-41a3-9e45-ae0a2a235f16] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 543.520659] env[61839]: ERROR nova.compute.manager [instance: d0ed5c81-e05a-41a3-9e45-ae0a2a235f16] return self._exit_event.wait() [ 543.520659] env[61839]: ERROR nova.compute.manager [instance: d0ed5c81-e05a-41a3-9e45-ae0a2a235f16] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 543.520659] env[61839]: ERROR 
nova.compute.manager [instance: d0ed5c81-e05a-41a3-9e45-ae0a2a235f16] result = hub.switch() [ 543.521051] env[61839]: ERROR nova.compute.manager [instance: d0ed5c81-e05a-41a3-9e45-ae0a2a235f16] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 543.521051] env[61839]: ERROR nova.compute.manager [instance: d0ed5c81-e05a-41a3-9e45-ae0a2a235f16] return self.greenlet.switch() [ 543.521051] env[61839]: ERROR nova.compute.manager [instance: d0ed5c81-e05a-41a3-9e45-ae0a2a235f16] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 543.521051] env[61839]: ERROR nova.compute.manager [instance: d0ed5c81-e05a-41a3-9e45-ae0a2a235f16] result = function(*args, **kwargs) [ 543.521051] env[61839]: ERROR nova.compute.manager [instance: d0ed5c81-e05a-41a3-9e45-ae0a2a235f16] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 543.521051] env[61839]: ERROR nova.compute.manager [instance: d0ed5c81-e05a-41a3-9e45-ae0a2a235f16] return func(*args, **kwargs) [ 543.521051] env[61839]: ERROR nova.compute.manager [instance: d0ed5c81-e05a-41a3-9e45-ae0a2a235f16] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 543.521051] env[61839]: ERROR nova.compute.manager [instance: d0ed5c81-e05a-41a3-9e45-ae0a2a235f16] raise e [ 543.521051] env[61839]: ERROR nova.compute.manager [instance: d0ed5c81-e05a-41a3-9e45-ae0a2a235f16] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 543.521051] env[61839]: ERROR nova.compute.manager [instance: d0ed5c81-e05a-41a3-9e45-ae0a2a235f16] nwinfo = self.network_api.allocate_for_instance( [ 543.521051] env[61839]: ERROR nova.compute.manager [instance: d0ed5c81-e05a-41a3-9e45-ae0a2a235f16] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 543.521051] env[61839]: ERROR nova.compute.manager [instance: d0ed5c81-e05a-41a3-9e45-ae0a2a235f16] created_port_ids = self._update_ports_for_instance( [ 543.521051] env[61839]: ERROR nova.compute.manager [instance: d0ed5c81-e05a-41a3-9e45-ae0a2a235f16] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 543.521377] env[61839]: ERROR nova.compute.manager [instance: d0ed5c81-e05a-41a3-9e45-ae0a2a235f16] with excutils.save_and_reraise_exception(): [ 543.521377] env[61839]: ERROR nova.compute.manager [instance: d0ed5c81-e05a-41a3-9e45-ae0a2a235f16] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 543.521377] env[61839]: ERROR nova.compute.manager [instance: d0ed5c81-e05a-41a3-9e45-ae0a2a235f16] self.force_reraise() [ 543.521377] env[61839]: ERROR nova.compute.manager [instance: d0ed5c81-e05a-41a3-9e45-ae0a2a235f16] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 543.521377] env[61839]: ERROR nova.compute.manager [instance: d0ed5c81-e05a-41a3-9e45-ae0a2a235f16] raise self.value [ 543.521377] env[61839]: ERROR nova.compute.manager [instance: d0ed5c81-e05a-41a3-9e45-ae0a2a235f16] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 543.521377] env[61839]: ERROR nova.compute.manager [instance: d0ed5c81-e05a-41a3-9e45-ae0a2a235f16] updated_port = self._update_port( [ 543.521377] env[61839]: ERROR nova.compute.manager [instance: d0ed5c81-e05a-41a3-9e45-ae0a2a235f16] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 543.521377] 
env[61839]: ERROR nova.compute.manager [instance: d0ed5c81-e05a-41a3-9e45-ae0a2a235f16] _ensure_no_port_binding_failure(port) [ 543.521377] env[61839]: ERROR nova.compute.manager [instance: d0ed5c81-e05a-41a3-9e45-ae0a2a235f16] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 543.521377] env[61839]: ERROR nova.compute.manager [instance: d0ed5c81-e05a-41a3-9e45-ae0a2a235f16] raise exception.PortBindingFailed(port_id=port['id']) [ 543.521377] env[61839]: ERROR nova.compute.manager [instance: d0ed5c81-e05a-41a3-9e45-ae0a2a235f16] nova.exception.PortBindingFailed: Binding failed for port 1e5d8b9c-cb09-4618-9e5b-c0f9a0e2583c, please check neutron logs for more information. [ 543.521377] env[61839]: ERROR nova.compute.manager [instance: d0ed5c81-e05a-41a3-9e45-ae0a2a235f16] [ 543.521713] env[61839]: INFO nova.compute.manager [None req-4a74a28c-d68f-49fe-a9f7-0d900d2e4df2 tempest-ServersWithSpecificFlavorTestJSON-2009030967 tempest-ServersWithSpecificFlavorTestJSON-2009030967-project-member] [instance: d0ed5c81-e05a-41a3-9e45-ae0a2a235f16] Terminating instance [ 543.525855] env[61839]: DEBUG oslo_concurrency.lockutils [None req-4a74a28c-d68f-49fe-a9f7-0d900d2e4df2 tempest-ServersWithSpecificFlavorTestJSON-2009030967 tempest-ServersWithSpecificFlavorTestJSON-2009030967-project-member] Acquiring lock "refresh_cache-d0ed5c81-e05a-41a3-9e45-ae0a2a235f16" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 543.525972] env[61839]: DEBUG oslo_concurrency.lockutils [None req-4a74a28c-d68f-49fe-a9f7-0d900d2e4df2 tempest-ServersWithSpecificFlavorTestJSON-2009030967 tempest-ServersWithSpecificFlavorTestJSON-2009030967-project-member] Acquired lock "refresh_cache-d0ed5c81-e05a-41a3-9e45-ae0a2a235f16" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 543.526348] env[61839]: DEBUG nova.network.neutron [None req-4a74a28c-d68f-49fe-a9f7-0d900d2e4df2 tempest-ServersWithSpecificFlavorTestJSON-2009030967 tempest-ServersWithSpecificFlavorTestJSON-2009030967-project-member] [instance: d0ed5c81-e05a-41a3-9e45-ae0a2a235f16] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 543.627489] env[61839]: DEBUG nova.compute.manager [None req-2ecc1dba-b23d-4c0b-92f6-2903d871aa23 tempest-ServerDiagnosticsV248Test-233712532 tempest-ServerDiagnosticsV248Test-233712532-project-admin] [instance: 41823a25-5ff2-4838-854d-5bada8e5daca] Checking state {{(pid=61839) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 543.629339] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-569ab625-1aa8-4b62-81c7-087e000f931a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 543.642071] env[61839]: INFO nova.compute.manager [None req-2ecc1dba-b23d-4c0b-92f6-2903d871aa23 tempest-ServerDiagnosticsV248Test-233712532 tempest-ServerDiagnosticsV248Test-233712532-project-admin] [instance: 41823a25-5ff2-4838-854d-5bada8e5daca] Retrieving diagnostics [ 543.642566] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8320eb2f-a193-45f0-877d-346b7bf8b57f {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 543.683032] env[61839]: DEBUG nova.compute.manager [None 
req-285e2342-47a9-4a7d-855a-b08b1b98031b tempest-MigrationsAdminTest-1877210458 tempest-MigrationsAdminTest-1877210458-project-member] [instance: 248ced8d-af78-4400-b9eb-449851b1bfa3] Start spawning the instance on the hypervisor. {{(pid=61839) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 543.716947] env[61839]: DEBUG nova.virt.hardware [None req-285e2342-47a9-4a7d-855a-b08b1b98031b tempest-MigrationsAdminTest-1877210458 tempest-MigrationsAdminTest-1877210458-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-18T16:51:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-18T16:50:57Z,direct_url=,disk_format='vmdk',id=e497cc62-282a-4a70-9770-22d80d8a1013,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a98e034276e44534a9feace637762da7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-18T16:50:58Z,virtual_size=,visibility=), allow threads: False {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 543.718496] env[61839]: DEBUG nova.virt.hardware [None req-285e2342-47a9-4a7d-855a-b08b1b98031b tempest-MigrationsAdminTest-1877210458 tempest-MigrationsAdminTest-1877210458-project-member] Flavor limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 543.718736] env[61839]: DEBUG nova.virt.hardware [None req-285e2342-47a9-4a7d-855a-b08b1b98031b tempest-MigrationsAdminTest-1877210458 tempest-MigrationsAdminTest-1877210458-project-member] Image limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 543.719025] env[61839]: DEBUG nova.virt.hardware [None req-285e2342-47a9-4a7d-855a-b08b1b98031b tempest-MigrationsAdminTest-1877210458 tempest-MigrationsAdminTest-1877210458-project-member] Flavor pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 543.719211] env[61839]: DEBUG nova.virt.hardware [None req-285e2342-47a9-4a7d-855a-b08b1b98031b tempest-MigrationsAdminTest-1877210458 tempest-MigrationsAdminTest-1877210458-project-member] Image pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 543.720112] env[61839]: DEBUG nova.virt.hardware [None req-285e2342-47a9-4a7d-855a-b08b1b98031b tempest-MigrationsAdminTest-1877210458 tempest-MigrationsAdminTest-1877210458-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 543.720112] env[61839]: DEBUG nova.virt.hardware [None req-285e2342-47a9-4a7d-855a-b08b1b98031b tempest-MigrationsAdminTest-1877210458 tempest-MigrationsAdminTest-1877210458-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 543.720112] env[61839]: DEBUG nova.virt.hardware [None req-285e2342-47a9-4a7d-855a-b08b1b98031b tempest-MigrationsAdminTest-1877210458 
tempest-MigrationsAdminTest-1877210458-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 543.720112] env[61839]: DEBUG nova.virt.hardware [None req-285e2342-47a9-4a7d-855a-b08b1b98031b tempest-MigrationsAdminTest-1877210458 tempest-MigrationsAdminTest-1877210458-project-member] Got 1 possible topologies {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 543.720112] env[61839]: DEBUG nova.virt.hardware [None req-285e2342-47a9-4a7d-855a-b08b1b98031b tempest-MigrationsAdminTest-1877210458 tempest-MigrationsAdminTest-1877210458-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 543.720468] env[61839]: DEBUG nova.virt.hardware [None req-285e2342-47a9-4a7d-855a-b08b1b98031b tempest-MigrationsAdminTest-1877210458 tempest-MigrationsAdminTest-1877210458-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 543.721357] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca6d84a5-a983-4cd1-8926-67ee240d6bcc {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 543.735971] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2fa0187-ac30-49d4-adf2-592440704a74 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 544.007804] env[61839]: DEBUG nova.network.neutron [None req-2194692e-8fa8-43a0-9f32-21d13bbfb66f tempest-ServersTestFqdnHostnames-1400549167 tempest-ServersTestFqdnHostnames-1400549167-project-member] [instance: 6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 544.115415] env[61839]: DEBUG oslo_concurrency.lockutils [None req-6d60a164-1a17-4d70-8bc1-56b11131db97 tempest-InstanceActionsV221TestJSON-240629897 tempest-InstanceActionsV221TestJSON-240629897-project-member] Acquiring lock "1edc2966-2edc-453e-a80d-c4139d910a6d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 544.115633] env[61839]: DEBUG oslo_concurrency.lockutils [None req-6d60a164-1a17-4d70-8bc1-56b11131db97 tempest-InstanceActionsV221TestJSON-240629897 tempest-InstanceActionsV221TestJSON-240629897-project-member] Lock "1edc2966-2edc-453e-a80d-c4139d910a6d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 544.116754] env[61839]: DEBUG nova.network.neutron [None req-4a74a28c-d68f-49fe-a9f7-0d900d2e4df2 tempest-ServersWithSpecificFlavorTestJSON-2009030967 tempest-ServersWithSpecificFlavorTestJSON-2009030967-project-member] [instance: d0ed5c81-e05a-41a3-9e45-ae0a2a235f16] Instance cache missing network info. 
{{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 544.202720] env[61839]: DEBUG nova.network.neutron [None req-2194692e-8fa8-43a0-9f32-21d13bbfb66f tempest-ServersTestFqdnHostnames-1400549167 tempest-ServersTestFqdnHostnames-1400549167-project-member] [instance: 6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 544.426675] env[61839]: DEBUG nova.network.neutron [None req-4a74a28c-d68f-49fe-a9f7-0d900d2e4df2 tempest-ServersWithSpecificFlavorTestJSON-2009030967 tempest-ServersWithSpecificFlavorTestJSON-2009030967-project-member] [instance: d0ed5c81-e05a-41a3-9e45-ae0a2a235f16] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 544.526449] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance 211e8267-3c33-42c8-852f-1c20d7987453 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 544.527659] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance 41823a25-5ff2-4838-854d-5bada8e5daca actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 544.708087] env[61839]: DEBUG oslo_concurrency.lockutils [None req-2194692e-8fa8-43a0-9f32-21d13bbfb66f tempest-ServersTestFqdnHostnames-1400549167 tempest-ServersTestFqdnHostnames-1400549167-project-member] Releasing lock "refresh_cache-6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 544.708087] env[61839]: DEBUG nova.compute.manager [None req-2194692e-8fa8-43a0-9f32-21d13bbfb66f tempest-ServersTestFqdnHostnames-1400549167 tempest-ServersTestFqdnHostnames-1400549167-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61839) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 544.708087] env[61839]: DEBUG nova.compute.manager [None req-2194692e-8fa8-43a0-9f32-21d13bbfb66f tempest-ServersTestFqdnHostnames-1400549167 tempest-ServersTestFqdnHostnames-1400549167-project-member] [instance: 6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 544.708087] env[61839]: DEBUG nova.network.neutron [None req-2194692e-8fa8-43a0-9f32-21d13bbfb66f tempest-ServersTestFqdnHostnames-1400549167 tempest-ServersTestFqdnHostnames-1400549167-project-member] [instance: 6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 544.780186] env[61839]: DEBUG nova.network.neutron [None req-2194692e-8fa8-43a0-9f32-21d13bbfb66f tempest-ServersTestFqdnHostnames-1400549167 tempest-ServersTestFqdnHostnames-1400549167-project-member] [instance: 6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5] Instance cache missing network info. 
{{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 544.935169] env[61839]: DEBUG oslo_concurrency.lockutils [None req-4a74a28c-d68f-49fe-a9f7-0d900d2e4df2 tempest-ServersWithSpecificFlavorTestJSON-2009030967 tempest-ServersWithSpecificFlavorTestJSON-2009030967-project-member] Releasing lock "refresh_cache-d0ed5c81-e05a-41a3-9e45-ae0a2a235f16" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 544.935428] env[61839]: DEBUG nova.compute.manager [None req-4a74a28c-d68f-49fe-a9f7-0d900d2e4df2 tempest-ServersWithSpecificFlavorTestJSON-2009030967 tempest-ServersWithSpecificFlavorTestJSON-2009030967-project-member] [instance: d0ed5c81-e05a-41a3-9e45-ae0a2a235f16] Start destroying the instance on the hypervisor. {{(pid=61839) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 544.935629] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-4a74a28c-d68f-49fe-a9f7-0d900d2e4df2 tempest-ServersWithSpecificFlavorTestJSON-2009030967 tempest-ServersWithSpecificFlavorTestJSON-2009030967-project-member] [instance: d0ed5c81-e05a-41a3-9e45-ae0a2a235f16] Destroying instance {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 544.936239] env[61839]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-94ccf2d9-0c7c-4d44-a341-e36562bad2b2 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 544.946808] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96138a45-bc32-460e-812f-722099f0dba4 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 544.971911] env[61839]: WARNING nova.virt.vmwareapi.vmops [None req-4a74a28c-d68f-49fe-a9f7-0d900d2e4df2 tempest-ServersWithSpecificFlavorTestJSON-2009030967 tempest-ServersWithSpecificFlavorTestJSON-2009030967-project-member] [instance: d0ed5c81-e05a-41a3-9e45-ae0a2a235f16] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance d0ed5c81-e05a-41a3-9e45-ae0a2a235f16 could not be found. [ 544.972154] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-4a74a28c-d68f-49fe-a9f7-0d900d2e4df2 tempest-ServersWithSpecificFlavorTestJSON-2009030967 tempest-ServersWithSpecificFlavorTestJSON-2009030967-project-member] [instance: d0ed5c81-e05a-41a3-9e45-ae0a2a235f16] Instance destroyed {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 544.972381] env[61839]: INFO nova.compute.manager [None req-4a74a28c-d68f-49fe-a9f7-0d900d2e4df2 tempest-ServersWithSpecificFlavorTestJSON-2009030967 tempest-ServersWithSpecificFlavorTestJSON-2009030967-project-member] [instance: d0ed5c81-e05a-41a3-9e45-ae0a2a235f16] Took 0.04 seconds to destroy the instance on the hypervisor. [ 544.972644] env[61839]: DEBUG oslo.service.loopingcall [None req-4a74a28c-d68f-49fe-a9f7-0d900d2e4df2 tempest-ServersWithSpecificFlavorTestJSON-2009030967 tempest-ServersWithSpecificFlavorTestJSON-2009030967-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 544.972880] env[61839]: DEBUG nova.compute.manager [-] [instance: d0ed5c81-e05a-41a3-9e45-ae0a2a235f16] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 544.972971] env[61839]: DEBUG nova.network.neutron [-] [instance: d0ed5c81-e05a-41a3-9e45-ae0a2a235f16] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 545.018066] env[61839]: DEBUG nova.network.neutron [-] [instance: d0ed5c81-e05a-41a3-9e45-ae0a2a235f16] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 545.032513] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance 6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 545.032718] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance 9594f132-d558-4c75-872f-b1d1b7c08f66 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 545.032875] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance d0ed5c81-e05a-41a3-9e45-ae0a2a235f16 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 545.033008] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance 248ced8d-af78-4400-b9eb-449851b1bfa3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 545.284575] env[61839]: DEBUG nova.network.neutron [None req-2194692e-8fa8-43a0-9f32-21d13bbfb66f tempest-ServersTestFqdnHostnames-1400549167 tempest-ServersTestFqdnHostnames-1400549167-project-member] [instance: 6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 545.429596] env[61839]: DEBUG nova.compute.manager [req-21fdc8e5-7d4a-42ec-913b-7cc5d649588e req-cbd7eead-4ce3-437a-a9da-2de8a8d7cad0 service nova] [instance: 9594f132-d558-4c75-872f-b1d1b7c08f66] Received event network-vif-deleted-9c3b8ee6-c7b3-473b-99ac-0212bfa57f3e {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 545.429833] env[61839]: DEBUG nova.compute.manager [req-21fdc8e5-7d4a-42ec-913b-7cc5d649588e req-cbd7eead-4ce3-437a-a9da-2de8a8d7cad0 service nova] [instance: d0ed5c81-e05a-41a3-9e45-ae0a2a235f16] Received event network-changed-1e5d8b9c-cb09-4618-9e5b-c0f9a0e2583c {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 545.429995] env[61839]: DEBUG nova.compute.manager [req-21fdc8e5-7d4a-42ec-913b-7cc5d649588e req-cbd7eead-4ce3-437a-a9da-2de8a8d7cad0 service nova] [instance: d0ed5c81-e05a-41a3-9e45-ae0a2a235f16] Refreshing instance network info cache due to event network-changed-1e5d8b9c-cb09-4618-9e5b-c0f9a0e2583c. {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 545.430286] env[61839]: DEBUG oslo_concurrency.lockutils [req-21fdc8e5-7d4a-42ec-913b-7cc5d649588e req-cbd7eead-4ce3-437a-a9da-2de8a8d7cad0 service nova] Acquiring lock "refresh_cache-d0ed5c81-e05a-41a3-9e45-ae0a2a235f16" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 545.430433] env[61839]: DEBUG oslo_concurrency.lockutils [req-21fdc8e5-7d4a-42ec-913b-7cc5d649588e req-cbd7eead-4ce3-437a-a9da-2de8a8d7cad0 service nova] Acquired lock "refresh_cache-d0ed5c81-e05a-41a3-9e45-ae0a2a235f16" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 545.430764] env[61839]: DEBUG nova.network.neutron [req-21fdc8e5-7d4a-42ec-913b-7cc5d649588e req-cbd7eead-4ce3-437a-a9da-2de8a8d7cad0 service nova] [instance: d0ed5c81-e05a-41a3-9e45-ae0a2a235f16] Refreshing network info cache for port 1e5d8b9c-cb09-4618-9e5b-c0f9a0e2583c {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 545.523913] env[61839]: DEBUG nova.network.neutron [-] [instance: d0ed5c81-e05a-41a3-9e45-ae0a2a235f16] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 545.538456] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance d0822954-42c2-4003-baf4-97bc2ce65768 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 545.789264] env[61839]: INFO nova.compute.manager [None req-2194692e-8fa8-43a0-9f32-21d13bbfb66f tempest-ServersTestFqdnHostnames-1400549167 tempest-ServersTestFqdnHostnames-1400549167-project-member] [instance: 6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5] Took 1.08 seconds to deallocate network for instance. [ 545.975767] env[61839]: DEBUG nova.network.neutron [req-21fdc8e5-7d4a-42ec-913b-7cc5d649588e req-cbd7eead-4ce3-437a-a9da-2de8a8d7cad0 service nova] [instance: d0ed5c81-e05a-41a3-9e45-ae0a2a235f16] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 546.025978] env[61839]: INFO nova.compute.manager [-] [instance: d0ed5c81-e05a-41a3-9e45-ae0a2a235f16] Took 1.05 seconds to deallocate network for instance. [ 546.030019] env[61839]: DEBUG nova.compute.claims [None req-4a74a28c-d68f-49fe-a9f7-0d900d2e4df2 tempest-ServersWithSpecificFlavorTestJSON-2009030967 tempest-ServersWithSpecificFlavorTestJSON-2009030967-project-member] [instance: d0ed5c81-e05a-41a3-9e45-ae0a2a235f16] Aborting claim: {{(pid=61839) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 546.030019] env[61839]: DEBUG oslo_concurrency.lockutils [None req-4a74a28c-d68f-49fe-a9f7-0d900d2e4df2 tempest-ServersWithSpecificFlavorTestJSON-2009030967 tempest-ServersWithSpecificFlavorTestJSON-2009030967-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 546.042167] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance 2032b746-2161-4487-ae4c-6159313241f4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 546.267296] env[61839]: DEBUG nova.network.neutron [req-21fdc8e5-7d4a-42ec-913b-7cc5d649588e req-cbd7eead-4ce3-437a-a9da-2de8a8d7cad0 service nova] [instance: d0ed5c81-e05a-41a3-9e45-ae0a2a235f16] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 546.527827] env[61839]: INFO nova.compute.manager [None req-6b23937a-feae-42bc-8255-54b6d335ea09 tempest-ServersAdmin275Test-1522278051 tempest-ServersAdmin275Test-1522278051-project-admin] [instance: 211e8267-3c33-42c8-852f-1c20d7987453] Rebuilding instance [ 546.548468] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance ec27ab37-2351-4ad1-b41f-8de9bfab8b9d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 546.599372] env[61839]: DEBUG nova.compute.manager [None req-6b23937a-feae-42bc-8255-54b6d335ea09 tempest-ServersAdmin275Test-1522278051 tempest-ServersAdmin275Test-1522278051-project-admin] [instance: 211e8267-3c33-42c8-852f-1c20d7987453] Checking state {{(pid=61839) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 546.599372] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5b50159-0efb-445b-a853-30c7d12d8721 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.772054] env[61839]: DEBUG oslo_concurrency.lockutils [req-21fdc8e5-7d4a-42ec-913b-7cc5d649588e req-cbd7eead-4ce3-437a-a9da-2de8a8d7cad0 service nova] Releasing lock "refresh_cache-d0ed5c81-e05a-41a3-9e45-ae0a2a235f16" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 546.772054] env[61839]: DEBUG nova.compute.manager [req-21fdc8e5-7d4a-42ec-913b-7cc5d649588e req-cbd7eead-4ce3-437a-a9da-2de8a8d7cad0 service nova] [instance: d0ed5c81-e05a-41a3-9e45-ae0a2a235f16] Received event network-vif-deleted-1e5d8b9c-cb09-4618-9e5b-c0f9a0e2583c {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 546.836401] env[61839]: INFO nova.scheduler.client.report [None req-2194692e-8fa8-43a0-9f32-21d13bbfb66f tempest-ServersTestFqdnHostnames-1400549167 tempest-ServersTestFqdnHostnames-1400549167-project-member] Deleted allocations for instance 6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5 [ 546.871321] env[61839]: ERROR nova.compute.manager [None req-285e2342-47a9-4a7d-855a-b08b1b98031b tempest-MigrationsAdminTest-1877210458 tempest-MigrationsAdminTest-1877210458-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 52531475-8b0c-433d-85e9-c0b56de31422, please check neutron logs for more information. 
[ 546.871321] env[61839]: ERROR nova.compute.manager Traceback (most recent call last): [ 546.871321] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 546.871321] env[61839]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 546.871321] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 546.871321] env[61839]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 546.871321] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 546.871321] env[61839]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 546.871321] env[61839]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 546.871321] env[61839]: ERROR nova.compute.manager self.force_reraise() [ 546.871321] env[61839]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 546.871321] env[61839]: ERROR nova.compute.manager raise self.value [ 546.871321] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 546.871321] env[61839]: ERROR nova.compute.manager updated_port = self._update_port( [ 546.871321] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 546.871321] env[61839]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 546.872614] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 546.872614] env[61839]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 546.872614] env[61839]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 52531475-8b0c-433d-85e9-c0b56de31422, please check neutron logs for more information. 
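
Each of these dumps routes through oslo_utils.excutils.save_and_reraise_exception (the __exit__ and force_reraise frames): a context manager for running cleanup inside an except block without losing the original exception. A small usage sketch; the port-rollback body is illustrative:

    from oslo_utils import excutils

    def _bind(port):
        if port == 'bad':
            raise RuntimeError('binding failed')
        return port

    def update_ports(ports):
        created = []
        try:
            for p in ports:
                created.append(_bind(p))
        except Exception:
            with excutils.save_and_reraise_exception():
                # Cleanup runs here; the original exception is
                # re-raised when the with-block exits.
                for c in reversed(created):
                    print('rolling back', c)

    update_ports(['a', 'bad'])  # rolls back 'a', then re-raises
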
[ 546.872614] env[61839]: ERROR nova.compute.manager [ 546.872614] env[61839]: Traceback (most recent call last): [ 546.872614] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 546.872614] env[61839]: listener.cb(fileno) [ 546.872614] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 546.872614] env[61839]: result = function(*args, **kwargs) [ 546.872614] env[61839]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 546.872614] env[61839]: return func(*args, **kwargs) [ 546.872614] env[61839]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 546.872614] env[61839]: raise e [ 546.872614] env[61839]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 546.872614] env[61839]: nwinfo = self.network_api.allocate_for_instance( [ 546.872614] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 546.872614] env[61839]: created_port_ids = self._update_ports_for_instance( [ 546.872614] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 546.872614] env[61839]: with excutils.save_and_reraise_exception(): [ 546.872614] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 546.872614] env[61839]: self.force_reraise() [ 546.872614] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 546.872614] env[61839]: raise self.value [ 546.872614] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 546.872614] env[61839]: updated_port = self._update_port( [ 546.872614] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 546.872614] env[61839]: _ensure_no_port_binding_failure(port) [ 546.872614] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 546.872614] env[61839]: raise exception.PortBindingFailed(port_id=port['id']) [ 546.873671] env[61839]: nova.exception.PortBindingFailed: Binding failed for port 52531475-8b0c-433d-85e9-c0b56de31422, please check neutron logs for more information. [ 546.873671] env[61839]: Removing descriptor: 15 [ 546.875126] env[61839]: ERROR nova.compute.manager [None req-285e2342-47a9-4a7d-855a-b08b1b98031b tempest-MigrationsAdminTest-1877210458 tempest-MigrationsAdminTest-1877210458-project-member] [instance: 248ced8d-af78-4400-b9eb-449851b1bfa3] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 52531475-8b0c-433d-85e9-c0b56de31422, please check neutron logs for more information. 
[ 546.875126] env[61839]: ERROR nova.compute.manager [instance: 248ced8d-af78-4400-b9eb-449851b1bfa3] Traceback (most recent call last): [ 546.875126] env[61839]: ERROR nova.compute.manager [instance: 248ced8d-af78-4400-b9eb-449851b1bfa3] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 546.875126] env[61839]: ERROR nova.compute.manager [instance: 248ced8d-af78-4400-b9eb-449851b1bfa3] yield resources [ 546.875126] env[61839]: ERROR nova.compute.manager [instance: 248ced8d-af78-4400-b9eb-449851b1bfa3] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 546.875126] env[61839]: ERROR nova.compute.manager [instance: 248ced8d-af78-4400-b9eb-449851b1bfa3] self.driver.spawn(context, instance, image_meta, [ 546.875126] env[61839]: ERROR nova.compute.manager [instance: 248ced8d-af78-4400-b9eb-449851b1bfa3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 546.875126] env[61839]: ERROR nova.compute.manager [instance: 248ced8d-af78-4400-b9eb-449851b1bfa3] self._vmops.spawn(context, instance, image_meta, injected_files, [ 546.875126] env[61839]: ERROR nova.compute.manager [instance: 248ced8d-af78-4400-b9eb-449851b1bfa3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 546.875126] env[61839]: ERROR nova.compute.manager [instance: 248ced8d-af78-4400-b9eb-449851b1bfa3] vm_ref = self.build_virtual_machine(instance, [ 546.875126] env[61839]: ERROR nova.compute.manager [instance: 248ced8d-af78-4400-b9eb-449851b1bfa3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 546.877467] env[61839]: ERROR nova.compute.manager [instance: 248ced8d-af78-4400-b9eb-449851b1bfa3] vif_infos = vmwarevif.get_vif_info(self._session, [ 546.877467] env[61839]: ERROR nova.compute.manager [instance: 248ced8d-af78-4400-b9eb-449851b1bfa3] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 546.877467] env[61839]: ERROR nova.compute.manager [instance: 248ced8d-af78-4400-b9eb-449851b1bfa3] for vif in network_info: [ 546.877467] env[61839]: ERROR nova.compute.manager [instance: 248ced8d-af78-4400-b9eb-449851b1bfa3] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 546.877467] env[61839]: ERROR nova.compute.manager [instance: 248ced8d-af78-4400-b9eb-449851b1bfa3] return self._sync_wrapper(fn, *args, **kwargs) [ 546.877467] env[61839]: ERROR nova.compute.manager [instance: 248ced8d-af78-4400-b9eb-449851b1bfa3] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 546.877467] env[61839]: ERROR nova.compute.manager [instance: 248ced8d-af78-4400-b9eb-449851b1bfa3] self.wait() [ 546.877467] env[61839]: ERROR nova.compute.manager [instance: 248ced8d-af78-4400-b9eb-449851b1bfa3] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 546.877467] env[61839]: ERROR nova.compute.manager [instance: 248ced8d-af78-4400-b9eb-449851b1bfa3] self[:] = self._gt.wait() [ 546.877467] env[61839]: ERROR nova.compute.manager [instance: 248ced8d-af78-4400-b9eb-449851b1bfa3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 546.877467] env[61839]: ERROR nova.compute.manager [instance: 248ced8d-af78-4400-b9eb-449851b1bfa3] return self._exit_event.wait() [ 546.877467] env[61839]: ERROR nova.compute.manager [instance: 248ced8d-af78-4400-b9eb-449851b1bfa3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 546.877467] env[61839]: ERROR 
nova.compute.manager [instance: 248ced8d-af78-4400-b9eb-449851b1bfa3] result = hub.switch() [ 546.878104] env[61839]: ERROR nova.compute.manager [instance: 248ced8d-af78-4400-b9eb-449851b1bfa3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 546.878104] env[61839]: ERROR nova.compute.manager [instance: 248ced8d-af78-4400-b9eb-449851b1bfa3] return self.greenlet.switch() [ 546.878104] env[61839]: ERROR nova.compute.manager [instance: 248ced8d-af78-4400-b9eb-449851b1bfa3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 546.878104] env[61839]: ERROR nova.compute.manager [instance: 248ced8d-af78-4400-b9eb-449851b1bfa3] result = function(*args, **kwargs) [ 546.878104] env[61839]: ERROR nova.compute.manager [instance: 248ced8d-af78-4400-b9eb-449851b1bfa3] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 546.878104] env[61839]: ERROR nova.compute.manager [instance: 248ced8d-af78-4400-b9eb-449851b1bfa3] return func(*args, **kwargs) [ 546.878104] env[61839]: ERROR nova.compute.manager [instance: 248ced8d-af78-4400-b9eb-449851b1bfa3] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 546.878104] env[61839]: ERROR nova.compute.manager [instance: 248ced8d-af78-4400-b9eb-449851b1bfa3] raise e [ 546.878104] env[61839]: ERROR nova.compute.manager [instance: 248ced8d-af78-4400-b9eb-449851b1bfa3] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 546.878104] env[61839]: ERROR nova.compute.manager [instance: 248ced8d-af78-4400-b9eb-449851b1bfa3] nwinfo = self.network_api.allocate_for_instance( [ 546.878104] env[61839]: ERROR nova.compute.manager [instance: 248ced8d-af78-4400-b9eb-449851b1bfa3] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 546.878104] env[61839]: ERROR nova.compute.manager [instance: 248ced8d-af78-4400-b9eb-449851b1bfa3] created_port_ids = self._update_ports_for_instance( [ 546.878104] env[61839]: ERROR nova.compute.manager [instance: 248ced8d-af78-4400-b9eb-449851b1bfa3] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 546.878556] env[61839]: ERROR nova.compute.manager [instance: 248ced8d-af78-4400-b9eb-449851b1bfa3] with excutils.save_and_reraise_exception(): [ 546.878556] env[61839]: ERROR nova.compute.manager [instance: 248ced8d-af78-4400-b9eb-449851b1bfa3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 546.878556] env[61839]: ERROR nova.compute.manager [instance: 248ced8d-af78-4400-b9eb-449851b1bfa3] self.force_reraise() [ 546.878556] env[61839]: ERROR nova.compute.manager [instance: 248ced8d-af78-4400-b9eb-449851b1bfa3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 546.878556] env[61839]: ERROR nova.compute.manager [instance: 248ced8d-af78-4400-b9eb-449851b1bfa3] raise self.value [ 546.878556] env[61839]: ERROR nova.compute.manager [instance: 248ced8d-af78-4400-b9eb-449851b1bfa3] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 546.878556] env[61839]: ERROR nova.compute.manager [instance: 248ced8d-af78-4400-b9eb-449851b1bfa3] updated_port = self._update_port( [ 546.878556] env[61839]: ERROR nova.compute.manager [instance: 248ced8d-af78-4400-b9eb-449851b1bfa3] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 546.878556] 
env[61839]: ERROR nova.compute.manager [instance: 248ced8d-af78-4400-b9eb-449851b1bfa3] _ensure_no_port_binding_failure(port) [ 546.878556] env[61839]: ERROR nova.compute.manager [instance: 248ced8d-af78-4400-b9eb-449851b1bfa3] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 546.878556] env[61839]: ERROR nova.compute.manager [instance: 248ced8d-af78-4400-b9eb-449851b1bfa3] raise exception.PortBindingFailed(port_id=port['id']) [ 546.878556] env[61839]: ERROR nova.compute.manager [instance: 248ced8d-af78-4400-b9eb-449851b1bfa3] nova.exception.PortBindingFailed: Binding failed for port 52531475-8b0c-433d-85e9-c0b56de31422, please check neutron logs for more information. [ 546.878556] env[61839]: ERROR nova.compute.manager [instance: 248ced8d-af78-4400-b9eb-449851b1bfa3] [ 546.879166] env[61839]: INFO nova.compute.manager [None req-285e2342-47a9-4a7d-855a-b08b1b98031b tempest-MigrationsAdminTest-1877210458 tempest-MigrationsAdminTest-1877210458-project-member] [instance: 248ced8d-af78-4400-b9eb-449851b1bfa3] Terminating instance [ 546.881850] env[61839]: DEBUG oslo_concurrency.lockutils [None req-285e2342-47a9-4a7d-855a-b08b1b98031b tempest-MigrationsAdminTest-1877210458 tempest-MigrationsAdminTest-1877210458-project-member] Acquiring lock "refresh_cache-248ced8d-af78-4400-b9eb-449851b1bfa3" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 546.881850] env[61839]: DEBUG oslo_concurrency.lockutils [None req-285e2342-47a9-4a7d-855a-b08b1b98031b tempest-MigrationsAdminTest-1877210458 tempest-MigrationsAdminTest-1877210458-project-member] Acquired lock "refresh_cache-248ced8d-af78-4400-b9eb-449851b1bfa3" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 546.881850] env[61839]: DEBUG nova.network.neutron [None req-285e2342-47a9-4a7d-855a-b08b1b98031b tempest-MigrationsAdminTest-1877210458 tempest-MigrationsAdminTest-1877210458-project-member] [instance: 248ced8d-af78-4400-b9eb-449851b1bfa3] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 547.053307] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance 29dcaaa2-04fe-4835-acc9-41c433e6165f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 547.113844] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b23937a-feae-42bc-8255-54b6d335ea09 tempest-ServersAdmin275Test-1522278051 tempest-ServersAdmin275Test-1522278051-project-admin] [instance: 211e8267-3c33-42c8-852f-1c20d7987453] Powering off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}}
[ 547.114264] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fd7147c3-e5df-466e-86c4-aaf39e668d91 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 547.124792] env[61839]: DEBUG oslo_vmware.api [None req-6b23937a-feae-42bc-8255-54b6d335ea09 tempest-ServersAdmin275Test-1522278051 tempest-ServersAdmin275Test-1522278051-project-admin] Waiting for the task: (returnval){
[ 547.124792] env[61839]: value = "task-1314245"
[ 547.124792] env[61839]: _type = "Task"
[ 547.124792] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 547.141792] env[61839]: DEBUG oslo_vmware.api [None req-6b23937a-feae-42bc-8255-54b6d335ea09 tempest-ServersAdmin275Test-1522278051 tempest-ServersAdmin275Test-1522278051-project-admin] Task: {'id': task-1314245, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 547.347715] env[61839]: DEBUG oslo_concurrency.lockutils [None req-2194692e-8fa8-43a0-9f32-21d13bbfb66f tempest-ServersTestFqdnHostnames-1400549167 tempest-ServersTestFqdnHostnames-1400549167-project-member] Lock "6e1ff1fd-f97d-4a0d-bbdd-7ee286577ff5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 28.233s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 547.440019] env[61839]: DEBUG nova.network.neutron [None req-285e2342-47a9-4a7d-855a-b08b1b98031b tempest-MigrationsAdminTest-1877210458 tempest-MigrationsAdminTest-1877210458-project-member] [instance: 248ced8d-af78-4400-b9eb-449851b1bfa3] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 547.557632] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance 89843511-d201-431b-918d-e789e38e4f68 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}.
{{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 547.624570] env[61839]: DEBUG oslo_concurrency.lockutils [None req-35b58d39-5733-4319-be91-b1dc8063c680 tempest-FloatingIPsAssociationNegativeTestJSON-254000745 tempest-FloatingIPsAssociationNegativeTestJSON-254000745-project-member] Acquiring lock "9e14bd1a-d6c2-4f4a-8919-27647ae5742b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 547.624817] env[61839]: DEBUG oslo_concurrency.lockutils [None req-35b58d39-5733-4319-be91-b1dc8063c680 tempest-FloatingIPsAssociationNegativeTestJSON-254000745 tempest-FloatingIPsAssociationNegativeTestJSON-254000745-project-member] Lock "9e14bd1a-d6c2-4f4a-8919-27647ae5742b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 547.638024] env[61839]: DEBUG oslo_vmware.api [None req-6b23937a-feae-42bc-8255-54b6d335ea09 tempest-ServersAdmin275Test-1522278051 tempest-ServersAdmin275Test-1522278051-project-admin] Task: {'id': task-1314245, 'name': PowerOffVM_Task, 'duration_secs': 0.154737} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 547.638024] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b23937a-feae-42bc-8255-54b6d335ea09 tempest-ServersAdmin275Test-1522278051 tempest-ServersAdmin275Test-1522278051-project-admin] [instance: 211e8267-3c33-42c8-852f-1c20d7987453] Powered off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 547.638196] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-6b23937a-feae-42bc-8255-54b6d335ea09 tempest-ServersAdmin275Test-1522278051 tempest-ServersAdmin275Test-1522278051-project-admin] [instance: 211e8267-3c33-42c8-852f-1c20d7987453] Destroying instance {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 547.639029] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42c0f78e-ad31-41e9-a48b-a7e0494cf4d8 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.644784] env[61839]: DEBUG nova.network.neutron [None req-285e2342-47a9-4a7d-855a-b08b1b98031b tempest-MigrationsAdminTest-1877210458 tempest-MigrationsAdminTest-1877210458-project-member] [instance: 248ced8d-af78-4400-b9eb-449851b1bfa3] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 547.650041] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-6b23937a-feae-42bc-8255-54b6d335ea09 tempest-ServersAdmin275Test-1522278051 tempest-ServersAdmin275Test-1522278051-project-admin] [instance: 211e8267-3c33-42c8-852f-1c20d7987453] Unregistering the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 547.650041] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5031cebb-a0f1-491e-bc68-967fb91c710c {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.677064] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None 
req-6b23937a-feae-42bc-8255-54b6d335ea09 tempest-ServersAdmin275Test-1522278051 tempest-ServersAdmin275Test-1522278051-project-admin] [instance: 211e8267-3c33-42c8-852f-1c20d7987453] Unregistered the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 547.677676] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-6b23937a-feae-42bc-8255-54b6d335ea09 tempest-ServersAdmin275Test-1522278051 tempest-ServersAdmin275Test-1522278051-project-admin] [instance: 211e8267-3c33-42c8-852f-1c20d7987453] Deleting contents of the VM from datastore datastore1 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 547.677813] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-6b23937a-feae-42bc-8255-54b6d335ea09 tempest-ServersAdmin275Test-1522278051 tempest-ServersAdmin275Test-1522278051-project-admin] Deleting the datastore file [datastore1] 211e8267-3c33-42c8-852f-1c20d7987453 {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 547.678162] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2511b7fe-cf10-4c7b-9567-987bfa6943b2 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.687746] env[61839]: DEBUG oslo_vmware.api [None req-6b23937a-feae-42bc-8255-54b6d335ea09 tempest-ServersAdmin275Test-1522278051 tempest-ServersAdmin275Test-1522278051-project-admin] Waiting for the task: (returnval){ [ 547.687746] env[61839]: value = "task-1314247" [ 547.687746] env[61839]: _type = "Task" [ 547.687746] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 547.698087] env[61839]: DEBUG oslo_vmware.api [None req-6b23937a-feae-42bc-8255-54b6d335ea09 tempest-ServersAdmin275Test-1522278051 tempest-ServersAdmin275Test-1522278051-project-admin] Task: {'id': task-1314247, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 547.853459] env[61839]: DEBUG nova.compute.manager [None req-a1353c68-52e0-45ee-a66a-5ba6b6d870a4 tempest-InstanceActionsTestJSON-1284580064 tempest-InstanceActionsTestJSON-1284580064-project-member] [instance: ae1917f8-29af-43cc-8397-3b9072acee6c] Starting instance... 
{{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 547.970258] env[61839]: DEBUG oslo_concurrency.lockutils [None req-96726cf8-093a-469e-ae4c-d851d34fa48d tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] Acquiring lock "d95b34bb-bf0f-4a43-a5ad-6ae7770b606c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 547.970722] env[61839]: DEBUG oslo_concurrency.lockutils [None req-96726cf8-093a-469e-ae4c-d851d34fa48d tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] Lock "d95b34bb-bf0f-4a43-a5ad-6ae7770b606c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 548.066932] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance 0b78efda-51c7-4d51-be31-005ff0d44ede has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 548.145670] env[61839]: DEBUG oslo_concurrency.lockutils [None req-285e2342-47a9-4a7d-855a-b08b1b98031b tempest-MigrationsAdminTest-1877210458 tempest-MigrationsAdminTest-1877210458-project-member] Releasing lock "refresh_cache-248ced8d-af78-4400-b9eb-449851b1bfa3" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 548.146351] env[61839]: DEBUG nova.compute.manager [None req-285e2342-47a9-4a7d-855a-b08b1b98031b tempest-MigrationsAdminTest-1877210458 tempest-MigrationsAdminTest-1877210458-project-member] [instance: 248ced8d-af78-4400-b9eb-449851b1bfa3] Start destroying the instance on the hypervisor. {{(pid=61839) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 548.147102] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-285e2342-47a9-4a7d-855a-b08b1b98031b tempest-MigrationsAdminTest-1877210458 tempest-MigrationsAdminTest-1877210458-project-member] [instance: 248ced8d-af78-4400-b9eb-449851b1bfa3] Destroying instance {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 548.147102] env[61839]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f5bcdf86-a30f-4235-95f4-eaf5b72f3263 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 548.158017] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c56d801-7200-4654-b153-d05bb297455a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 548.189225] env[61839]: WARNING nova.virt.vmwareapi.vmops [None req-285e2342-47a9-4a7d-855a-b08b1b98031b tempest-MigrationsAdminTest-1877210458 tempest-MigrationsAdminTest-1877210458-project-member] [instance: 248ced8d-af78-4400-b9eb-449851b1bfa3] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 248ced8d-af78-4400-b9eb-449851b1bfa3 could not be found. 
[ 548.189225] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-285e2342-47a9-4a7d-855a-b08b1b98031b tempest-MigrationsAdminTest-1877210458 tempest-MigrationsAdminTest-1877210458-project-member] [instance: 248ced8d-af78-4400-b9eb-449851b1bfa3] Instance destroyed {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 548.189338] env[61839]: INFO nova.compute.manager [None req-285e2342-47a9-4a7d-855a-b08b1b98031b tempest-MigrationsAdminTest-1877210458 tempest-MigrationsAdminTest-1877210458-project-member] [instance: 248ced8d-af78-4400-b9eb-449851b1bfa3] Took 0.04 seconds to destroy the instance on the hypervisor. [ 548.189493] env[61839]: DEBUG oslo.service.loopingcall [None req-285e2342-47a9-4a7d-855a-b08b1b98031b tempest-MigrationsAdminTest-1877210458 tempest-MigrationsAdminTest-1877210458-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 548.193309] env[61839]: DEBUG nova.compute.manager [-] [instance: 248ced8d-af78-4400-b9eb-449851b1bfa3] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 548.193409] env[61839]: DEBUG nova.network.neutron [-] [instance: 248ced8d-af78-4400-b9eb-449851b1bfa3] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 548.200385] env[61839]: DEBUG oslo_vmware.api [None req-6b23937a-feae-42bc-8255-54b6d335ea09 tempest-ServersAdmin275Test-1522278051 tempest-ServersAdmin275Test-1522278051-project-admin] Task: {'id': task-1314247, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.161444} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 548.200616] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-6b23937a-feae-42bc-8255-54b6d335ea09 tempest-ServersAdmin275Test-1522278051 tempest-ServersAdmin275Test-1522278051-project-admin] Deleted the datastore file {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 548.200789] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-6b23937a-feae-42bc-8255-54b6d335ea09 tempest-ServersAdmin275Test-1522278051 tempest-ServersAdmin275Test-1522278051-project-admin] [instance: 211e8267-3c33-42c8-852f-1c20d7987453] Deleted contents of the VM from datastore datastore1 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 548.201023] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-6b23937a-feae-42bc-8255-54b6d335ea09 tempest-ServersAdmin275Test-1522278051 tempest-ServersAdmin275Test-1522278051-project-admin] [instance: 211e8267-3c33-42c8-852f-1c20d7987453] Instance destroyed {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 548.234010] env[61839]: DEBUG nova.network.neutron [-] [instance: 248ced8d-af78-4400-b9eb-449851b1bfa3] Instance cache missing network info. 
{{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 548.385155] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a1353c68-52e0-45ee-a66a-5ba6b6d870a4 tempest-InstanceActionsTestJSON-1284580064 tempest-InstanceActionsTestJSON-1284580064-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 548.395440] env[61839]: DEBUG nova.compute.manager [req-68abca67-63b8-4d71-bd9c-a11d33402554 req-76a28017-4955-442f-b94a-62fa66b709ba service nova] [instance: 248ced8d-af78-4400-b9eb-449851b1bfa3] Received event network-changed-52531475-8b0c-433d-85e9-c0b56de31422 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 548.395646] env[61839]: DEBUG nova.compute.manager [req-68abca67-63b8-4d71-bd9c-a11d33402554 req-76a28017-4955-442f-b94a-62fa66b709ba service nova] [instance: 248ced8d-af78-4400-b9eb-449851b1bfa3] Refreshing instance network info cache due to event network-changed-52531475-8b0c-433d-85e9-c0b56de31422. {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 548.395992] env[61839]: DEBUG oslo_concurrency.lockutils [req-68abca67-63b8-4d71-bd9c-a11d33402554 req-76a28017-4955-442f-b94a-62fa66b709ba service nova] Acquiring lock "refresh_cache-248ced8d-af78-4400-b9eb-449851b1bfa3" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 548.396069] env[61839]: DEBUG oslo_concurrency.lockutils [req-68abca67-63b8-4d71-bd9c-a11d33402554 req-76a28017-4955-442f-b94a-62fa66b709ba service nova] Acquired lock "refresh_cache-248ced8d-af78-4400-b9eb-449851b1bfa3" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 548.396262] env[61839]: DEBUG nova.network.neutron [req-68abca67-63b8-4d71-bd9c-a11d33402554 req-76a28017-4955-442f-b94a-62fa66b709ba service nova] [instance: 248ced8d-af78-4400-b9eb-449851b1bfa3] Refreshing network info cache for port 52531475-8b0c-433d-85e9-c0b56de31422 {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 548.569078] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance ae1917f8-29af-43cc-8397-3b9072acee6c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 548.633555] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3f01d48c-4eb1-4941-a48e-1320be5c5916 tempest-FloatingIPsAssociationTestJSON-751321575 tempest-FloatingIPsAssociationTestJSON-751321575-project-member] Acquiring lock "916834d8-4819-4167-8774-b0a665021ef8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 548.633782] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3f01d48c-4eb1-4941-a48e-1320be5c5916 tempest-FloatingIPsAssociationTestJSON-751321575 tempest-FloatingIPsAssociationTestJSON-751321575-project-member] Lock "916834d8-4819-4167-8774-b0a665021ef8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 548.736547] env[61839]: DEBUG nova.network.neutron [-] [instance: 248ced8d-af78-4400-b9eb-449851b1bfa3] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 549.068207] env[61839]: DEBUG nova.network.neutron [req-68abca67-63b8-4d71-bd9c-a11d33402554 req-76a28017-4955-442f-b94a-62fa66b709ba service nova] [instance: 248ced8d-af78-4400-b9eb-449851b1bfa3] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 549.073367] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance 1edc2966-2edc-453e-a80d-c4139d910a6d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 549.073367] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Total usable vcpus: 48, total allocated vcpus: 5 {{(pid=61839) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 549.073573] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1472MB phys_disk=200GB used_disk=5GB total_vcpus=48 used_vcpus=5 pci_stats=[] {{(pid=61839) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 549.188025] env[61839]: DEBUG oslo_concurrency.lockutils [None req-eb9c6f05-d841-4e49-8f1a-c00c24c2a91c tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] Acquiring lock "7fdd773e-3a96-4728-b162-0227a415bc96" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 549.188416] env[61839]: DEBUG oslo_concurrency.lockutils [None req-eb9c6f05-d841-4e49-8f1a-c00c24c2a91c tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] Lock "7fdd773e-3a96-4728-b162-0227a415bc96" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 549.206918] env[61839]: DEBUG nova.network.neutron [req-68abca67-63b8-4d71-bd9c-a11d33402554 req-76a28017-4955-442f-b94a-62fa66b709ba service nova] [instance: 248ced8d-af78-4400-b9eb-449851b1bfa3] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 549.240305] env[61839]: DEBUG nova.virt.hardware [None req-6b23937a-feae-42bc-8255-54b6d335ea09 tempest-ServersAdmin275Test-1522278051 tempest-ServersAdmin275Test-1522278051-project-admin] Getting desirable topologies for flavor Flavor(created_at=2024-10-18T16:51:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-18T16:50:57Z,direct_url=,disk_format='vmdk',id=e497cc62-282a-4a70-9770-22d80d8a1013,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a98e034276e44534a9feace637762da7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-18T16:50:58Z,virtual_size=,visibility=), allow threads: False {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 549.240557] env[61839]: DEBUG nova.virt.hardware [None req-6b23937a-feae-42bc-8255-54b6d335ea09 tempest-ServersAdmin275Test-1522278051 tempest-ServersAdmin275Test-1522278051-project-admin] Flavor limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 549.240720] env[61839]: DEBUG nova.virt.hardware [None req-6b23937a-feae-42bc-8255-54b6d335ea09 
tempest-ServersAdmin275Test-1522278051 tempest-ServersAdmin275Test-1522278051-project-admin] Image limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 549.240890] env[61839]: DEBUG nova.virt.hardware [None req-6b23937a-feae-42bc-8255-54b6d335ea09 tempest-ServersAdmin275Test-1522278051 tempest-ServersAdmin275Test-1522278051-project-admin] Flavor pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 549.241110] env[61839]: DEBUG nova.virt.hardware [None req-6b23937a-feae-42bc-8255-54b6d335ea09 tempest-ServersAdmin275Test-1522278051 tempest-ServersAdmin275Test-1522278051-project-admin] Image pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 549.241286] env[61839]: DEBUG nova.virt.hardware [None req-6b23937a-feae-42bc-8255-54b6d335ea09 tempest-ServersAdmin275Test-1522278051 tempest-ServersAdmin275Test-1522278051-project-admin] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 549.241572] env[61839]: DEBUG nova.virt.hardware [None req-6b23937a-feae-42bc-8255-54b6d335ea09 tempest-ServersAdmin275Test-1522278051 tempest-ServersAdmin275Test-1522278051-project-admin] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 549.241953] env[61839]: DEBUG nova.virt.hardware [None req-6b23937a-feae-42bc-8255-54b6d335ea09 tempest-ServersAdmin275Test-1522278051 tempest-ServersAdmin275Test-1522278051-project-admin] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 549.242840] env[61839]: DEBUG nova.virt.hardware [None req-6b23937a-feae-42bc-8255-54b6d335ea09 tempest-ServersAdmin275Test-1522278051 tempest-ServersAdmin275Test-1522278051-project-admin] Got 1 possible topologies {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 549.242840] env[61839]: DEBUG nova.virt.hardware [None req-6b23937a-feae-42bc-8255-54b6d335ea09 tempest-ServersAdmin275Test-1522278051 tempest-ServersAdmin275Test-1522278051-project-admin] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 549.242840] env[61839]: DEBUG nova.virt.hardware [None req-6b23937a-feae-42bc-8255-54b6d335ea09 tempest-ServersAdmin275Test-1522278051 tempest-ServersAdmin275Test-1522278051-project-admin] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 549.243725] env[61839]: INFO nova.compute.manager [-] [instance: 248ced8d-af78-4400-b9eb-449851b1bfa3] Took 1.05 seconds to deallocate network for instance. 
[ 549.245441] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d86af95-1f40-4922-a628-8f171174b92a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.253020] env[61839]: DEBUG nova.compute.claims [None req-285e2342-47a9-4a7d-855a-b08b1b98031b tempest-MigrationsAdminTest-1877210458 tempest-MigrationsAdminTest-1877210458-project-member] [instance: 248ced8d-af78-4400-b9eb-449851b1bfa3] Aborting claim: {{(pid=61839) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 549.253217] env[61839]: DEBUG oslo_concurrency.lockutils [None req-285e2342-47a9-4a7d-855a-b08b1b98031b tempest-MigrationsAdminTest-1877210458 tempest-MigrationsAdminTest-1877210458-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 549.259095] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02d66647-210a-4c9f-a609-33986835d860 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.273738] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-6b23937a-feae-42bc-8255-54b6d335ea09 tempest-ServersAdmin275Test-1522278051 tempest-ServersAdmin275Test-1522278051-project-admin] [instance: 211e8267-3c33-42c8-852f-1c20d7987453] Instance VIF info [] {{(pid=61839) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 549.279379] env[61839]: DEBUG oslo.service.loopingcall [None req-6b23937a-feae-42bc-8255-54b6d335ea09 tempest-ServersAdmin275Test-1522278051 tempest-ServersAdmin275Test-1522278051-project-admin] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 549.285530] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 211e8267-3c33-42c8-852f-1c20d7987453] Creating VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 549.285530] env[61839]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-21c6e199-e38c-46e1-a1ba-6fb9c4ae857e {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.303953] env[61839]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 549.303953] env[61839]: value = "task-1314248" [ 549.303953] env[61839]: _type = "Task" [ 549.303953] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 549.318982] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314248, 'name': CreateVM_Task} progress is 6%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 549.408947] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2df97182-49ba-4f33-b185-032d87fde702 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.418026] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f4a7a03-88fa-439b-bb71-61a38b39ab77 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.449957] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34727bd0-be92-4f8f-b4be-4338e3051dad {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.456625] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b65b537-4aa6-4d17-8044-2244ab33d1fa {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.471679] env[61839]: DEBUG nova.compute.provider_tree [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 549.713458] env[61839]: DEBUG oslo_concurrency.lockutils [req-68abca67-63b8-4d71-bd9c-a11d33402554 req-76a28017-4955-442f-b94a-62fa66b709ba service nova] Releasing lock "refresh_cache-248ced8d-af78-4400-b9eb-449851b1bfa3" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 549.813624] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314248, 'name': CreateVM_Task, 'duration_secs': 0.288882} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 549.815109] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 211e8267-3c33-42c8-852f-1c20d7987453] Created VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 549.815109] env[61839]: DEBUG oslo_concurrency.lockutils [None req-6b23937a-feae-42bc-8255-54b6d335ea09 tempest-ServersAdmin275Test-1522278051 tempest-ServersAdmin275Test-1522278051-project-admin] Acquiring lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 549.815109] env[61839]: DEBUG oslo_concurrency.lockutils [None req-6b23937a-feae-42bc-8255-54b6d335ea09 tempest-ServersAdmin275Test-1522278051 tempest-ServersAdmin275Test-1522278051-project-admin] Acquired lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 549.815109] env[61839]: DEBUG oslo_concurrency.lockutils [None req-6b23937a-feae-42bc-8255-54b6d335ea09 tempest-ServersAdmin275Test-1522278051 tempest-ServersAdmin275Test-1522278051-project-admin] Acquired external semaphore "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 549.815109] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b3409291-a13a-4e9f-9c7e-2df39e3b6809 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.819313] env[61839]: DEBUG oslo_vmware.api [None req-6b23937a-feae-42bc-8255-54b6d335ea09 tempest-ServersAdmin275Test-1522278051 tempest-ServersAdmin275Test-1522278051-project-admin] Waiting for the task: (returnval){ [ 549.819313] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52bdb613-5086-7832-eae4-751d8c47a094" [ 549.819313] env[61839]: _type = "Task" [ 549.819313] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 549.826974] env[61839]: DEBUG oslo_vmware.api [None req-6b23937a-feae-42bc-8255-54b6d335ea09 tempest-ServersAdmin275Test-1522278051 tempest-ServersAdmin275Test-1522278051-project-admin] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52bdb613-5086-7832-eae4-751d8c47a094, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 549.976340] env[61839]: DEBUG nova.scheduler.client.report [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 550.106802] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3986eaa2-d0de-4161-82e0-803ae69da9c7 tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] Acquiring lock "f8b36479-70a1-4f4e-84f4-e3baf9a56c45" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 550.107098] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3986eaa2-d0de-4161-82e0-803ae69da9c7 tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] Lock "f8b36479-70a1-4f4e-84f4-e3baf9a56c45" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 550.331392] env[61839]: DEBUG oslo_vmware.api [None req-6b23937a-feae-42bc-8255-54b6d335ea09 tempest-ServersAdmin275Test-1522278051 tempest-ServersAdmin275Test-1522278051-project-admin] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52bdb613-5086-7832-eae4-751d8c47a094, 'name': SearchDatastore_Task, 'duration_secs': 0.009991} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 550.331788] env[61839]: DEBUG oslo_concurrency.lockutils [None req-6b23937a-feae-42bc-8255-54b6d335ea09 tempest-ServersAdmin275Test-1522278051 tempest-ServersAdmin275Test-1522278051-project-admin] Releasing lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 550.331910] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-6b23937a-feae-42bc-8255-54b6d335ea09 tempest-ServersAdmin275Test-1522278051 tempest-ServersAdmin275Test-1522278051-project-admin] [instance: 211e8267-3c33-42c8-852f-1c20d7987453] Processing image e497cc62-282a-4a70-9770-22d80d8a1013 {{(pid=61839) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 550.332153] env[61839]: DEBUG oslo_concurrency.lockutils [None req-6b23937a-feae-42bc-8255-54b6d335ea09 tempest-ServersAdmin275Test-1522278051 tempest-ServersAdmin275Test-1522278051-project-admin] Acquiring lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 550.332296] env[61839]: DEBUG oslo_concurrency.lockutils [None req-6b23937a-feae-42bc-8255-54b6d335ea09 tempest-ServersAdmin275Test-1522278051 tempest-ServersAdmin275Test-1522278051-project-admin] Acquired lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 550.332472] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-6b23937a-feae-42bc-8255-54b6d335ea09 tempest-ServersAdmin275Test-1522278051 tempest-ServersAdmin275Test-1522278051-project-admin] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 550.332725] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3fa8a8f3-0d20-4809-b346-4a4c74afff14 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.340470] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-6b23937a-feae-42bc-8255-54b6d335ea09 tempest-ServersAdmin275Test-1522278051 tempest-ServersAdmin275Test-1522278051-project-admin] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 550.340647] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-6b23937a-feae-42bc-8255-54b6d335ea09 tempest-ServersAdmin275Test-1522278051 tempest-ServersAdmin275Test-1522278051-project-admin] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61839) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 550.341379] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-68141736-951d-4e78-a471-48337996ff92 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.346903] env[61839]: DEBUG oslo_vmware.api [None req-6b23937a-feae-42bc-8255-54b6d335ea09 tempest-ServersAdmin275Test-1522278051 tempest-ServersAdmin275Test-1522278051-project-admin] Waiting for the task: (returnval){ [ 550.346903] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52cd7bdd-1c79-5f4b-f21d-8d37a36ae157" [ 550.346903] env[61839]: _type = "Task" [ 550.346903] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 550.356150] env[61839]: DEBUG oslo_vmware.api [None req-6b23937a-feae-42bc-8255-54b6d335ea09 tempest-ServersAdmin275Test-1522278051 tempest-ServersAdmin275Test-1522278051-project-admin] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52cd7bdd-1c79-5f4b-f21d-8d37a36ae157, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 550.480888] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61839) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 550.481215] env[61839]: DEBUG oslo_concurrency.lockutils [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 7.015s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 550.481629] env[61839]: DEBUG oslo_concurrency.lockutils [None req-26fc30cc-4ad1-4009-975c-8d3de6d95ff0 tempest-ServerExternalEventsTest-1581584313 tempest-ServerExternalEventsTest-1581584313-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.095s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 550.482963] env[61839]: INFO nova.compute.claims [None req-26fc30cc-4ad1-4009-975c-8d3de6d95ff0 tempest-ServerExternalEventsTest-1581584313 tempest-ServerExternalEventsTest-1581584313-project-member] [instance: d0822954-42c2-4003-baf4-97bc2ce65768] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 550.485738] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager._cleanup_running_deleted_instances {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 550.486056] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Getting list of instances from cluster (obj){ [ 550.486056] env[61839]: value = "domain-c8" [ 550.486056] env[61839]: _type = "ClusterComputeResource" [ 550.486056] env[61839]: } {{(pid=61839) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 550.493644] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-70606cf5-f4ba-4e7b-a372-509ad9bdf8fb {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.504513] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Got total of 2 instances {{(pid=61839) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 550.860552] env[61839]: DEBUG oslo_vmware.api [None req-6b23937a-feae-42bc-8255-54b6d335ea09 tempest-ServersAdmin275Test-1522278051 tempest-ServersAdmin275Test-1522278051-project-admin] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52cd7bdd-1c79-5f4b-f21d-8d37a36ae157, 'name': SearchDatastore_Task, 'duration_secs': 0.010694} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 550.861364] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-27a84647-9816-4ed9-b3c1-effcc7a81e71 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.867189] env[61839]: DEBUG oslo_vmware.api [None req-6b23937a-feae-42bc-8255-54b6d335ea09 tempest-ServersAdmin275Test-1522278051 tempest-ServersAdmin275Test-1522278051-project-admin] Waiting for the task: (returnval){ [ 550.867189] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52f233a9-324d-a40e-6369-ee85ef4f663c" [ 550.867189] env[61839]: _type = "Task" [ 550.867189] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 550.879168] env[61839]: DEBUG oslo_vmware.api [None req-6b23937a-feae-42bc-8255-54b6d335ea09 tempest-ServersAdmin275Test-1522278051 tempest-ServersAdmin275Test-1522278051-project-admin] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52f233a9-324d-a40e-6369-ee85ef4f663c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 551.168805] env[61839]: DEBUG nova.compute.manager [req-0231b4dc-511c-4d7d-bbfc-4565b979c6bb req-7c6e9119-14ea-444c-9fdb-ee6e8659c356 service nova] [instance: 248ced8d-af78-4400-b9eb-449851b1bfa3] Received event network-vif-deleted-52531475-8b0c-433d-85e9-c0b56de31422 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 551.382307] env[61839]: DEBUG oslo_vmware.api [None req-6b23937a-feae-42bc-8255-54b6d335ea09 tempest-ServersAdmin275Test-1522278051 tempest-ServersAdmin275Test-1522278051-project-admin] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52f233a9-324d-a40e-6369-ee85ef4f663c, 'name': SearchDatastore_Task, 'duration_secs': 0.008846} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 551.382785] env[61839]: DEBUG oslo_concurrency.lockutils [None req-6b23937a-feae-42bc-8255-54b6d335ea09 tempest-ServersAdmin275Test-1522278051 tempest-ServersAdmin275Test-1522278051-project-admin] Releasing lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 551.383061] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b23937a-feae-42bc-8255-54b6d335ea09 tempest-ServersAdmin275Test-1522278051 tempest-ServersAdmin275Test-1522278051-project-admin] Copying Virtual Disk [datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk to [datastore1] 211e8267-3c33-42c8-852f-1c20d7987453/211e8267-3c33-42c8-852f-1c20d7987453.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 551.383773] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8a7bdc1f-c232-4d97-a16f-15c55c51bf6d {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.393568] env[61839]: DEBUG oslo_vmware.api [None req-6b23937a-feae-42bc-8255-54b6d335ea09 tempest-ServersAdmin275Test-1522278051 tempest-ServersAdmin275Test-1522278051-project-admin] Waiting for the task: (returnval){ [ 551.393568] env[61839]: value = "task-1314249" [ 551.393568] env[61839]: _type = "Task" [ 551.393568] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 551.403558] env[61839]: DEBUG oslo_vmware.api [None req-6b23937a-feae-42bc-8255-54b6d335ea09 tempest-ServersAdmin275Test-1522278051 tempest-ServersAdmin275Test-1522278051-project-admin] Task: {'id': task-1314249, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 551.487375] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 551.487375] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 551.487375] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Starting heal instance info cache {{(pid=61839) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 551.487574] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Rebuilding the list of instances to heal {{(pid=61839) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 551.845099] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a0ee1f1-9312-4388-b3fb-bf40207676e0 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.857277] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-708d8770-d81f-47c0-acae-91d4bbb790cd {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.904614] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b8db29a-52d8-49e9-81af-d6eddb5b1278 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.912262] env[61839]: DEBUG oslo_vmware.api [None req-6b23937a-feae-42bc-8255-54b6d335ea09 tempest-ServersAdmin275Test-1522278051 tempest-ServersAdmin275Test-1522278051-project-admin] Task: {'id': task-1314249, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.491115} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 551.914909] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b23937a-feae-42bc-8255-54b6d335ea09 tempest-ServersAdmin275Test-1522278051 tempest-ServersAdmin275Test-1522278051-project-admin] Copied Virtual Disk [datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk to [datastore1] 211e8267-3c33-42c8-852f-1c20d7987453/211e8267-3c33-42c8-852f-1c20d7987453.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 551.915167] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-6b23937a-feae-42bc-8255-54b6d335ea09 tempest-ServersAdmin275Test-1522278051 tempest-ServersAdmin275Test-1522278051-project-admin] [instance: 211e8267-3c33-42c8-852f-1c20d7987453] Extending root virtual disk to 1048576 {{(pid=61839) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 551.918019] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c31554cb-7e0c-441d-82dd-b253513777fc {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.921751] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dae7dcd5-37dd-4f49-b3d4-213dda3989e5 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.937917] env[61839]: DEBUG nova.compute.provider_tree [None req-26fc30cc-4ad1-4009-975c-8d3de6d95ff0 tempest-ServerExternalEventsTest-1581584313 tempest-ServerExternalEventsTest-1581584313-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 551.943706] env[61839]: DEBUG oslo_vmware.api [None req-6b23937a-feae-42bc-8255-54b6d335ea09 tempest-ServersAdmin275Test-1522278051 tempest-ServersAdmin275Test-1522278051-project-admin] Waiting for the task: (returnval){ [ 551.943706] env[61839]: value = "task-1314250" [ 551.943706] env[61839]: _type = "Task" [ 551.943706] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 551.950294] env[61839]: DEBUG oslo_vmware.api [None req-6b23937a-feae-42bc-8255-54b6d335ea09 tempest-ServersAdmin275Test-1522278051 tempest-ServersAdmin275Test-1522278051-project-admin] Task: {'id': task-1314250, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 551.992305] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] [instance: 9594f132-d558-4c75-872f-b1d1b7c08f66] Skipping network cache update for instance because it is Building. {{(pid=61839) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 551.992486] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] [instance: d0ed5c81-e05a-41a3-9e45-ae0a2a235f16] Skipping network cache update for instance because it is Building. 
{{(pid=61839) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 551.992553] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] [instance: 248ced8d-af78-4400-b9eb-449851b1bfa3] Skipping network cache update for instance because it is Building. {{(pid=61839) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 551.992619] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] [instance: d0822954-42c2-4003-baf4-97bc2ce65768] Skipping network cache update for instance because it is Building. {{(pid=61839) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 551.993022] env[61839]: DEBUG oslo_concurrency.lockutils [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Acquiring lock "refresh_cache-211e8267-3c33-42c8-852f-1c20d7987453" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 551.993022] env[61839]: DEBUG oslo_concurrency.lockutils [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Acquired lock "refresh_cache-211e8267-3c33-42c8-852f-1c20d7987453" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 551.993196] env[61839]: DEBUG nova.network.neutron [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] [instance: 211e8267-3c33-42c8-852f-1c20d7987453] Forcefully refreshing network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 551.995018] env[61839]: DEBUG nova.objects.instance [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Lazy-loading 'info_cache' on Instance uuid 211e8267-3c33-42c8-852f-1c20d7987453 {{(pid=61839) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 552.190290] env[61839]: DEBUG oslo_concurrency.lockutils [None req-f03d1837-14e7-4fc0-adb1-ca8100ce1425 tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] Acquiring lock "86a1fc77-26d9-44c7-8f1f-771315769619" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 552.190290] env[61839]: DEBUG oslo_concurrency.lockutils [None req-f03d1837-14e7-4fc0-adb1-ca8100ce1425 tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] Lock "86a1fc77-26d9-44c7-8f1f-771315769619" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 552.443636] env[61839]: DEBUG nova.scheduler.client.report [None req-26fc30cc-4ad1-4009-975c-8d3de6d95ff0 tempest-ServerExternalEventsTest-1581584313 tempest-ServerExternalEventsTest-1581584313-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider 
/opt/stack/nova/nova/scheduler/client/report.py:954}} [ 552.461350] env[61839]: DEBUG oslo_vmware.api [None req-6b23937a-feae-42bc-8255-54b6d335ea09 tempest-ServersAdmin275Test-1522278051 tempest-ServersAdmin275Test-1522278051-project-admin] Task: {'id': task-1314250, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063725} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 552.462406] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-6b23937a-feae-42bc-8255-54b6d335ea09 tempest-ServersAdmin275Test-1522278051 tempest-ServersAdmin275Test-1522278051-project-admin] [instance: 211e8267-3c33-42c8-852f-1c20d7987453] Extended root virtual disk {{(pid=61839) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 552.465436] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd50611b-c057-4888-9ef3-9fa1b6a51e3c {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 552.494680] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-6b23937a-feae-42bc-8255-54b6d335ea09 tempest-ServersAdmin275Test-1522278051 tempest-ServersAdmin275Test-1522278051-project-admin] [instance: 211e8267-3c33-42c8-852f-1c20d7987453] Reconfiguring VM instance instance-00000004 to attach disk [datastore1] 211e8267-3c33-42c8-852f-1c20d7987453/211e8267-3c33-42c8-852f-1c20d7987453.vmdk or device None with type sparse {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 552.496236] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b9703271-8c5d-492e-8815-db79446b5c36 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 552.519240] env[61839]: DEBUG oslo_vmware.api [None req-6b23937a-feae-42bc-8255-54b6d335ea09 tempest-ServersAdmin275Test-1522278051 tempest-ServersAdmin275Test-1522278051-project-admin] Waiting for the task: (returnval){ [ 552.519240] env[61839]: value = "task-1314251" [ 552.519240] env[61839]: _type = "Task" [ 552.519240] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 552.527924] env[61839]: DEBUG oslo_vmware.api [None req-6b23937a-feae-42bc-8255-54b6d335ea09 tempest-ServersAdmin275Test-1522278051 tempest-ServersAdmin275Test-1522278051-project-admin] Task: {'id': task-1314251, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 552.598154] env[61839]: DEBUG oslo_concurrency.lockutils [None req-d5b699c0-0044-418d-93de-9c5330e52324 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] Acquiring lock "d8b0b608-d8ca-45ce-a113-ee96b5ef1a9d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 552.599111] env[61839]: DEBUG oslo_concurrency.lockutils [None req-d5b699c0-0044-418d-93de-9c5330e52324 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] Lock "d8b0b608-d8ca-45ce-a113-ee96b5ef1a9d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 552.955182] env[61839]: DEBUG oslo_concurrency.lockutils [None req-26fc30cc-4ad1-4009-975c-8d3de6d95ff0 tempest-ServerExternalEventsTest-1581584313 tempest-ServerExternalEventsTest-1581584313-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.473s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 552.955736] env[61839]: DEBUG nova.compute.manager [None req-26fc30cc-4ad1-4009-975c-8d3de6d95ff0 tempest-ServerExternalEventsTest-1581584313 tempest-ServerExternalEventsTest-1581584313-project-member] [instance: d0822954-42c2-4003-baf4-97bc2ce65768] Start building networks asynchronously for instance. {{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 552.958259] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0477abbc-1d43-4051-b701-9bb77cd00e21 tempest-ServerRescueTestJSON-1450719199 tempest-ServerRescueTestJSON-1450719199-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.478s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 552.959587] env[61839]: INFO nova.compute.claims [None req-0477abbc-1d43-4051-b701-9bb77cd00e21 tempest-ServerRescueTestJSON-1450719199 tempest-ServerRescueTestJSON-1450719199-project-member] [instance: 2032b746-2161-4487-ae4c-6159313241f4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 553.031530] env[61839]: DEBUG oslo_vmware.api [None req-6b23937a-feae-42bc-8255-54b6d335ea09 tempest-ServersAdmin275Test-1522278051 tempest-ServersAdmin275Test-1522278051-project-admin] Task: {'id': task-1314251, 'name': ReconfigVM_Task, 'duration_secs': 0.254796} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 553.031530] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-6b23937a-feae-42bc-8255-54b6d335ea09 tempest-ServersAdmin275Test-1522278051 tempest-ServersAdmin275Test-1522278051-project-admin] [instance: 211e8267-3c33-42c8-852f-1c20d7987453] Reconfigured VM instance instance-00000004 to attach disk [datastore1] 211e8267-3c33-42c8-852f-1c20d7987453/211e8267-3c33-42c8-852f-1c20d7987453.vmdk or device None with type sparse {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 553.031530] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0c5d0232-3f1a-4177-9998-2398c69f0da2 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 553.037495] env[61839]: DEBUG oslo_vmware.api [None req-6b23937a-feae-42bc-8255-54b6d335ea09 tempest-ServersAdmin275Test-1522278051 tempest-ServersAdmin275Test-1522278051-project-admin] Waiting for the task: (returnval){ [ 553.037495] env[61839]: value = "task-1314252" [ 553.037495] env[61839]: _type = "Task" [ 553.037495] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 553.046076] env[61839]: DEBUG oslo_vmware.api [None req-6b23937a-feae-42bc-8255-54b6d335ea09 tempest-ServersAdmin275Test-1522278051 tempest-ServersAdmin275Test-1522278051-project-admin] Task: {'id': task-1314252, 'name': Rename_Task} progress is 5%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 553.049848] env[61839]: DEBUG nova.network.neutron [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] [instance: 211e8267-3c33-42c8-852f-1c20d7987453] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 553.471499] env[61839]: DEBUG nova.compute.utils [None req-26fc30cc-4ad1-4009-975c-8d3de6d95ff0 tempest-ServerExternalEventsTest-1581584313 tempest-ServerExternalEventsTest-1581584313-project-member] Using /dev/sd instead of None {{(pid=61839) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 553.479843] env[61839]: DEBUG nova.compute.manager [None req-26fc30cc-4ad1-4009-975c-8d3de6d95ff0 tempest-ServerExternalEventsTest-1581584313 tempest-ServerExternalEventsTest-1581584313-project-member] [instance: d0822954-42c2-4003-baf4-97bc2ce65768] Allocating IP information in the background. {{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 553.483023] env[61839]: DEBUG nova.network.neutron [None req-26fc30cc-4ad1-4009-975c-8d3de6d95ff0 tempest-ServerExternalEventsTest-1581584313 tempest-ServerExternalEventsTest-1581584313-project-member] [instance: d0822954-42c2-4003-baf4-97bc2ce65768] allocate_for_instance() {{(pid=61839) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 553.551291] env[61839]: DEBUG oslo_vmware.api [None req-6b23937a-feae-42bc-8255-54b6d335ea09 tempest-ServersAdmin275Test-1522278051 tempest-ServersAdmin275Test-1522278051-project-admin] Task: {'id': task-1314252, 'name': Rename_Task, 'duration_secs': 0.123605} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 553.551683] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b23937a-feae-42bc-8255-54b6d335ea09 tempest-ServersAdmin275Test-1522278051 tempest-ServersAdmin275Test-1522278051-project-admin] [instance: 211e8267-3c33-42c8-852f-1c20d7987453] Powering on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 553.551961] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-85cd8918-5226-4fba-b9ff-17be95f42f9c {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 553.563032] env[61839]: DEBUG oslo_vmware.api [None req-6b23937a-feae-42bc-8255-54b6d335ea09 tempest-ServersAdmin275Test-1522278051 tempest-ServersAdmin275Test-1522278051-project-admin] Waiting for the task: (returnval){ [ 553.563032] env[61839]: value = "task-1314253" [ 553.563032] env[61839]: _type = "Task" [ 553.563032] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 553.570921] env[61839]: DEBUG oslo_vmware.api [None req-6b23937a-feae-42bc-8255-54b6d335ea09 tempest-ServersAdmin275Test-1522278051 tempest-ServersAdmin275Test-1522278051-project-admin] Task: {'id': task-1314253, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 553.823966] env[61839]: DEBUG nova.policy [None req-26fc30cc-4ad1-4009-975c-8d3de6d95ff0 tempest-ServerExternalEventsTest-1581584313 tempest-ServerExternalEventsTest-1581584313-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '479aa01185584e4da6cc97070d982b01', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3befea17539e4c97bc8ea123cb06f287', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61839) authorize /opt/stack/nova/nova/policy.py:201}} [ 553.956569] env[61839]: DEBUG nova.network.neutron [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] [instance: 211e8267-3c33-42c8-852f-1c20d7987453] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 553.977081] env[61839]: DEBUG nova.compute.manager [None req-26fc30cc-4ad1-4009-975c-8d3de6d95ff0 tempest-ServerExternalEventsTest-1581584313 tempest-ServerExternalEventsTest-1581584313-project-member] [instance: d0822954-42c2-4003-baf4-97bc2ce65768] Start building block device mappings for instance. {{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 554.073685] env[61839]: DEBUG oslo_vmware.api [None req-6b23937a-feae-42bc-8255-54b6d335ea09 tempest-ServersAdmin275Test-1522278051 tempest-ServersAdmin275Test-1522278051-project-admin] Task: {'id': task-1314253, 'name': PowerOnVM_Task} progress is 64%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 554.414846] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da5166be-5f6a-4197-8efa-db0940943f88 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 554.422784] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab0d9446-0b73-447c-9785-b17356cc3565 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 554.460909] env[61839]: DEBUG oslo_concurrency.lockutils [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Releasing lock "refresh_cache-211e8267-3c33-42c8-852f-1c20d7987453" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 554.461394] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] [instance: 211e8267-3c33-42c8-852f-1c20d7987453] Updated the network info_cache for instance {{(pid=61839) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9999}} [ 554.461709] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 554.462423] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8af7230-c59f-4f82-b6dc-cb6cd57432c3 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 554.465385] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 554.467025] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 554.467025] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 554.467025] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 554.471139] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16c4576d-1182-48c2-ac5e-4aaf7f9990af {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 554.499287] env[61839]: DEBUG nova.compute.provider_tree [None req-0477abbc-1d43-4051-b701-9bb77cd00e21 tempest-ServerRescueTestJSON-1450719199 tempest-ServerRescueTestJSON-1450719199-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 
{{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 554.574131] env[61839]: DEBUG oslo_vmware.api [None req-6b23937a-feae-42bc-8255-54b6d335ea09 tempest-ServersAdmin275Test-1522278051 tempest-ServersAdmin275Test-1522278051-project-admin] Task: {'id': task-1314253, 'name': PowerOnVM_Task, 'duration_secs': 0.808951} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 554.574721] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b23937a-feae-42bc-8255-54b6d335ea09 tempest-ServersAdmin275Test-1522278051 tempest-ServersAdmin275Test-1522278051-project-admin] [instance: 211e8267-3c33-42c8-852f-1c20d7987453] Powered on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 554.574721] env[61839]: DEBUG nova.compute.manager [None req-6b23937a-feae-42bc-8255-54b6d335ea09 tempest-ServersAdmin275Test-1522278051 tempest-ServersAdmin275Test-1522278051-project-admin] [instance: 211e8267-3c33-42c8-852f-1c20d7987453] Checking state {{(pid=61839) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 554.577229] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ffeac44-dcb7-4fdc-9b41-c14a6e55c370 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 554.867134] env[61839]: DEBUG nova.compute.manager [None req-b146072d-f2db-4ad5-ba47-8a0c47ba6bd5 tempest-ServerDiagnosticsV248Test-233712532 tempest-ServerDiagnosticsV248Test-233712532-project-admin] [instance: 41823a25-5ff2-4838-854d-5bada8e5daca] Checking state {{(pid=61839) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 554.868423] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c254b4c1-8a20-423a-b148-1946e499953e {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 554.875491] env[61839]: INFO nova.compute.manager [None req-b146072d-f2db-4ad5-ba47-8a0c47ba6bd5 tempest-ServerDiagnosticsV248Test-233712532 tempest-ServerDiagnosticsV248Test-233712532-project-admin] [instance: 41823a25-5ff2-4838-854d-5bada8e5daca] Retrieving diagnostics [ 554.876650] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7432746c-4460-4c1e-9acc-aceab199b206 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.006315] env[61839]: DEBUG nova.compute.manager [None req-26fc30cc-4ad1-4009-975c-8d3de6d95ff0 tempest-ServerExternalEventsTest-1581584313 tempest-ServerExternalEventsTest-1581584313-project-member] [instance: d0822954-42c2-4003-baf4-97bc2ce65768] Start spawning the instance on the hypervisor. 
{{(pid=61839) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 555.008916] env[61839]: DEBUG nova.scheduler.client.report [None req-0477abbc-1d43-4051-b701-9bb77cd00e21 tempest-ServerRescueTestJSON-1450719199 tempest-ServerRescueTestJSON-1450719199-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 555.050444] env[61839]: DEBUG nova.virt.hardware [None req-26fc30cc-4ad1-4009-975c-8d3de6d95ff0 tempest-ServerExternalEventsTest-1581584313 tempest-ServerExternalEventsTest-1581584313-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-18T16:51:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-18T16:50:57Z,direct_url=,disk_format='vmdk',id=e497cc62-282a-4a70-9770-22d80d8a1013,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a98e034276e44534a9feace637762da7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-18T16:50:58Z,virtual_size=,visibility=), allow threads: False {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 555.050698] env[61839]: DEBUG nova.virt.hardware [None req-26fc30cc-4ad1-4009-975c-8d3de6d95ff0 tempest-ServerExternalEventsTest-1581584313 tempest-ServerExternalEventsTest-1581584313-project-member] Flavor limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 555.052456] env[61839]: DEBUG nova.virt.hardware [None req-26fc30cc-4ad1-4009-975c-8d3de6d95ff0 tempest-ServerExternalEventsTest-1581584313 tempest-ServerExternalEventsTest-1581584313-project-member] Image limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 555.052456] env[61839]: DEBUG nova.virt.hardware [None req-26fc30cc-4ad1-4009-975c-8d3de6d95ff0 tempest-ServerExternalEventsTest-1581584313 tempest-ServerExternalEventsTest-1581584313-project-member] Flavor pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 555.052456] env[61839]: DEBUG nova.virt.hardware [None req-26fc30cc-4ad1-4009-975c-8d3de6d95ff0 tempest-ServerExternalEventsTest-1581584313 tempest-ServerExternalEventsTest-1581584313-project-member] Image pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 555.052456] env[61839]: DEBUG nova.virt.hardware [None req-26fc30cc-4ad1-4009-975c-8d3de6d95ff0 tempest-ServerExternalEventsTest-1581584313 tempest-ServerExternalEventsTest-1581584313-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 555.052456] 
env[61839]: DEBUG nova.virt.hardware [None req-26fc30cc-4ad1-4009-975c-8d3de6d95ff0 tempest-ServerExternalEventsTest-1581584313 tempest-ServerExternalEventsTest-1581584313-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 555.052883] env[61839]: DEBUG nova.virt.hardware [None req-26fc30cc-4ad1-4009-975c-8d3de6d95ff0 tempest-ServerExternalEventsTest-1581584313 tempest-ServerExternalEventsTest-1581584313-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 555.052883] env[61839]: DEBUG nova.virt.hardware [None req-26fc30cc-4ad1-4009-975c-8d3de6d95ff0 tempest-ServerExternalEventsTest-1581584313 tempest-ServerExternalEventsTest-1581584313-project-member] Got 1 possible topologies {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 555.052883] env[61839]: DEBUG nova.virt.hardware [None req-26fc30cc-4ad1-4009-975c-8d3de6d95ff0 tempest-ServerExternalEventsTest-1581584313 tempest-ServerExternalEventsTest-1581584313-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 555.052883] env[61839]: DEBUG nova.virt.hardware [None req-26fc30cc-4ad1-4009-975c-8d3de6d95ff0 tempest-ServerExternalEventsTest-1581584313 tempest-ServerExternalEventsTest-1581584313-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 555.053351] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5529ad1-42bd-42ef-b842-5c40557d1704 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.061381] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acb86ed6-692d-4673-80a0-bb13669de005 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.105314] env[61839]: DEBUG oslo_concurrency.lockutils [None req-6b23937a-feae-42bc-8255-54b6d335ea09 tempest-ServersAdmin275Test-1522278051 tempest-ServersAdmin275Test-1522278051-project-admin] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 555.516196] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0477abbc-1d43-4051-b701-9bb77cd00e21 tempest-ServerRescueTestJSON-1450719199 tempest-ServerRescueTestJSON-1450719199-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.558s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 555.516503] env[61839]: DEBUG nova.compute.manager [None req-0477abbc-1d43-4051-b701-9bb77cd00e21 tempest-ServerRescueTestJSON-1450719199 tempest-ServerRescueTestJSON-1450719199-project-member] [instance: 2032b746-2161-4487-ae4c-6159313241f4] Start building networks asynchronously for instance. 
{{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 555.519270] env[61839]: DEBUG oslo_concurrency.lockutils [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.633s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 555.520816] env[61839]: INFO nova.compute.claims [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] [instance: ec27ab37-2351-4ad1-b41f-8de9bfab8b9d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 556.029046] env[61839]: DEBUG nova.compute.utils [None req-0477abbc-1d43-4051-b701-9bb77cd00e21 tempest-ServerRescueTestJSON-1450719199 tempest-ServerRescueTestJSON-1450719199-project-member] Using /dev/sd instead of None {{(pid=61839) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 556.030739] env[61839]: DEBUG nova.compute.manager [None req-0477abbc-1d43-4051-b701-9bb77cd00e21 tempest-ServerRescueTestJSON-1450719199 tempest-ServerRescueTestJSON-1450719199-project-member] [instance: 2032b746-2161-4487-ae4c-6159313241f4] Allocating IP information in the background. {{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 556.030739] env[61839]: DEBUG nova.network.neutron [None req-0477abbc-1d43-4051-b701-9bb77cd00e21 tempest-ServerRescueTestJSON-1450719199 tempest-ServerRescueTestJSON-1450719199-project-member] [instance: 2032b746-2161-4487-ae4c-6159313241f4] allocate_for_instance() {{(pid=61839) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 556.310637] env[61839]: DEBUG nova.policy [None req-0477abbc-1d43-4051-b701-9bb77cd00e21 tempest-ServerRescueTestJSON-1450719199 tempest-ServerRescueTestJSON-1450719199-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '837778fbb871418895f40f6aab56028c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd217788babb0497cb16ba18555ec6fd3', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61839) authorize /opt/stack/nova/nova/policy.py:201}} [ 556.374117] env[61839]: DEBUG nova.network.neutron [None req-26fc30cc-4ad1-4009-975c-8d3de6d95ff0 tempest-ServerExternalEventsTest-1581584313 tempest-ServerExternalEventsTest-1581584313-project-member] [instance: d0822954-42c2-4003-baf4-97bc2ce65768] Successfully created port: 9a01ee46-d236-4b9b-8c66-098e5658a725 {{(pid=61839) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 556.543570] env[61839]: DEBUG nova.compute.manager [None req-0477abbc-1d43-4051-b701-9bb77cd00e21 tempest-ServerRescueTestJSON-1450719199 tempest-ServerRescueTestJSON-1450719199-project-member] [instance: 2032b746-2161-4487-ae4c-6159313241f4] Start building block device mappings for instance. 
{{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 557.007964] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0eee0c2-4dd9-4930-8e61-921521a9a6f5 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.019080] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b7123d3-410c-4cfb-adab-36be8ff98243 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.051079] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e6103ca-19a5-454a-aeb8-3b1e2dfbf719 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.062331] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6241d29-0e49-428d-bf23-46332ae4c025 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.080904] env[61839]: DEBUG nova.compute.provider_tree [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 557.134663] env[61839]: DEBUG oslo_concurrency.lockutils [None req-62980ca7-dd2c-495e-b429-ffc9668e86f8 tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] Acquiring lock "41823a25-5ff2-4838-854d-5bada8e5daca" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 557.135075] env[61839]: DEBUG oslo_concurrency.lockutils [None req-62980ca7-dd2c-495e-b429-ffc9668e86f8 tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] Lock "41823a25-5ff2-4838-854d-5bada8e5daca" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 557.135569] env[61839]: DEBUG oslo_concurrency.lockutils [None req-62980ca7-dd2c-495e-b429-ffc9668e86f8 tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] Acquiring lock "41823a25-5ff2-4838-854d-5bada8e5daca-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 557.138377] env[61839]: DEBUG oslo_concurrency.lockutils [None req-62980ca7-dd2c-495e-b429-ffc9668e86f8 tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] Lock "41823a25-5ff2-4838-854d-5bada8e5daca-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 557.138377] env[61839]: DEBUG oslo_concurrency.lockutils [None req-62980ca7-dd2c-495e-b429-ffc9668e86f8 
tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] Lock "41823a25-5ff2-4838-854d-5bada8e5daca-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 557.140921] env[61839]: INFO nova.compute.manager [None req-62980ca7-dd2c-495e-b429-ffc9668e86f8 tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] [instance: 41823a25-5ff2-4838-854d-5bada8e5daca] Terminating instance [ 557.143623] env[61839]: DEBUG oslo_concurrency.lockutils [None req-62980ca7-dd2c-495e-b429-ffc9668e86f8 tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] Acquiring lock "refresh_cache-41823a25-5ff2-4838-854d-5bada8e5daca" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 557.144260] env[61839]: DEBUG oslo_concurrency.lockutils [None req-62980ca7-dd2c-495e-b429-ffc9668e86f8 tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] Acquired lock "refresh_cache-41823a25-5ff2-4838-854d-5bada8e5daca" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 557.144558] env[61839]: DEBUG nova.network.neutron [None req-62980ca7-dd2c-495e-b429-ffc9668e86f8 tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] [instance: 41823a25-5ff2-4838-854d-5bada8e5daca] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 557.559945] env[61839]: DEBUG nova.compute.manager [None req-0477abbc-1d43-4051-b701-9bb77cd00e21 tempest-ServerRescueTestJSON-1450719199 tempest-ServerRescueTestJSON-1450719199-project-member] [instance: 2032b746-2161-4487-ae4c-6159313241f4] Start spawning the instance on the hypervisor. 
{{(pid=61839) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 557.587711] env[61839]: DEBUG nova.scheduler.client.report [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 557.605672] env[61839]: DEBUG nova.virt.hardware [None req-0477abbc-1d43-4051-b701-9bb77cd00e21 tempest-ServerRescueTestJSON-1450719199 tempest-ServerRescueTestJSON-1450719199-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-18T16:51:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-18T16:50:57Z,direct_url=,disk_format='vmdk',id=e497cc62-282a-4a70-9770-22d80d8a1013,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a98e034276e44534a9feace637762da7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-18T16:50:58Z,virtual_size=,visibility=), allow threads: False {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 557.606359] env[61839]: DEBUG nova.virt.hardware [None req-0477abbc-1d43-4051-b701-9bb77cd00e21 tempest-ServerRescueTestJSON-1450719199 tempest-ServerRescueTestJSON-1450719199-project-member] Flavor limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 557.606443] env[61839]: DEBUG nova.virt.hardware [None req-0477abbc-1d43-4051-b701-9bb77cd00e21 tempest-ServerRescueTestJSON-1450719199 tempest-ServerRescueTestJSON-1450719199-project-member] Image limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 557.608615] env[61839]: DEBUG nova.virt.hardware [None req-0477abbc-1d43-4051-b701-9bb77cd00e21 tempest-ServerRescueTestJSON-1450719199 tempest-ServerRescueTestJSON-1450719199-project-member] Flavor pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 557.608615] env[61839]: DEBUG nova.virt.hardware [None req-0477abbc-1d43-4051-b701-9bb77cd00e21 tempest-ServerRescueTestJSON-1450719199 tempest-ServerRescueTestJSON-1450719199-project-member] Image pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 557.608615] env[61839]: DEBUG nova.virt.hardware [None req-0477abbc-1d43-4051-b701-9bb77cd00e21 tempest-ServerRescueTestJSON-1450719199 tempest-ServerRescueTestJSON-1450719199-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 557.608615] env[61839]: DEBUG nova.virt.hardware 
[None req-0477abbc-1d43-4051-b701-9bb77cd00e21 tempest-ServerRescueTestJSON-1450719199 tempest-ServerRescueTestJSON-1450719199-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 557.608615] env[61839]: DEBUG nova.virt.hardware [None req-0477abbc-1d43-4051-b701-9bb77cd00e21 tempest-ServerRescueTestJSON-1450719199 tempest-ServerRescueTestJSON-1450719199-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 557.608866] env[61839]: DEBUG nova.virt.hardware [None req-0477abbc-1d43-4051-b701-9bb77cd00e21 tempest-ServerRescueTestJSON-1450719199 tempest-ServerRescueTestJSON-1450719199-project-member] Got 1 possible topologies {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 557.608866] env[61839]: DEBUG nova.virt.hardware [None req-0477abbc-1d43-4051-b701-9bb77cd00e21 tempest-ServerRescueTestJSON-1450719199 tempest-ServerRescueTestJSON-1450719199-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 557.608866] env[61839]: DEBUG nova.virt.hardware [None req-0477abbc-1d43-4051-b701-9bb77cd00e21 tempest-ServerRescueTestJSON-1450719199 tempest-ServerRescueTestJSON-1450719199-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 557.609544] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b5aa1d4-cb9a-48c1-a1f2-c8836f328f61 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.618390] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b189547-f5bc-44b2-9332-375a32f87f91 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.745546] env[61839]: DEBUG oslo_concurrency.lockutils [None req-1b91a866-cb6a-4f66-9111-9caa2300b964 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Acquiring lock "211e8267-3c33-42c8-852f-1c20d7987453" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 557.745546] env[61839]: DEBUG oslo_concurrency.lockutils [None req-1b91a866-cb6a-4f66-9111-9caa2300b964 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Lock "211e8267-3c33-42c8-852f-1c20d7987453" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 557.745778] env[61839]: DEBUG oslo_concurrency.lockutils [None req-1b91a866-cb6a-4f66-9111-9caa2300b964 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Acquiring lock "211e8267-3c33-42c8-852f-1c20d7987453-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61839) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 557.745918] env[61839]: DEBUG oslo_concurrency.lockutils [None req-1b91a866-cb6a-4f66-9111-9caa2300b964 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Lock "211e8267-3c33-42c8-852f-1c20d7987453-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 557.746064] env[61839]: DEBUG oslo_concurrency.lockutils [None req-1b91a866-cb6a-4f66-9111-9caa2300b964 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Lock "211e8267-3c33-42c8-852f-1c20d7987453-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 557.753638] env[61839]: INFO nova.compute.manager [None req-1b91a866-cb6a-4f66-9111-9caa2300b964 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] [instance: 211e8267-3c33-42c8-852f-1c20d7987453] Terminating instance [ 557.755979] env[61839]: DEBUG oslo_concurrency.lockutils [None req-1b91a866-cb6a-4f66-9111-9caa2300b964 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Acquiring lock "refresh_cache-211e8267-3c33-42c8-852f-1c20d7987453" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 557.756156] env[61839]: DEBUG oslo_concurrency.lockutils [None req-1b91a866-cb6a-4f66-9111-9caa2300b964 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Acquired lock "refresh_cache-211e8267-3c33-42c8-852f-1c20d7987453" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 557.756333] env[61839]: DEBUG nova.network.neutron [None req-1b91a866-cb6a-4f66-9111-9caa2300b964 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] [instance: 211e8267-3c33-42c8-852f-1c20d7987453] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 557.796555] env[61839]: DEBUG nova.network.neutron [None req-62980ca7-dd2c-495e-b429-ffc9668e86f8 tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] [instance: 41823a25-5ff2-4838-854d-5bada8e5daca] Instance cache missing network info. 
{{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 557.938582] env[61839]: DEBUG nova.network.neutron [None req-0477abbc-1d43-4051-b701-9bb77cd00e21 tempest-ServerRescueTestJSON-1450719199 tempest-ServerRescueTestJSON-1450719199-project-member] [instance: 2032b746-2161-4487-ae4c-6159313241f4] Successfully created port: 02380c98-1108-4354-a18d-f9cabaed08c3 {{(pid=61839) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 558.095844] env[61839]: DEBUG oslo_concurrency.lockutils [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.576s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 558.096680] env[61839]: DEBUG nova.compute.manager [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] [instance: ec27ab37-2351-4ad1-b41f-8de9bfab8b9d] Start building networks asynchronously for instance. {{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 558.100050] env[61839]: DEBUG oslo_concurrency.lockutils [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.196s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 558.102188] env[61839]: INFO nova.compute.claims [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] [instance: 29dcaaa2-04fe-4835-acc9-41c433e6165f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 558.106600] env[61839]: DEBUG nova.network.neutron [None req-62980ca7-dd2c-495e-b429-ffc9668e86f8 tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] [instance: 41823a25-5ff2-4838-854d-5bada8e5daca] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 558.334674] env[61839]: DEBUG nova.network.neutron [None req-1b91a866-cb6a-4f66-9111-9caa2300b964 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] [instance: 211e8267-3c33-42c8-852f-1c20d7987453] Instance cache missing network info. 
{{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 558.434184] env[61839]: DEBUG oslo_concurrency.lockutils [None req-db043505-be83-48ec-924b-c8d945a5cedf tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Acquiring lock "7d9a24e0-c265-4255-964f-54c971c02ded" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 558.434477] env[61839]: DEBUG oslo_concurrency.lockutils [None req-db043505-be83-48ec-924b-c8d945a5cedf tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Lock "7d9a24e0-c265-4255-964f-54c971c02ded" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 558.609100] env[61839]: DEBUG nova.compute.utils [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] Using /dev/sd instead of None {{(pid=61839) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 558.610487] env[61839]: DEBUG nova.compute.manager [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] [instance: ec27ab37-2351-4ad1-b41f-8de9bfab8b9d] Allocating IP information in the background. {{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 558.610851] env[61839]: DEBUG nova.network.neutron [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] [instance: ec27ab37-2351-4ad1-b41f-8de9bfab8b9d] allocate_for_instance() {{(pid=61839) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 558.614996] env[61839]: DEBUG oslo_concurrency.lockutils [None req-62980ca7-dd2c-495e-b429-ffc9668e86f8 tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] Releasing lock "refresh_cache-41823a25-5ff2-4838-854d-5bada8e5daca" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 558.616427] env[61839]: DEBUG nova.compute.manager [None req-62980ca7-dd2c-495e-b429-ffc9668e86f8 tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] [instance: 41823a25-5ff2-4838-854d-5bada8e5daca] Start destroying the instance on the hypervisor. 
{{(pid=61839) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 558.616658] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-62980ca7-dd2c-495e-b429-ffc9668e86f8 tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] [instance: 41823a25-5ff2-4838-854d-5bada8e5daca] Destroying instance {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 558.620113] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5f7d701-8f1d-4ef2-a1d3-22bd55e89daf {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.625384] env[61839]: DEBUG nova.network.neutron [None req-1b91a866-cb6a-4f66-9111-9caa2300b964 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] [instance: 211e8267-3c33-42c8-852f-1c20d7987453] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 558.632169] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-62980ca7-dd2c-495e-b429-ffc9668e86f8 tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] [instance: 41823a25-5ff2-4838-854d-5bada8e5daca] Powering off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 558.632169] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cad952ae-20d7-4a3f-81dd-c34422e3f006 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.638752] env[61839]: DEBUG oslo_vmware.api [None req-62980ca7-dd2c-495e-b429-ffc9668e86f8 tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] Waiting for the task: (returnval){ [ 558.638752] env[61839]: value = "task-1314254" [ 558.638752] env[61839]: _type = "Task" [ 558.638752] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 558.649335] env[61839]: DEBUG oslo_vmware.api [None req-62980ca7-dd2c-495e-b429-ffc9668e86f8 tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] Task: {'id': task-1314254, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 558.926438] env[61839]: DEBUG nova.policy [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b66687e1672c4b05b1186779d70e9b7f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8a7249e84b91414990cd0f56dfac0909', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61839) authorize /opt/stack/nova/nova/policy.py:201}} [ 559.121680] env[61839]: DEBUG nova.compute.manager [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] [instance: ec27ab37-2351-4ad1-b41f-8de9bfab8b9d] Start building block device mappings for instance. {{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 559.134609] env[61839]: DEBUG oslo_concurrency.lockutils [None req-1b91a866-cb6a-4f66-9111-9caa2300b964 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Releasing lock "refresh_cache-211e8267-3c33-42c8-852f-1c20d7987453" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 559.137972] env[61839]: DEBUG nova.compute.manager [None req-1b91a866-cb6a-4f66-9111-9caa2300b964 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] [instance: 211e8267-3c33-42c8-852f-1c20d7987453] Start destroying the instance on the hypervisor. {{(pid=61839) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 559.138324] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-1b91a866-cb6a-4f66-9111-9caa2300b964 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] [instance: 211e8267-3c33-42c8-852f-1c20d7987453] Destroying instance {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 559.140142] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7916410e-dffc-4f8e-ab1f-5be2ffe871db {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 559.154952] env[61839]: DEBUG oslo_vmware.api [None req-62980ca7-dd2c-495e-b429-ffc9668e86f8 tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] Task: {'id': task-1314254, 'name': PowerOffVM_Task, 'duration_secs': 0.168134} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 559.157191] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-62980ca7-dd2c-495e-b429-ffc9668e86f8 tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] [instance: 41823a25-5ff2-4838-854d-5bada8e5daca] Powered off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 559.157266] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-62980ca7-dd2c-495e-b429-ffc9668e86f8 tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] [instance: 41823a25-5ff2-4838-854d-5bada8e5daca] Unregistering the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 559.157527] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-1b91a866-cb6a-4f66-9111-9caa2300b964 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] [instance: 211e8267-3c33-42c8-852f-1c20d7987453] Powering off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 559.157870] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9cae7b89-0673-4cf4-afa9-c9be31268c0c {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 559.159468] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0ad076fa-da14-4f44-8eda-dd30fa8eda89 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 559.167382] env[61839]: DEBUG oslo_vmware.api [None req-1b91a866-cb6a-4f66-9111-9caa2300b964 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Waiting for the task: (returnval){ [ 559.167382] env[61839]: value = "task-1314256" [ 559.167382] env[61839]: _type = "Task" [ 559.167382] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 559.184073] env[61839]: DEBUG oslo_vmware.api [None req-1b91a866-cb6a-4f66-9111-9caa2300b964 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Task: {'id': task-1314256, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 559.194051] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-62980ca7-dd2c-495e-b429-ffc9668e86f8 tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] [instance: 41823a25-5ff2-4838-854d-5bada8e5daca] Unregistered the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 559.194051] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-62980ca7-dd2c-495e-b429-ffc9668e86f8 tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] [instance: 41823a25-5ff2-4838-854d-5bada8e5daca] Deleting contents of the VM from datastore datastore2 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 559.194051] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-62980ca7-dd2c-495e-b429-ffc9668e86f8 tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] Deleting the datastore file [datastore2] 41823a25-5ff2-4838-854d-5bada8e5daca {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 559.194051] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8022abd3-cefd-45b1-850e-fb5f388159e2 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 559.205033] env[61839]: DEBUG oslo_vmware.api [None req-62980ca7-dd2c-495e-b429-ffc9668e86f8 tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] Waiting for the task: (returnval){ [ 559.205033] env[61839]: value = "task-1314257" [ 559.205033] env[61839]: _type = "Task" [ 559.205033] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 559.219492] env[61839]: DEBUG oslo_vmware.api [None req-62980ca7-dd2c-495e-b429-ffc9668e86f8 tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] Task: {'id': task-1314257, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 559.502055] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7a64154-d910-4588-9621-25200e7aac0e {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 559.510392] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b09acd2a-7f60-4d50-aa99-88c42d44f9ed {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 559.546862] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7153ecd-f5eb-48c7-9f7b-384226f36e4e {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 559.554140] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34d00b0c-eb50-490b-a203-7e8bfbca7a81 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 559.568336] env[61839]: DEBUG nova.compute.provider_tree [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 559.677327] env[61839]: DEBUG oslo_vmware.api [None req-1b91a866-cb6a-4f66-9111-9caa2300b964 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Task: {'id': task-1314256, 'name': PowerOffVM_Task, 'duration_secs': 0.19666} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 559.677667] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-1b91a866-cb6a-4f66-9111-9caa2300b964 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] [instance: 211e8267-3c33-42c8-852f-1c20d7987453] Powered off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 559.679200] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-1b91a866-cb6a-4f66-9111-9caa2300b964 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] [instance: 211e8267-3c33-42c8-852f-1c20d7987453] Unregistering the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 559.679200] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-318b4e15-86cc-426d-9d6a-a6a3ec7c5f68 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 559.701592] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-1b91a866-cb6a-4f66-9111-9caa2300b964 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] [instance: 211e8267-3c33-42c8-852f-1c20d7987453] Unregistered the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 559.701789] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-1b91a866-cb6a-4f66-9111-9caa2300b964 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] [instance: 211e8267-3c33-42c8-852f-1c20d7987453] Deleting contents of the VM from datastore datastore1 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 559.701961] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-1b91a866-cb6a-4f66-9111-9caa2300b964 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Deleting the datastore file [datastore1] 211e8267-3c33-42c8-852f-1c20d7987453 {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 559.702232] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-787d3c4c-4fc4-4a69-88c3-cad70430b34a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 559.722020] env[61839]: DEBUG oslo_vmware.api [None req-62980ca7-dd2c-495e-b429-ffc9668e86f8 tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] Task: {'id': task-1314257, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.134431} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 559.722020] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-62980ca7-dd2c-495e-b429-ffc9668e86f8 tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] Deleted the datastore file {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 559.722020] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-62980ca7-dd2c-495e-b429-ffc9668e86f8 tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] [instance: 41823a25-5ff2-4838-854d-5bada8e5daca] Deleted contents of the VM from datastore datastore2 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 559.722824] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-62980ca7-dd2c-495e-b429-ffc9668e86f8 tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] [instance: 41823a25-5ff2-4838-854d-5bada8e5daca] Instance destroyed {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 559.723026] env[61839]: INFO nova.compute.manager [None req-62980ca7-dd2c-495e-b429-ffc9668e86f8 tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] [instance: 41823a25-5ff2-4838-854d-5bada8e5daca] Took 1.11 seconds to destroy the instance on the hypervisor. [ 559.723868] env[61839]: DEBUG oslo.service.loopingcall [None req-62980ca7-dd2c-495e-b429-ffc9668e86f8 tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 559.724151] env[61839]: DEBUG oslo_vmware.api [None req-1b91a866-cb6a-4f66-9111-9caa2300b964 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Waiting for the task: (returnval){ [ 559.724151] env[61839]: value = "task-1314259" [ 559.724151] env[61839]: _type = "Task" [ 559.724151] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 559.724338] env[61839]: DEBUG nova.compute.manager [-] [instance: 41823a25-5ff2-4838-854d-5bada8e5daca] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 559.724466] env[61839]: DEBUG nova.network.neutron [-] [instance: 41823a25-5ff2-4838-854d-5bada8e5daca] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 559.737230] env[61839]: DEBUG oslo_vmware.api [None req-1b91a866-cb6a-4f66-9111-9caa2300b964 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Task: {'id': task-1314259, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 559.775958] env[61839]: DEBUG nova.network.neutron [-] [instance: 41823a25-5ff2-4838-854d-5bada8e5daca] Instance cache missing network info.
{{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 560.071540] env[61839]: DEBUG nova.scheduler.client.report [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 560.147355] env[61839]: DEBUG nova.compute.manager [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] [instance: ec27ab37-2351-4ad1-b41f-8de9bfab8b9d] Start spawning the instance on the hypervisor. {{(pid=61839) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 560.185406] env[61839]: DEBUG nova.virt.hardware [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-18T16:51:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-18T16:50:57Z,direct_url=,disk_format='vmdk',id=e497cc62-282a-4a70-9770-22d80d8a1013,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a98e034276e44534a9feace637762da7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-18T16:50:58Z,virtual_size=,visibility=), allow threads: False {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 560.185654] env[61839]: DEBUG nova.virt.hardware [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] Flavor limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 560.185803] env[61839]: DEBUG nova.virt.hardware [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] Image limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 560.185976] env[61839]: DEBUG nova.virt.hardware [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] Flavor pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 560.186502] env[61839]: DEBUG nova.virt.hardware [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] Image pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 560.186749] env[61839]: DEBUG nova.virt.hardware [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 560.186998] env[61839]: DEBUG nova.virt.hardware [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 560.187246] env[61839]: DEBUG nova.virt.hardware [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 560.187470] env[61839]: DEBUG nova.virt.hardware [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] Got 1 possible topologies {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 560.187705] env[61839]: DEBUG nova.virt.hardware [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 560.187918] env[61839]: DEBUG nova.virt.hardware [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 560.188812] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e54a16ac-9d1f-479a-8864-2260d660a803 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 560.199627] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b3ada45-10b3-4ad2-9818-9831d15bd9da {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 560.235991] env[61839]: DEBUG oslo_vmware.api [None req-1b91a866-cb6a-4f66-9111-9caa2300b964 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Task: {'id': task-1314259, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.100314} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 560.236573] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-1b91a866-cb6a-4f66-9111-9caa2300b964 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Deleted the datastore file {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 560.236754] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-1b91a866-cb6a-4f66-9111-9caa2300b964 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] [instance: 211e8267-3c33-42c8-852f-1c20d7987453] Deleted contents of the VM from datastore datastore1 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 560.236927] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-1b91a866-cb6a-4f66-9111-9caa2300b964 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] [instance: 211e8267-3c33-42c8-852f-1c20d7987453] Instance destroyed {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 560.237114] env[61839]: INFO nova.compute.manager [None req-1b91a866-cb6a-4f66-9111-9caa2300b964 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] [instance: 211e8267-3c33-42c8-852f-1c20d7987453] Took 1.10 seconds to destroy the instance on the hypervisor. [ 560.237349] env[61839]: DEBUG oslo.service.loopingcall [None req-1b91a866-cb6a-4f66-9111-9caa2300b964 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 560.237567] env[61839]: DEBUG nova.compute.manager [-] [instance: 211e8267-3c33-42c8-852f-1c20d7987453] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 560.237721] env[61839]: DEBUG nova.network.neutron [-] [instance: 211e8267-3c33-42c8-852f-1c20d7987453] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 560.283028] env[61839]: DEBUG nova.network.neutron [-] [instance: 41823a25-5ff2-4838-854d-5bada8e5daca] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 560.297688] env[61839]: DEBUG nova.network.neutron [-] [instance: 211e8267-3c33-42c8-852f-1c20d7987453] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 560.578713] env[61839]: DEBUG oslo_concurrency.lockutils [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.479s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 560.581264] env[61839]: DEBUG nova.compute.manager [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] [instance: 29dcaaa2-04fe-4835-acc9-41c433e6165f] Start building networks asynchronously for instance.
{{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 560.586037] env[61839]: DEBUG oslo_concurrency.lockutils [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.655s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 560.590312] env[61839]: INFO nova.compute.claims [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] [instance: 89843511-d201-431b-918d-e789e38e4f68] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 560.663403] env[61839]: DEBUG nova.network.neutron [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] [instance: ec27ab37-2351-4ad1-b41f-8de9bfab8b9d] Successfully created port: 0a7d0ef0-1a14-4cc0-8b69-de52e22b919e {{(pid=61839) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 560.787908] env[61839]: INFO nova.compute.manager [-] [instance: 41823a25-5ff2-4838-854d-5bada8e5daca] Took 1.06 seconds to deallocate network for instance. [ 560.800647] env[61839]: DEBUG nova.network.neutron [-] [instance: 211e8267-3c33-42c8-852f-1c20d7987453] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 561.089537] env[61839]: DEBUG nova.compute.utils [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] Using /dev/sd instead of None {{(pid=61839) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 561.090382] env[61839]: DEBUG nova.compute.manager [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] [instance: 29dcaaa2-04fe-4835-acc9-41c433e6165f] Allocating IP information in the background. 
{{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 561.090484] env[61839]: DEBUG nova.network.neutron [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] [instance: 29dcaaa2-04fe-4835-acc9-41c433e6165f] allocate_for_instance() {{(pid=61839) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 561.270764] env[61839]: DEBUG nova.policy [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b66687e1672c4b05b1186779d70e9b7f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8a7249e84b91414990cd0f56dfac0909', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61839) authorize /opt/stack/nova/nova/policy.py:201}} [ 561.295600] env[61839]: DEBUG oslo_concurrency.lockutils [None req-62980ca7-dd2c-495e-b429-ffc9668e86f8 tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 561.303552] env[61839]: INFO nova.compute.manager [-] [instance: 211e8267-3c33-42c8-852f-1c20d7987453] Took 1.07 seconds to deallocate network for instance. [ 561.597060] env[61839]: DEBUG nova.compute.manager [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] [instance: 29dcaaa2-04fe-4835-acc9-41c433e6165f] Start building block device mappings for instance. 
{{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 561.816911] env[61839]: DEBUG oslo_concurrency.lockutils [None req-1b91a866-cb6a-4f66-9111-9caa2300b964 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 561.999019] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ef5e2da-50c2-48fd-8493-9956112f4211 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 562.006299] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-301f8b0b-a3da-4003-9407-5d70da80b626 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 562.040986] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8365afa-1b1d-498c-a1c1-c276c55439cc {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 562.049389] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8abd3eba-fb5e-4f52-8df8-35e87263d151 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 562.066869] env[61839]: DEBUG nova.compute.provider_tree [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 562.563329] env[61839]: DEBUG nova.network.neutron [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] [instance: 29dcaaa2-04fe-4835-acc9-41c433e6165f] Successfully created port: 73783eb6-0ed5-45d6-b54a-aad8df2a83fd {{(pid=61839) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 562.572181] env[61839]: DEBUG nova.scheduler.client.report [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 562.610671] env[61839]: DEBUG nova.compute.manager [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] [instance: 29dcaaa2-04fe-4835-acc9-41c433e6165f] Start spawning the instance on the hypervisor. 
{{(pid=61839) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 562.633440] env[61839]: DEBUG nova.virt.hardware [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-18T16:51:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-18T16:50:57Z,direct_url=,disk_format='vmdk',id=e497cc62-282a-4a70-9770-22d80d8a1013,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a98e034276e44534a9feace637762da7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-18T16:50:58Z,virtual_size=,visibility=), allow threads: False {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 562.633760] env[61839]: DEBUG nova.virt.hardware [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] Flavor limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 562.633961] env[61839]: DEBUG nova.virt.hardware [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] Image limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 562.634260] env[61839]: DEBUG nova.virt.hardware [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] Flavor pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 562.634400] env[61839]: DEBUG nova.virt.hardware [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] Image pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 562.634594] env[61839]: DEBUG nova.virt.hardware [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 562.634851] env[61839]: DEBUG nova.virt.hardware [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 562.635056] env[61839]: DEBUG nova.virt.hardware [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61839) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 562.635266] env[61839]: DEBUG nova.virt.hardware [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] Got 1 possible topologies {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 562.635471] env[61839]: DEBUG nova.virt.hardware [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 562.636032] env[61839]: DEBUG nova.virt.hardware [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 562.636660] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93e7c741-f5cf-40d3-9ff2-4cc75c768c5f {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 562.645834] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b17a21e-70e7-4c4e-a1d6-b57dbd06833f {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.078534] env[61839]: DEBUG oslo_concurrency.lockutils [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.491s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 563.078534] env[61839]: DEBUG nova.compute.manager [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] [instance: 89843511-d201-431b-918d-e789e38e4f68] Start building networks asynchronously for instance. 
{{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 563.083201] env[61839]: DEBUG oslo_concurrency.lockutils [None req-baec407e-7ec5-474d-acf6-1dc7891e7887 tempest-ImagesOneServerNegativeTestJSON-734658384 tempest-ImagesOneServerNegativeTestJSON-734658384-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.183s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 563.088155] env[61839]: INFO nova.compute.claims [None req-baec407e-7ec5-474d-acf6-1dc7891e7887 tempest-ImagesOneServerNegativeTestJSON-734658384 tempest-ImagesOneServerNegativeTestJSON-734658384-project-member] [instance: 0b78efda-51c7-4d51-be31-005ff0d44ede] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 563.200300] env[61839]: ERROR nova.compute.manager [None req-26fc30cc-4ad1-4009-975c-8d3de6d95ff0 tempest-ServerExternalEventsTest-1581584313 tempest-ServerExternalEventsTest-1581584313-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 9a01ee46-d236-4b9b-8c66-098e5658a725, please check neutron logs for more information. [ 563.200300] env[61839]: ERROR nova.compute.manager Traceback (most recent call last): [ 563.200300] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 563.200300] env[61839]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 563.200300] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 563.200300] env[61839]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 563.200300] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 563.200300] env[61839]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 563.200300] env[61839]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 563.200300] env[61839]: ERROR nova.compute.manager self.force_reraise() [ 563.200300] env[61839]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 563.200300] env[61839]: ERROR nova.compute.manager raise self.value [ 563.200300] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 563.200300] env[61839]: ERROR nova.compute.manager updated_port = self._update_port( [ 563.200300] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 563.200300] env[61839]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 563.201077] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 563.201077] env[61839]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 563.201077] env[61839]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 9a01ee46-d236-4b9b-8c66-098e5658a725, please check neutron logs for more information. 
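[editor's note] The PortBindingFailed dumps around this point all bottom out in _ensure_no_port_binding_failure(port) at nova/network/neutron.py:294, which raises exception.PortBindingFailed(port_id=port['id']). Below is a minimal sketch of that guard, reconstructed from the traceback frames alone rather than quoted from Nova; the 'binding:vif_type' attribute and the 'binding_failed' sentinel are assumptions about how Neutron reports a failed binding.

    # Illustrative sketch only: rebuilds the guard the traceback shows firing.
    # Assumption: Neutron marks a failed binding by setting the port's
    # 'binding:vif_type' attribute to 'binding_failed'.
    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__("Binding failed for port %s, please check "
                             "neutron logs for more information." % port_id)

    VIF_TYPE_BINDING_FAILED = 'binding_failed'  # assumed sentinel value

    def _ensure_no_port_binding_failure(port):
        # Raise as soon as Neutron reports the port's binding as failed.
        if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
            raise PortBindingFailed(port_id=port['id'])

    # The port from the failed allocation above would trip the guard:
    try:
        _ensure_no_port_binding_failure(
            {'id': '9a01ee46-d236-4b9b-8c66-098e5658a725',
             'binding:vif_type': 'binding_failed'})
    except PortBindingFailed as exc:
        print(exc)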
[ 563.201077] env[61839]: ERROR nova.compute.manager [ 563.201077] env[61839]: Traceback (most recent call last): [ 563.201077] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 563.201077] env[61839]: listener.cb(fileno) [ 563.201077] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 563.201077] env[61839]: result = function(*args, **kwargs) [ 563.201077] env[61839]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 563.201077] env[61839]: return func(*args, **kwargs) [ 563.201077] env[61839]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 563.201077] env[61839]: raise e [ 563.201077] env[61839]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 563.201077] env[61839]: nwinfo = self.network_api.allocate_for_instance( [ 563.201077] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 563.201077] env[61839]: created_port_ids = self._update_ports_for_instance( [ 563.201077] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 563.201077] env[61839]: with excutils.save_and_reraise_exception(): [ 563.201077] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 563.201077] env[61839]: self.force_reraise() [ 563.201077] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 563.201077] env[61839]: raise self.value [ 563.201077] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 563.201077] env[61839]: updated_port = self._update_port( [ 563.201077] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 563.201077] env[61839]: _ensure_no_port_binding_failure(port) [ 563.201077] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 563.201077] env[61839]: raise exception.PortBindingFailed(port_id=port['id']) [ 563.202050] env[61839]: nova.exception.PortBindingFailed: Binding failed for port 9a01ee46-d236-4b9b-8c66-098e5658a725, please check neutron logs for more information. [ 563.202050] env[61839]: Removing descriptor: 15 [ 563.202050] env[61839]: ERROR nova.compute.manager [None req-26fc30cc-4ad1-4009-975c-8d3de6d95ff0 tempest-ServerExternalEventsTest-1581584313 tempest-ServerExternalEventsTest-1581584313-project-member] [instance: d0822954-42c2-4003-baf4-97bc2ce65768] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 9a01ee46-d236-4b9b-8c66-098e5658a725, please check neutron logs for more information. 
[ 563.202050] env[61839]: ERROR nova.compute.manager [instance: d0822954-42c2-4003-baf4-97bc2ce65768] Traceback (most recent call last): [ 563.202050] env[61839]: ERROR nova.compute.manager [instance: d0822954-42c2-4003-baf4-97bc2ce65768] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 563.202050] env[61839]: ERROR nova.compute.manager [instance: d0822954-42c2-4003-baf4-97bc2ce65768] yield resources [ 563.202050] env[61839]: ERROR nova.compute.manager [instance: d0822954-42c2-4003-baf4-97bc2ce65768] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 563.202050] env[61839]: ERROR nova.compute.manager [instance: d0822954-42c2-4003-baf4-97bc2ce65768] self.driver.spawn(context, instance, image_meta, [ 563.202050] env[61839]: ERROR nova.compute.manager [instance: d0822954-42c2-4003-baf4-97bc2ce65768] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 563.202050] env[61839]: ERROR nova.compute.manager [instance: d0822954-42c2-4003-baf4-97bc2ce65768] self._vmops.spawn(context, instance, image_meta, injected_files, [ 563.202050] env[61839]: ERROR nova.compute.manager [instance: d0822954-42c2-4003-baf4-97bc2ce65768] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 563.202050] env[61839]: ERROR nova.compute.manager [instance: d0822954-42c2-4003-baf4-97bc2ce65768] vm_ref = self.build_virtual_machine(instance, [ 563.202561] env[61839]: ERROR nova.compute.manager [instance: d0822954-42c2-4003-baf4-97bc2ce65768] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 563.202561] env[61839]: ERROR nova.compute.manager [instance: d0822954-42c2-4003-baf4-97bc2ce65768] vif_infos = vmwarevif.get_vif_info(self._session, [ 563.202561] env[61839]: ERROR nova.compute.manager [instance: d0822954-42c2-4003-baf4-97bc2ce65768] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 563.202561] env[61839]: ERROR nova.compute.manager [instance: d0822954-42c2-4003-baf4-97bc2ce65768] for vif in network_info: [ 563.202561] env[61839]: ERROR nova.compute.manager [instance: d0822954-42c2-4003-baf4-97bc2ce65768] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 563.202561] env[61839]: ERROR nova.compute.manager [instance: d0822954-42c2-4003-baf4-97bc2ce65768] return self._sync_wrapper(fn, *args, **kwargs) [ 563.202561] env[61839]: ERROR nova.compute.manager [instance: d0822954-42c2-4003-baf4-97bc2ce65768] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 563.202561] env[61839]: ERROR nova.compute.manager [instance: d0822954-42c2-4003-baf4-97bc2ce65768] self.wait() [ 563.202561] env[61839]: ERROR nova.compute.manager [instance: d0822954-42c2-4003-baf4-97bc2ce65768] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 563.202561] env[61839]: ERROR nova.compute.manager [instance: d0822954-42c2-4003-baf4-97bc2ce65768] self[:] = self._gt.wait() [ 563.202561] env[61839]: ERROR nova.compute.manager [instance: d0822954-42c2-4003-baf4-97bc2ce65768] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 563.202561] env[61839]: ERROR nova.compute.manager [instance: d0822954-42c2-4003-baf4-97bc2ce65768] return self._exit_event.wait() [ 563.202561] env[61839]: ERROR nova.compute.manager [instance: d0822954-42c2-4003-baf4-97bc2ce65768] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 563.202996] env[61839]: ERROR 
nova.compute.manager [instance: d0822954-42c2-4003-baf4-97bc2ce65768] result = hub.switch() [ 563.202996] env[61839]: ERROR nova.compute.manager [instance: d0822954-42c2-4003-baf4-97bc2ce65768] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 563.202996] env[61839]: ERROR nova.compute.manager [instance: d0822954-42c2-4003-baf4-97bc2ce65768] return self.greenlet.switch() [ 563.202996] env[61839]: ERROR nova.compute.manager [instance: d0822954-42c2-4003-baf4-97bc2ce65768] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 563.202996] env[61839]: ERROR nova.compute.manager [instance: d0822954-42c2-4003-baf4-97bc2ce65768] result = function(*args, **kwargs) [ 563.202996] env[61839]: ERROR nova.compute.manager [instance: d0822954-42c2-4003-baf4-97bc2ce65768] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 563.202996] env[61839]: ERROR nova.compute.manager [instance: d0822954-42c2-4003-baf4-97bc2ce65768] return func(*args, **kwargs) [ 563.202996] env[61839]: ERROR nova.compute.manager [instance: d0822954-42c2-4003-baf4-97bc2ce65768] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 563.202996] env[61839]: ERROR nova.compute.manager [instance: d0822954-42c2-4003-baf4-97bc2ce65768] raise e [ 563.202996] env[61839]: ERROR nova.compute.manager [instance: d0822954-42c2-4003-baf4-97bc2ce65768] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 563.202996] env[61839]: ERROR nova.compute.manager [instance: d0822954-42c2-4003-baf4-97bc2ce65768] nwinfo = self.network_api.allocate_for_instance( [ 563.202996] env[61839]: ERROR nova.compute.manager [instance: d0822954-42c2-4003-baf4-97bc2ce65768] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 563.202996] env[61839]: ERROR nova.compute.manager [instance: d0822954-42c2-4003-baf4-97bc2ce65768] created_port_ids = self._update_ports_for_instance( [ 563.203545] env[61839]: ERROR nova.compute.manager [instance: d0822954-42c2-4003-baf4-97bc2ce65768] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 563.203545] env[61839]: ERROR nova.compute.manager [instance: d0822954-42c2-4003-baf4-97bc2ce65768] with excutils.save_and_reraise_exception(): [ 563.203545] env[61839]: ERROR nova.compute.manager [instance: d0822954-42c2-4003-baf4-97bc2ce65768] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 563.203545] env[61839]: ERROR nova.compute.manager [instance: d0822954-42c2-4003-baf4-97bc2ce65768] self.force_reraise() [ 563.203545] env[61839]: ERROR nova.compute.manager [instance: d0822954-42c2-4003-baf4-97bc2ce65768] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 563.203545] env[61839]: ERROR nova.compute.manager [instance: d0822954-42c2-4003-baf4-97bc2ce65768] raise self.value [ 563.203545] env[61839]: ERROR nova.compute.manager [instance: d0822954-42c2-4003-baf4-97bc2ce65768] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 563.203545] env[61839]: ERROR nova.compute.manager [instance: d0822954-42c2-4003-baf4-97bc2ce65768] updated_port = self._update_port( [ 563.203545] env[61839]: ERROR nova.compute.manager [instance: d0822954-42c2-4003-baf4-97bc2ce65768] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 563.203545] 
env[61839]: ERROR nova.compute.manager [instance: d0822954-42c2-4003-baf4-97bc2ce65768] _ensure_no_port_binding_failure(port) [ 563.203545] env[61839]: ERROR nova.compute.manager [instance: d0822954-42c2-4003-baf4-97bc2ce65768] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 563.203545] env[61839]: ERROR nova.compute.manager [instance: d0822954-42c2-4003-baf4-97bc2ce65768] raise exception.PortBindingFailed(port_id=port['id']) [ 563.203924] env[61839]: ERROR nova.compute.manager [instance: d0822954-42c2-4003-baf4-97bc2ce65768] nova.exception.PortBindingFailed: Binding failed for port 9a01ee46-d236-4b9b-8c66-098e5658a725, please check neutron logs for more information. [ 563.203924] env[61839]: ERROR nova.compute.manager [instance: d0822954-42c2-4003-baf4-97bc2ce65768] [ 563.203924] env[61839]: INFO nova.compute.manager [None req-26fc30cc-4ad1-4009-975c-8d3de6d95ff0 tempest-ServerExternalEventsTest-1581584313 tempest-ServerExternalEventsTest-1581584313-project-member] [instance: d0822954-42c2-4003-baf4-97bc2ce65768] Terminating instance [ 563.203924] env[61839]: DEBUG oslo_concurrency.lockutils [None req-26fc30cc-4ad1-4009-975c-8d3de6d95ff0 tempest-ServerExternalEventsTest-1581584313 tempest-ServerExternalEventsTest-1581584313-project-member] Acquiring lock "refresh_cache-d0822954-42c2-4003-baf4-97bc2ce65768" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 563.203924] env[61839]: DEBUG oslo_concurrency.lockutils [None req-26fc30cc-4ad1-4009-975c-8d3de6d95ff0 tempest-ServerExternalEventsTest-1581584313 tempest-ServerExternalEventsTest-1581584313-project-member] Acquired lock "refresh_cache-d0822954-42c2-4003-baf4-97bc2ce65768" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 563.203924] env[61839]: DEBUG nova.network.neutron [None req-26fc30cc-4ad1-4009-975c-8d3de6d95ff0 tempest-ServerExternalEventsTest-1581584313 tempest-ServerExternalEventsTest-1581584313-project-member] [instance: d0822954-42c2-4003-baf4-97bc2ce65768] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 563.584745] env[61839]: DEBUG nova.compute.utils [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] Using /dev/sd instead of None {{(pid=61839) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 563.586950] env[61839]: DEBUG nova.compute.manager [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] [instance: 89843511-d201-431b-918d-e789e38e4f68] Allocating IP information in the background. 
{{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 563.587209] env[61839]: DEBUG nova.network.neutron [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] [instance: 89843511-d201-431b-918d-e789e38e4f68] allocate_for_instance() {{(pid=61839) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 563.729522] env[61839]: DEBUG nova.compute.manager [req-a7aa4eb1-63fe-42fe-84b9-07b322f8bf1c req-aea950dc-830a-4175-bb63-325b53543207 service nova] [instance: d0822954-42c2-4003-baf4-97bc2ce65768] Received event network-changed-9a01ee46-d236-4b9b-8c66-098e5658a725 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 563.729721] env[61839]: DEBUG nova.compute.manager [req-a7aa4eb1-63fe-42fe-84b9-07b322f8bf1c req-aea950dc-830a-4175-bb63-325b53543207 service nova] [instance: d0822954-42c2-4003-baf4-97bc2ce65768] Refreshing instance network info cache due to event network-changed-9a01ee46-d236-4b9b-8c66-098e5658a725. {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 563.732031] env[61839]: DEBUG oslo_concurrency.lockutils [req-a7aa4eb1-63fe-42fe-84b9-07b322f8bf1c req-aea950dc-830a-4175-bb63-325b53543207 service nova] Acquiring lock "refresh_cache-d0822954-42c2-4003-baf4-97bc2ce65768" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 563.789738] env[61839]: DEBUG nova.network.neutron [None req-26fc30cc-4ad1-4009-975c-8d3de6d95ff0 tempest-ServerExternalEventsTest-1581584313 tempest-ServerExternalEventsTest-1581584313-project-member] [instance: d0822954-42c2-4003-baf4-97bc2ce65768] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 563.840472] env[61839]: DEBUG nova.policy [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b66687e1672c4b05b1186779d70e9b7f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8a7249e84b91414990cd0f56dfac0909', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61839) authorize /opt/stack/nova/nova/policy.py:201}} [ 564.090558] env[61839]: DEBUG nova.compute.manager [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] [instance: 89843511-d201-431b-918d-e789e38e4f68] Start building block device mappings for instance. 
{{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 564.214306] env[61839]: DEBUG nova.network.neutron [None req-26fc30cc-4ad1-4009-975c-8d3de6d95ff0 tempest-ServerExternalEventsTest-1581584313 tempest-ServerExternalEventsTest-1581584313-project-member] [instance: d0822954-42c2-4003-baf4-97bc2ce65768] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 564.373298] env[61839]: DEBUG oslo_concurrency.lockutils [None req-758c0332-86e7-48d1-9a5c-4459947020cd tempest-AttachInterfacesV270Test-504327934 tempest-AttachInterfacesV270Test-504327934-project-member] Acquiring lock "5a130776-5e5f-4eec-8574-08aa1f1ef97a" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 564.373543] env[61839]: DEBUG oslo_concurrency.lockutils [None req-758c0332-86e7-48d1-9a5c-4459947020cd tempest-AttachInterfacesV270Test-504327934 tempest-AttachInterfacesV270Test-504327934-project-member] Lock "5a130776-5e5f-4eec-8574-08aa1f1ef97a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 564.540860] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3547aa23-8fea-451c-a717-78746f9a2d28 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 564.555835] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c2cd15c-51dd-487a-9a4f-f813ba85990a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 564.590041] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b107a04-a33e-4f01-9b7a-4f4434194011 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 564.596695] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37d5d23d-de42-479b-bd77-20645386a010 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 564.615156] env[61839]: DEBUG nova.compute.provider_tree [None req-baec407e-7ec5-474d-acf6-1dc7891e7887 tempest-ImagesOneServerNegativeTestJSON-734658384 tempest-ImagesOneServerNegativeTestJSON-734658384-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 564.718020] env[61839]: DEBUG oslo_concurrency.lockutils [None req-26fc30cc-4ad1-4009-975c-8d3de6d95ff0 tempest-ServerExternalEventsTest-1581584313 tempest-ServerExternalEventsTest-1581584313-project-member] Releasing lock "refresh_cache-d0822954-42c2-4003-baf4-97bc2ce65768" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 564.718020] env[61839]: DEBUG nova.compute.manager [None req-26fc30cc-4ad1-4009-975c-8d3de6d95ff0 tempest-ServerExternalEventsTest-1581584313 tempest-ServerExternalEventsTest-1581584313-project-member] [instance: d0822954-42c2-4003-baf4-97bc2ce65768] Start
destroying the instance on the hypervisor. {{(pid=61839) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 564.718020] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-26fc30cc-4ad1-4009-975c-8d3de6d95ff0 tempest-ServerExternalEventsTest-1581584313 tempest-ServerExternalEventsTest-1581584313-project-member] [instance: d0822954-42c2-4003-baf4-97bc2ce65768] Destroying instance {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 564.718020] env[61839]: DEBUG oslo_concurrency.lockutils [req-a7aa4eb1-63fe-42fe-84b9-07b322f8bf1c req-aea950dc-830a-4175-bb63-325b53543207 service nova] Acquired lock "refresh_cache-d0822954-42c2-4003-baf4-97bc2ce65768" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 564.718020] env[61839]: DEBUG nova.network.neutron [req-a7aa4eb1-63fe-42fe-84b9-07b322f8bf1c req-aea950dc-830a-4175-bb63-325b53543207 service nova] [instance: d0822954-42c2-4003-baf4-97bc2ce65768] Refreshing network info cache for port 9a01ee46-d236-4b9b-8c66-098e5658a725 {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 564.719045] env[61839]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5b53701d-6317-4eeb-92a7-a3b54663394b {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 564.734689] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3491cdca-bc09-4ebb-ba48-5d3c55aee5c8 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 564.761135] env[61839]: WARNING nova.virt.vmwareapi.vmops [None req-26fc30cc-4ad1-4009-975c-8d3de6d95ff0 tempest-ServerExternalEventsTest-1581584313 tempest-ServerExternalEventsTest-1581584313-project-member] [instance: d0822954-42c2-4003-baf4-97bc2ce65768] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance d0822954-42c2-4003-baf4-97bc2ce65768 could not be found. [ 564.761406] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-26fc30cc-4ad1-4009-975c-8d3de6d95ff0 tempest-ServerExternalEventsTest-1581584313 tempest-ServerExternalEventsTest-1581584313-project-member] [instance: d0822954-42c2-4003-baf4-97bc2ce65768] Instance destroyed {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 564.761588] env[61839]: INFO nova.compute.manager [None req-26fc30cc-4ad1-4009-975c-8d3de6d95ff0 tempest-ServerExternalEventsTest-1581584313 tempest-ServerExternalEventsTest-1581584313-project-member] [instance: d0822954-42c2-4003-baf4-97bc2ce65768] Took 0.04 seconds to destroy the instance on the hypervisor. [ 564.761827] env[61839]: DEBUG oslo.service.loopingcall [None req-26fc30cc-4ad1-4009-975c-8d3de6d95ff0 tempest-ServerExternalEventsTest-1581584313 tempest-ServerExternalEventsTest-1581584313-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.
{{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 564.762027] env[61839]: DEBUG nova.compute.manager [-] [instance: d0822954-42c2-4003-baf4-97bc2ce65768] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 564.762116] env[61839]: DEBUG nova.network.neutron [-] [instance: d0822954-42c2-4003-baf4-97bc2ce65768] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 564.842400] env[61839]: DEBUG nova.network.neutron [-] [instance: d0822954-42c2-4003-baf4-97bc2ce65768] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 565.107565] env[61839]: DEBUG nova.compute.manager [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] [instance: 89843511-d201-431b-918d-e789e38e4f68] Start spawning the instance on the hypervisor. {{(pid=61839) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 565.120110] env[61839]: DEBUG nova.scheduler.client.report [None req-baec407e-7ec5-474d-acf6-1dc7891e7887 tempest-ImagesOneServerNegativeTestJSON-734658384 tempest-ImagesOneServerNegativeTestJSON-734658384-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 565.134913] env[61839]: DEBUG nova.virt.hardware [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-18T16:51:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-18T16:50:57Z,direct_url=,disk_format='vmdk',id=e497cc62-282a-4a70-9770-22d80d8a1013,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a98e034276e44534a9feace637762da7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-18T16:50:58Z,virtual_size=,visibility=), allow threads: False {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 565.136026] env[61839]: DEBUG nova.virt.hardware [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] Flavor limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 565.136026] env[61839]: DEBUG nova.virt.hardware [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] 
Image limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 565.136026] env[61839]: DEBUG nova.virt.hardware [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] Flavor pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 565.136026] env[61839]: DEBUG nova.virt.hardware [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] Image pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 565.136026] env[61839]: DEBUG nova.virt.hardware [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 565.136301] env[61839]: DEBUG nova.virt.hardware [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 565.138544] env[61839]: DEBUG nova.virt.hardware [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 565.139139] env[61839]: DEBUG nova.virt.hardware [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] Got 1 possible topologies {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 565.139246] env[61839]: DEBUG nova.virt.hardware [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 565.139427] env[61839]: DEBUG nova.virt.hardware [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 565.140364] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0985f56e-80e0-4f37-aff2-3aede85ed9b3 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 565.159024] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-930365fd-1b70-49ed-a875-93a1ae664d18 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 565.318715] env[61839]: DEBUG nova.network.neutron 
[req-a7aa4eb1-63fe-42fe-84b9-07b322f8bf1c req-aea950dc-830a-4175-bb63-325b53543207 service nova] [instance: d0822954-42c2-4003-baf4-97bc2ce65768] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 565.340489] env[61839]: ERROR nova.compute.manager [None req-0477abbc-1d43-4051-b701-9bb77cd00e21 tempest-ServerRescueTestJSON-1450719199 tempest-ServerRescueTestJSON-1450719199-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 02380c98-1108-4354-a18d-f9cabaed08c3, please check neutron logs for more information. [ 565.340489] env[61839]: ERROR nova.compute.manager Traceback (most recent call last): [ 565.340489] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 565.340489] env[61839]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 565.340489] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 565.340489] env[61839]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 565.340489] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 565.340489] env[61839]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 565.340489] env[61839]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 565.340489] env[61839]: ERROR nova.compute.manager self.force_reraise() [ 565.340489] env[61839]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 565.340489] env[61839]: ERROR nova.compute.manager raise self.value [ 565.340489] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 565.340489] env[61839]: ERROR nova.compute.manager updated_port = self._update_port( [ 565.340489] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 565.340489] env[61839]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 565.341076] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 565.341076] env[61839]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 565.341076] env[61839]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 02380c98-1108-4354-a18d-f9cabaed08c3, please check neutron logs for more information. 
[ 565.341076] env[61839]: ERROR nova.compute.manager [ 565.341473] env[61839]: Traceback (most recent call last): [ 565.341576] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 565.341576] env[61839]: listener.cb(fileno) [ 565.341916] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 565.341916] env[61839]: result = function(*args, **kwargs) [ 565.342049] env[61839]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 565.342049] env[61839]: return func(*args, **kwargs) [ 565.342121] env[61839]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 565.342121] env[61839]: raise e [ 565.342189] env[61839]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 565.342189] env[61839]: nwinfo = self.network_api.allocate_for_instance( [ 565.342283] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 565.342283] env[61839]: created_port_ids = self._update_ports_for_instance( [ 565.342351] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 565.342351] env[61839]: with excutils.save_and_reraise_exception(): [ 565.342424] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 565.342424] env[61839]: self.force_reraise() [ 565.343119] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 565.343119] env[61839]: raise self.value [ 565.343119] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 565.343119] env[61839]: updated_port = self._update_port( [ 565.343119] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 565.343119] env[61839]: _ensure_no_port_binding_failure(port) [ 565.343119] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 565.343119] env[61839]: raise exception.PortBindingFailed(port_id=port['id']) [ 565.343119] env[61839]: nova.exception.PortBindingFailed: Binding failed for port 02380c98-1108-4354-a18d-f9cabaed08c3, please check neutron logs for more information. [ 565.343119] env[61839]: Removing descriptor: 17 [ 565.343853] env[61839]: ERROR nova.compute.manager [None req-0477abbc-1d43-4051-b701-9bb77cd00e21 tempest-ServerRescueTestJSON-1450719199 tempest-ServerRescueTestJSON-1450719199-project-member] [instance: 2032b746-2161-4487-ae4c-6159313241f4] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 02380c98-1108-4354-a18d-f9cabaed08c3, please check neutron logs for more information. 
[ 565.343853] env[61839]: ERROR nova.compute.manager [instance: 2032b746-2161-4487-ae4c-6159313241f4] Traceback (most recent call last): [ 565.343853] env[61839]: ERROR nova.compute.manager [instance: 2032b746-2161-4487-ae4c-6159313241f4] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 565.343853] env[61839]: ERROR nova.compute.manager [instance: 2032b746-2161-4487-ae4c-6159313241f4] yield resources [ 565.343853] env[61839]: ERROR nova.compute.manager [instance: 2032b746-2161-4487-ae4c-6159313241f4] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 565.343853] env[61839]: ERROR nova.compute.manager [instance: 2032b746-2161-4487-ae4c-6159313241f4] self.driver.spawn(context, instance, image_meta, [ 565.343853] env[61839]: ERROR nova.compute.manager [instance: 2032b746-2161-4487-ae4c-6159313241f4] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 565.343853] env[61839]: ERROR nova.compute.manager [instance: 2032b746-2161-4487-ae4c-6159313241f4] self._vmops.spawn(context, instance, image_meta, injected_files, [ 565.343853] env[61839]: ERROR nova.compute.manager [instance: 2032b746-2161-4487-ae4c-6159313241f4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 565.343853] env[61839]: ERROR nova.compute.manager [instance: 2032b746-2161-4487-ae4c-6159313241f4] vm_ref = self.build_virtual_machine(instance, [ 565.343853] env[61839]: ERROR nova.compute.manager [instance: 2032b746-2161-4487-ae4c-6159313241f4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 565.344245] env[61839]: ERROR nova.compute.manager [instance: 2032b746-2161-4487-ae4c-6159313241f4] vif_infos = vmwarevif.get_vif_info(self._session, [ 565.344245] env[61839]: ERROR nova.compute.manager [instance: 2032b746-2161-4487-ae4c-6159313241f4] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 565.344245] env[61839]: ERROR nova.compute.manager [instance: 2032b746-2161-4487-ae4c-6159313241f4] for vif in network_info: [ 565.344245] env[61839]: ERROR nova.compute.manager [instance: 2032b746-2161-4487-ae4c-6159313241f4] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 565.344245] env[61839]: ERROR nova.compute.manager [instance: 2032b746-2161-4487-ae4c-6159313241f4] return self._sync_wrapper(fn, *args, **kwargs) [ 565.344245] env[61839]: ERROR nova.compute.manager [instance: 2032b746-2161-4487-ae4c-6159313241f4] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 565.344245] env[61839]: ERROR nova.compute.manager [instance: 2032b746-2161-4487-ae4c-6159313241f4] self.wait() [ 565.344245] env[61839]: ERROR nova.compute.manager [instance: 2032b746-2161-4487-ae4c-6159313241f4] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 565.344245] env[61839]: ERROR nova.compute.manager [instance: 2032b746-2161-4487-ae4c-6159313241f4] self[:] = self._gt.wait() [ 565.344245] env[61839]: ERROR nova.compute.manager [instance: 2032b746-2161-4487-ae4c-6159313241f4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 565.344245] env[61839]: ERROR nova.compute.manager [instance: 2032b746-2161-4487-ae4c-6159313241f4] return self._exit_event.wait() [ 565.344245] env[61839]: ERROR nova.compute.manager [instance: 2032b746-2161-4487-ae4c-6159313241f4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 565.344245] env[61839]: ERROR 
nova.compute.manager [instance: 2032b746-2161-4487-ae4c-6159313241f4] result = hub.switch() [ 565.344668] env[61839]: ERROR nova.compute.manager [instance: 2032b746-2161-4487-ae4c-6159313241f4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 565.344668] env[61839]: ERROR nova.compute.manager [instance: 2032b746-2161-4487-ae4c-6159313241f4] return self.greenlet.switch() [ 565.344668] env[61839]: ERROR nova.compute.manager [instance: 2032b746-2161-4487-ae4c-6159313241f4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 565.344668] env[61839]: ERROR nova.compute.manager [instance: 2032b746-2161-4487-ae4c-6159313241f4] result = function(*args, **kwargs) [ 565.344668] env[61839]: ERROR nova.compute.manager [instance: 2032b746-2161-4487-ae4c-6159313241f4] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 565.344668] env[61839]: ERROR nova.compute.manager [instance: 2032b746-2161-4487-ae4c-6159313241f4] return func(*args, **kwargs) [ 565.344668] env[61839]: ERROR nova.compute.manager [instance: 2032b746-2161-4487-ae4c-6159313241f4] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 565.344668] env[61839]: ERROR nova.compute.manager [instance: 2032b746-2161-4487-ae4c-6159313241f4] raise e [ 565.344668] env[61839]: ERROR nova.compute.manager [instance: 2032b746-2161-4487-ae4c-6159313241f4] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 565.344668] env[61839]: ERROR nova.compute.manager [instance: 2032b746-2161-4487-ae4c-6159313241f4] nwinfo = self.network_api.allocate_for_instance( [ 565.344668] env[61839]: ERROR nova.compute.manager [instance: 2032b746-2161-4487-ae4c-6159313241f4] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 565.344668] env[61839]: ERROR nova.compute.manager [instance: 2032b746-2161-4487-ae4c-6159313241f4] created_port_ids = self._update_ports_for_instance( [ 565.344668] env[61839]: ERROR nova.compute.manager [instance: 2032b746-2161-4487-ae4c-6159313241f4] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 565.345097] env[61839]: ERROR nova.compute.manager [instance: 2032b746-2161-4487-ae4c-6159313241f4] with excutils.save_and_reraise_exception(): [ 565.345097] env[61839]: ERROR nova.compute.manager [instance: 2032b746-2161-4487-ae4c-6159313241f4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 565.345097] env[61839]: ERROR nova.compute.manager [instance: 2032b746-2161-4487-ae4c-6159313241f4] self.force_reraise() [ 565.345097] env[61839]: ERROR nova.compute.manager [instance: 2032b746-2161-4487-ae4c-6159313241f4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 565.345097] env[61839]: ERROR nova.compute.manager [instance: 2032b746-2161-4487-ae4c-6159313241f4] raise self.value [ 565.345097] env[61839]: ERROR nova.compute.manager [instance: 2032b746-2161-4487-ae4c-6159313241f4] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 565.345097] env[61839]: ERROR nova.compute.manager [instance: 2032b746-2161-4487-ae4c-6159313241f4] updated_port = self._update_port( [ 565.345097] env[61839]: ERROR nova.compute.manager [instance: 2032b746-2161-4487-ae4c-6159313241f4] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 565.345097] 
env[61839]: ERROR nova.compute.manager [instance: 2032b746-2161-4487-ae4c-6159313241f4] _ensure_no_port_binding_failure(port) [ 565.345097] env[61839]: ERROR nova.compute.manager [instance: 2032b746-2161-4487-ae4c-6159313241f4] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 565.345097] env[61839]: ERROR nova.compute.manager [instance: 2032b746-2161-4487-ae4c-6159313241f4] raise exception.PortBindingFailed(port_id=port['id']) [ 565.345097] env[61839]: ERROR nova.compute.manager [instance: 2032b746-2161-4487-ae4c-6159313241f4] nova.exception.PortBindingFailed: Binding failed for port 02380c98-1108-4354-a18d-f9cabaed08c3, please check neutron logs for more information. [ 565.345097] env[61839]: ERROR nova.compute.manager [instance: 2032b746-2161-4487-ae4c-6159313241f4] [ 565.345498] env[61839]: INFO nova.compute.manager [None req-0477abbc-1d43-4051-b701-9bb77cd00e21 tempest-ServerRescueTestJSON-1450719199 tempest-ServerRescueTestJSON-1450719199-project-member] [instance: 2032b746-2161-4487-ae4c-6159313241f4] Terminating instance [ 565.345498] env[61839]: DEBUG nova.network.neutron [-] [instance: d0822954-42c2-4003-baf4-97bc2ce65768] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 565.350401] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0477abbc-1d43-4051-b701-9bb77cd00e21 tempest-ServerRescueTestJSON-1450719199 tempest-ServerRescueTestJSON-1450719199-project-member] Acquiring lock "refresh_cache-2032b746-2161-4487-ae4c-6159313241f4" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 565.350401] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0477abbc-1d43-4051-b701-9bb77cd00e21 tempest-ServerRescueTestJSON-1450719199 tempest-ServerRescueTestJSON-1450719199-project-member] Acquired lock "refresh_cache-2032b746-2161-4487-ae4c-6159313241f4" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 565.350401] env[61839]: DEBUG nova.network.neutron [None req-0477abbc-1d43-4051-b701-9bb77cd00e21 tempest-ServerRescueTestJSON-1450719199 tempest-ServerRescueTestJSON-1450719199-project-member] [instance: 2032b746-2161-4487-ae4c-6159313241f4] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 565.603818] env[61839]: DEBUG nova.network.neutron [req-a7aa4eb1-63fe-42fe-84b9-07b322f8bf1c req-aea950dc-830a-4175-bb63-325b53543207 service nova] [instance: d0822954-42c2-4003-baf4-97bc2ce65768] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 565.625583] env[61839]: DEBUG oslo_concurrency.lockutils [None req-baec407e-7ec5-474d-acf6-1dc7891e7887 tempest-ImagesOneServerNegativeTestJSON-734658384 tempest-ImagesOneServerNegativeTestJSON-734658384-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.542s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 565.625583] env[61839]: DEBUG nova.compute.manager [None req-baec407e-7ec5-474d-acf6-1dc7891e7887 tempest-ImagesOneServerNegativeTestJSON-734658384 tempest-ImagesOneServerNegativeTestJSON-734658384-project-member] [instance: 0b78efda-51c7-4d51-be31-005ff0d44ede] Start building networks asynchronously 
for instance. {{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 565.632752] env[61839]: DEBUG oslo_concurrency.lockutils [None req-e2e7743f-119e-4c3a-a685-1f1242f17cae tempest-VolumesAssistedSnapshotsTest-1863052206 tempest-VolumesAssistedSnapshotsTest-1863052206-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 23.489s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 565.788170] env[61839]: DEBUG nova.network.neutron [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] [instance: 89843511-d201-431b-918d-e789e38e4f68] Successfully created port: 6e1dec40-0217-4ce2-92ae-fb5211d70403 {{(pid=61839) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 565.848578] env[61839]: INFO nova.compute.manager [-] [instance: d0822954-42c2-4003-baf4-97bc2ce65768] Took 1.09 seconds to deallocate network for instance. [ 565.851012] env[61839]: DEBUG nova.compute.claims [None req-26fc30cc-4ad1-4009-975c-8d3de6d95ff0 tempest-ServerExternalEventsTest-1581584313 tempest-ServerExternalEventsTest-1581584313-project-member] [instance: d0822954-42c2-4003-baf4-97bc2ce65768] Aborting claim: {{(pid=61839) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 565.851288] env[61839]: DEBUG oslo_concurrency.lockutils [None req-26fc30cc-4ad1-4009-975c-8d3de6d95ff0 tempest-ServerExternalEventsTest-1581584313 tempest-ServerExternalEventsTest-1581584313-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 565.915430] env[61839]: DEBUG nova.network.neutron [None req-0477abbc-1d43-4051-b701-9bb77cd00e21 tempest-ServerRescueTestJSON-1450719199 tempest-ServerRescueTestJSON-1450719199-project-member] [instance: 2032b746-2161-4487-ae4c-6159313241f4] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 566.107904] env[61839]: DEBUG oslo_concurrency.lockutils [req-a7aa4eb1-63fe-42fe-84b9-07b322f8bf1c req-aea950dc-830a-4175-bb63-325b53543207 service nova] Releasing lock "refresh_cache-d0822954-42c2-4003-baf4-97bc2ce65768" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 566.135106] env[61839]: DEBUG nova.compute.utils [None req-baec407e-7ec5-474d-acf6-1dc7891e7887 tempest-ImagesOneServerNegativeTestJSON-734658384 tempest-ImagesOneServerNegativeTestJSON-734658384-project-member] Using /dev/sd instead of None {{(pid=61839) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 566.135940] env[61839]: DEBUG nova.compute.manager [None req-baec407e-7ec5-474d-acf6-1dc7891e7887 tempest-ImagesOneServerNegativeTestJSON-734658384 tempest-ImagesOneServerNegativeTestJSON-734658384-project-member] [instance: 0b78efda-51c7-4d51-be31-005ff0d44ede] Allocating IP information in the background. 
{{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 566.139706] env[61839]: DEBUG nova.network.neutron [None req-baec407e-7ec5-474d-acf6-1dc7891e7887 tempest-ImagesOneServerNegativeTestJSON-734658384 tempest-ImagesOneServerNegativeTestJSON-734658384-project-member] [instance: 0b78efda-51c7-4d51-be31-005ff0d44ede] allocate_for_instance() {{(pid=61839) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 566.353263] env[61839]: DEBUG nova.policy [None req-baec407e-7ec5-474d-acf6-1dc7891e7887 tempest-ImagesOneServerNegativeTestJSON-734658384 tempest-ImagesOneServerNegativeTestJSON-734658384-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '27f71be7a80c4a7091d14a4a5371f17b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd2c00d6053f347f8a65e036c963da6bd', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61839) authorize /opt/stack/nova/nova/policy.py:201}} [ 566.359676] env[61839]: DEBUG nova.network.neutron [None req-0477abbc-1d43-4051-b701-9bb77cd00e21 tempest-ServerRescueTestJSON-1450719199 tempest-ServerRescueTestJSON-1450719199-project-member] [instance: 2032b746-2161-4487-ae4c-6159313241f4] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 566.522477] env[61839]: ERROR nova.compute.manager [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 73783eb6-0ed5-45d6-b54a-aad8df2a83fd, please check neutron logs for more information. 
[ 566.522477] env[61839]: ERROR nova.compute.manager Traceback (most recent call last): [ 566.522477] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 566.522477] env[61839]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 566.522477] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 566.522477] env[61839]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 566.522477] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 566.522477] env[61839]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 566.522477] env[61839]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 566.522477] env[61839]: ERROR nova.compute.manager self.force_reraise() [ 566.522477] env[61839]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 566.522477] env[61839]: ERROR nova.compute.manager raise self.value [ 566.522477] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 566.522477] env[61839]: ERROR nova.compute.manager updated_port = self._update_port( [ 566.522477] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 566.522477] env[61839]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 566.523038] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 566.523038] env[61839]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 566.523038] env[61839]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 73783eb6-0ed5-45d6-b54a-aad8df2a83fd, please check neutron logs for more information. 
[ 566.523038] env[61839]: ERROR nova.compute.manager [ 566.523038] env[61839]: Traceback (most recent call last): [ 566.523038] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 566.523038] env[61839]: listener.cb(fileno) [ 566.523038] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 566.523038] env[61839]: result = function(*args, **kwargs) [ 566.523038] env[61839]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 566.523038] env[61839]: return func(*args, **kwargs) [ 566.523038] env[61839]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 566.523038] env[61839]: raise e [ 566.523038] env[61839]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 566.523038] env[61839]: nwinfo = self.network_api.allocate_for_instance( [ 566.523038] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 566.523038] env[61839]: created_port_ids = self._update_ports_for_instance( [ 566.523038] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 566.523038] env[61839]: with excutils.save_and_reraise_exception(): [ 566.523038] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 566.523038] env[61839]: self.force_reraise() [ 566.523038] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 566.523038] env[61839]: raise self.value [ 566.523038] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 566.523038] env[61839]: updated_port = self._update_port( [ 566.523038] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 566.523038] env[61839]: _ensure_no_port_binding_failure(port) [ 566.523038] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 566.523038] env[61839]: raise exception.PortBindingFailed(port_id=port['id']) [ 566.524606] env[61839]: nova.exception.PortBindingFailed: Binding failed for port 73783eb6-0ed5-45d6-b54a-aad8df2a83fd, please check neutron logs for more information. [ 566.524606] env[61839]: Removing descriptor: 16 [ 566.524606] env[61839]: ERROR nova.compute.manager [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] [instance: 29dcaaa2-04fe-4835-acc9-41c433e6165f] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 73783eb6-0ed5-45d6-b54a-aad8df2a83fd, please check neutron logs for more information. 
[ 566.524606] env[61839]: ERROR nova.compute.manager [instance: 29dcaaa2-04fe-4835-acc9-41c433e6165f] Traceback (most recent call last): [ 566.524606] env[61839]: ERROR nova.compute.manager [instance: 29dcaaa2-04fe-4835-acc9-41c433e6165f] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 566.524606] env[61839]: ERROR nova.compute.manager [instance: 29dcaaa2-04fe-4835-acc9-41c433e6165f] yield resources [ 566.524606] env[61839]: ERROR nova.compute.manager [instance: 29dcaaa2-04fe-4835-acc9-41c433e6165f] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 566.524606] env[61839]: ERROR nova.compute.manager [instance: 29dcaaa2-04fe-4835-acc9-41c433e6165f] self.driver.spawn(context, instance, image_meta, [ 566.524606] env[61839]: ERROR nova.compute.manager [instance: 29dcaaa2-04fe-4835-acc9-41c433e6165f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 566.524606] env[61839]: ERROR nova.compute.manager [instance: 29dcaaa2-04fe-4835-acc9-41c433e6165f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 566.524606] env[61839]: ERROR nova.compute.manager [instance: 29dcaaa2-04fe-4835-acc9-41c433e6165f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 566.524606] env[61839]: ERROR nova.compute.manager [instance: 29dcaaa2-04fe-4835-acc9-41c433e6165f] vm_ref = self.build_virtual_machine(instance, [ 566.525025] env[61839]: ERROR nova.compute.manager [instance: 29dcaaa2-04fe-4835-acc9-41c433e6165f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 566.525025] env[61839]: ERROR nova.compute.manager [instance: 29dcaaa2-04fe-4835-acc9-41c433e6165f] vif_infos = vmwarevif.get_vif_info(self._session, [ 566.525025] env[61839]: ERROR nova.compute.manager [instance: 29dcaaa2-04fe-4835-acc9-41c433e6165f] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 566.525025] env[61839]: ERROR nova.compute.manager [instance: 29dcaaa2-04fe-4835-acc9-41c433e6165f] for vif in network_info: [ 566.525025] env[61839]: ERROR nova.compute.manager [instance: 29dcaaa2-04fe-4835-acc9-41c433e6165f] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 566.525025] env[61839]: ERROR nova.compute.manager [instance: 29dcaaa2-04fe-4835-acc9-41c433e6165f] return self._sync_wrapper(fn, *args, **kwargs) [ 566.525025] env[61839]: ERROR nova.compute.manager [instance: 29dcaaa2-04fe-4835-acc9-41c433e6165f] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 566.525025] env[61839]: ERROR nova.compute.manager [instance: 29dcaaa2-04fe-4835-acc9-41c433e6165f] self.wait() [ 566.525025] env[61839]: ERROR nova.compute.manager [instance: 29dcaaa2-04fe-4835-acc9-41c433e6165f] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 566.525025] env[61839]: ERROR nova.compute.manager [instance: 29dcaaa2-04fe-4835-acc9-41c433e6165f] self[:] = self._gt.wait() [ 566.525025] env[61839]: ERROR nova.compute.manager [instance: 29dcaaa2-04fe-4835-acc9-41c433e6165f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 566.525025] env[61839]: ERROR nova.compute.manager [instance: 29dcaaa2-04fe-4835-acc9-41c433e6165f] return self._exit_event.wait() [ 566.525025] env[61839]: ERROR nova.compute.manager [instance: 29dcaaa2-04fe-4835-acc9-41c433e6165f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 566.525497] env[61839]: ERROR 
nova.compute.manager [instance: 29dcaaa2-04fe-4835-acc9-41c433e6165f] result = hub.switch() [ 566.525497] env[61839]: ERROR nova.compute.manager [instance: 29dcaaa2-04fe-4835-acc9-41c433e6165f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 566.525497] env[61839]: ERROR nova.compute.manager [instance: 29dcaaa2-04fe-4835-acc9-41c433e6165f] return self.greenlet.switch() [ 566.525497] env[61839]: ERROR nova.compute.manager [instance: 29dcaaa2-04fe-4835-acc9-41c433e6165f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 566.525497] env[61839]: ERROR nova.compute.manager [instance: 29dcaaa2-04fe-4835-acc9-41c433e6165f] result = function(*args, **kwargs) [ 566.525497] env[61839]: ERROR nova.compute.manager [instance: 29dcaaa2-04fe-4835-acc9-41c433e6165f] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 566.525497] env[61839]: ERROR nova.compute.manager [instance: 29dcaaa2-04fe-4835-acc9-41c433e6165f] return func(*args, **kwargs) [ 566.525497] env[61839]: ERROR nova.compute.manager [instance: 29dcaaa2-04fe-4835-acc9-41c433e6165f] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 566.525497] env[61839]: ERROR nova.compute.manager [instance: 29dcaaa2-04fe-4835-acc9-41c433e6165f] raise e [ 566.525497] env[61839]: ERROR nova.compute.manager [instance: 29dcaaa2-04fe-4835-acc9-41c433e6165f] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 566.525497] env[61839]: ERROR nova.compute.manager [instance: 29dcaaa2-04fe-4835-acc9-41c433e6165f] nwinfo = self.network_api.allocate_for_instance( [ 566.525497] env[61839]: ERROR nova.compute.manager [instance: 29dcaaa2-04fe-4835-acc9-41c433e6165f] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 566.525497] env[61839]: ERROR nova.compute.manager [instance: 29dcaaa2-04fe-4835-acc9-41c433e6165f] created_port_ids = self._update_ports_for_instance( [ 566.525870] env[61839]: ERROR nova.compute.manager [instance: 29dcaaa2-04fe-4835-acc9-41c433e6165f] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 566.525870] env[61839]: ERROR nova.compute.manager [instance: 29dcaaa2-04fe-4835-acc9-41c433e6165f] with excutils.save_and_reraise_exception(): [ 566.525870] env[61839]: ERROR nova.compute.manager [instance: 29dcaaa2-04fe-4835-acc9-41c433e6165f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 566.525870] env[61839]: ERROR nova.compute.manager [instance: 29dcaaa2-04fe-4835-acc9-41c433e6165f] self.force_reraise() [ 566.525870] env[61839]: ERROR nova.compute.manager [instance: 29dcaaa2-04fe-4835-acc9-41c433e6165f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 566.525870] env[61839]: ERROR nova.compute.manager [instance: 29dcaaa2-04fe-4835-acc9-41c433e6165f] raise self.value [ 566.525870] env[61839]: ERROR nova.compute.manager [instance: 29dcaaa2-04fe-4835-acc9-41c433e6165f] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 566.525870] env[61839]: ERROR nova.compute.manager [instance: 29dcaaa2-04fe-4835-acc9-41c433e6165f] updated_port = self._update_port( [ 566.525870] env[61839]: ERROR nova.compute.manager [instance: 29dcaaa2-04fe-4835-acc9-41c433e6165f] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 566.525870] 
env[61839]: ERROR nova.compute.manager [instance: 29dcaaa2-04fe-4835-acc9-41c433e6165f] _ensure_no_port_binding_failure(port) [ 566.525870] env[61839]: ERROR nova.compute.manager [instance: 29dcaaa2-04fe-4835-acc9-41c433e6165f] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 566.525870] env[61839]: ERROR nova.compute.manager [instance: 29dcaaa2-04fe-4835-acc9-41c433e6165f] raise exception.PortBindingFailed(port_id=port['id']) [ 566.526954] env[61839]: ERROR nova.compute.manager [instance: 29dcaaa2-04fe-4835-acc9-41c433e6165f] nova.exception.PortBindingFailed: Binding failed for port 73783eb6-0ed5-45d6-b54a-aad8df2a83fd, please check neutron logs for more information. [ 566.526954] env[61839]: ERROR nova.compute.manager [instance: 29dcaaa2-04fe-4835-acc9-41c433e6165f] [ 566.526954] env[61839]: INFO nova.compute.manager [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] [instance: 29dcaaa2-04fe-4835-acc9-41c433e6165f] Terminating instance [ 566.526954] env[61839]: DEBUG oslo_concurrency.lockutils [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] Acquiring lock "refresh_cache-29dcaaa2-04fe-4835-acc9-41c433e6165f" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 566.526954] env[61839]: DEBUG oslo_concurrency.lockutils [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] Acquired lock "refresh_cache-29dcaaa2-04fe-4835-acc9-41c433e6165f" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 566.526954] env[61839]: DEBUG nova.network.neutron [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] [instance: 29dcaaa2-04fe-4835-acc9-41c433e6165f] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 566.567159] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cd29d60-4a75-4489-8d8a-70fc95c7130a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.576709] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d299cdf-8e4c-461b-bc26-703eb71b89ca {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.608828] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ced49e13-4d0e-429f-87ea-7675d27c50c9 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.617170] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fad4397-2c1d-4a39-8258-706dabaa4669 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.633311] env[61839]: DEBUG nova.compute.provider_tree [None req-e2e7743f-119e-4c3a-a685-1f1242f17cae tempest-VolumesAssistedSnapshotsTest-1863052206 
tempest-VolumesAssistedSnapshotsTest-1863052206-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 566.645410] env[61839]: DEBUG nova.compute.manager [None req-baec407e-7ec5-474d-acf6-1dc7891e7887 tempest-ImagesOneServerNegativeTestJSON-734658384 tempest-ImagesOneServerNegativeTestJSON-734658384-project-member] [instance: 0b78efda-51c7-4d51-be31-005ff0d44ede] Start building block device mappings for instance. {{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 566.865046] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0477abbc-1d43-4051-b701-9bb77cd00e21 tempest-ServerRescueTestJSON-1450719199 tempest-ServerRescueTestJSON-1450719199-project-member] Releasing lock "refresh_cache-2032b746-2161-4487-ae4c-6159313241f4" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 566.865046] env[61839]: DEBUG nova.compute.manager [None req-0477abbc-1d43-4051-b701-9bb77cd00e21 tempest-ServerRescueTestJSON-1450719199 tempest-ServerRescueTestJSON-1450719199-project-member] [instance: 2032b746-2161-4487-ae4c-6159313241f4] Start destroying the instance on the hypervisor. {{(pid=61839) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 566.865046] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-0477abbc-1d43-4051-b701-9bb77cd00e21 tempest-ServerRescueTestJSON-1450719199 tempest-ServerRescueTestJSON-1450719199-project-member] [instance: 2032b746-2161-4487-ae4c-6159313241f4] Destroying instance {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 566.865443] env[61839]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-fda74de7-36fc-400b-86af-7806712049f8 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.876063] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d165941e-9a41-418e-8d00-0a089a8d3f5e {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.901337] env[61839]: WARNING nova.virt.vmwareapi.vmops [None req-0477abbc-1d43-4051-b701-9bb77cd00e21 tempest-ServerRescueTestJSON-1450719199 tempest-ServerRescueTestJSON-1450719199-project-member] [instance: 2032b746-2161-4487-ae4c-6159313241f4] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 2032b746-2161-4487-ae4c-6159313241f4 could not be found. [ 566.901337] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-0477abbc-1d43-4051-b701-9bb77cd00e21 tempest-ServerRescueTestJSON-1450719199 tempest-ServerRescueTestJSON-1450719199-project-member] [instance: 2032b746-2161-4487-ae4c-6159313241f4] Instance destroyed {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 566.901337] env[61839]: INFO nova.compute.manager [None req-0477abbc-1d43-4051-b701-9bb77cd00e21 tempest-ServerRescueTestJSON-1450719199 tempest-ServerRescueTestJSON-1450719199-project-member] [instance: 2032b746-2161-4487-ae4c-6159313241f4] Took 0.04 seconds to destroy the instance on the hypervisor. 
[ 566.901337] env[61839]: DEBUG oslo.service.loopingcall [None req-0477abbc-1d43-4051-b701-9bb77cd00e21 tempest-ServerRescueTestJSON-1450719199 tempest-ServerRescueTestJSON-1450719199-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 566.901337] env[61839]: DEBUG nova.compute.manager [-] [instance: 2032b746-2161-4487-ae4c-6159313241f4] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 566.901337] env[61839]: DEBUG nova.network.neutron [-] [instance: 2032b746-2161-4487-ae4c-6159313241f4] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 566.930156] env[61839]: DEBUG nova.network.neutron [-] [instance: 2032b746-2161-4487-ae4c-6159313241f4] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 566.983931] env[61839]: DEBUG oslo_concurrency.lockutils [None req-76059687-6f91-475a-bfaf-52787dc520ba tempest-AttachInterfacesUnderV243Test-1125158786 tempest-AttachInterfacesUnderV243Test-1125158786-project-member] Acquiring lock "328882b4-d1af-4036-b313-ecada7d53899" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 566.984184] env[61839]: DEBUG oslo_concurrency.lockutils [None req-76059687-6f91-475a-bfaf-52787dc520ba tempest-AttachInterfacesUnderV243Test-1125158786 tempest-AttachInterfacesUnderV243Test-1125158786-project-member] Lock "328882b4-d1af-4036-b313-ecada7d53899" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 567.066166] env[61839]: DEBUG nova.network.neutron [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] [instance: 29dcaaa2-04fe-4835-acc9-41c433e6165f] Instance cache missing network info. 
{{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 567.136601] env[61839]: DEBUG nova.scheduler.client.report [None req-e2e7743f-119e-4c3a-a685-1f1242f17cae tempest-VolumesAssistedSnapshotsTest-1863052206 tempest-VolumesAssistedSnapshotsTest-1863052206-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 567.263111] env[61839]: DEBUG nova.network.neutron [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] [instance: 29dcaaa2-04fe-4835-acc9-41c433e6165f] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 567.433345] env[61839]: DEBUG nova.network.neutron [-] [instance: 2032b746-2161-4487-ae4c-6159313241f4] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 567.644694] env[61839]: DEBUG oslo_concurrency.lockutils [None req-e2e7743f-119e-4c3a-a685-1f1242f17cae tempest-VolumesAssistedSnapshotsTest-1863052206 tempest-VolumesAssistedSnapshotsTest-1863052206-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.012s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 567.647813] env[61839]: ERROR nova.compute.manager [None req-e2e7743f-119e-4c3a-a685-1f1242f17cae tempest-VolumesAssistedSnapshotsTest-1863052206 tempest-VolumesAssistedSnapshotsTest-1863052206-project-member] [instance: 9594f132-d558-4c75-872f-b1d1b7c08f66] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 9c3b8ee6-c7b3-473b-99ac-0212bfa57f3e, please check neutron logs for more information. 
[ 567.647813] env[61839]: ERROR nova.compute.manager [instance: 9594f132-d558-4c75-872f-b1d1b7c08f66] Traceback (most recent call last): [ 567.647813] env[61839]: ERROR nova.compute.manager [instance: 9594f132-d558-4c75-872f-b1d1b7c08f66] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 567.647813] env[61839]: ERROR nova.compute.manager [instance: 9594f132-d558-4c75-872f-b1d1b7c08f66] self.driver.spawn(context, instance, image_meta, [ 567.647813] env[61839]: ERROR nova.compute.manager [instance: 9594f132-d558-4c75-872f-b1d1b7c08f66] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 567.647813] env[61839]: ERROR nova.compute.manager [instance: 9594f132-d558-4c75-872f-b1d1b7c08f66] self._vmops.spawn(context, instance, image_meta, injected_files, [ 567.647813] env[61839]: ERROR nova.compute.manager [instance: 9594f132-d558-4c75-872f-b1d1b7c08f66] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 567.647813] env[61839]: ERROR nova.compute.manager [instance: 9594f132-d558-4c75-872f-b1d1b7c08f66] vm_ref = self.build_virtual_machine(instance, [ 567.647813] env[61839]: ERROR nova.compute.manager [instance: 9594f132-d558-4c75-872f-b1d1b7c08f66] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 567.647813] env[61839]: ERROR nova.compute.manager [instance: 9594f132-d558-4c75-872f-b1d1b7c08f66] vif_infos = vmwarevif.get_vif_info(self._session, [ 567.647813] env[61839]: ERROR nova.compute.manager [instance: 9594f132-d558-4c75-872f-b1d1b7c08f66] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 567.648310] env[61839]: ERROR nova.compute.manager [instance: 9594f132-d558-4c75-872f-b1d1b7c08f66] for vif in network_info: [ 567.648310] env[61839]: ERROR nova.compute.manager [instance: 9594f132-d558-4c75-872f-b1d1b7c08f66] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 567.648310] env[61839]: ERROR nova.compute.manager [instance: 9594f132-d558-4c75-872f-b1d1b7c08f66] return self._sync_wrapper(fn, *args, **kwargs) [ 567.648310] env[61839]: ERROR nova.compute.manager [instance: 9594f132-d558-4c75-872f-b1d1b7c08f66] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 567.648310] env[61839]: ERROR nova.compute.manager [instance: 9594f132-d558-4c75-872f-b1d1b7c08f66] self.wait() [ 567.648310] env[61839]: ERROR nova.compute.manager [instance: 9594f132-d558-4c75-872f-b1d1b7c08f66] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 567.648310] env[61839]: ERROR nova.compute.manager [instance: 9594f132-d558-4c75-872f-b1d1b7c08f66] self[:] = self._gt.wait() [ 567.648310] env[61839]: ERROR nova.compute.manager [instance: 9594f132-d558-4c75-872f-b1d1b7c08f66] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 567.648310] env[61839]: ERROR nova.compute.manager [instance: 9594f132-d558-4c75-872f-b1d1b7c08f66] return self._exit_event.wait() [ 567.648310] env[61839]: ERROR nova.compute.manager [instance: 9594f132-d558-4c75-872f-b1d1b7c08f66] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 567.648310] env[61839]: ERROR nova.compute.manager [instance: 9594f132-d558-4c75-872f-b1d1b7c08f66] result = hub.switch() [ 567.648310] env[61839]: ERROR nova.compute.manager [instance: 9594f132-d558-4c75-872f-b1d1b7c08f66] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
567.648310] env[61839]: ERROR nova.compute.manager [instance: 9594f132-d558-4c75-872f-b1d1b7c08f66] return self.greenlet.switch() [ 567.648698] env[61839]: ERROR nova.compute.manager [instance: 9594f132-d558-4c75-872f-b1d1b7c08f66] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 567.648698] env[61839]: ERROR nova.compute.manager [instance: 9594f132-d558-4c75-872f-b1d1b7c08f66] result = function(*args, **kwargs) [ 567.648698] env[61839]: ERROR nova.compute.manager [instance: 9594f132-d558-4c75-872f-b1d1b7c08f66] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 567.648698] env[61839]: ERROR nova.compute.manager [instance: 9594f132-d558-4c75-872f-b1d1b7c08f66] return func(*args, **kwargs) [ 567.648698] env[61839]: ERROR nova.compute.manager [instance: 9594f132-d558-4c75-872f-b1d1b7c08f66] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 567.648698] env[61839]: ERROR nova.compute.manager [instance: 9594f132-d558-4c75-872f-b1d1b7c08f66] raise e [ 567.648698] env[61839]: ERROR nova.compute.manager [instance: 9594f132-d558-4c75-872f-b1d1b7c08f66] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 567.648698] env[61839]: ERROR nova.compute.manager [instance: 9594f132-d558-4c75-872f-b1d1b7c08f66] nwinfo = self.network_api.allocate_for_instance( [ 567.648698] env[61839]: ERROR nova.compute.manager [instance: 9594f132-d558-4c75-872f-b1d1b7c08f66] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 567.648698] env[61839]: ERROR nova.compute.manager [instance: 9594f132-d558-4c75-872f-b1d1b7c08f66] created_port_ids = self._update_ports_for_instance( [ 567.648698] env[61839]: ERROR nova.compute.manager [instance: 9594f132-d558-4c75-872f-b1d1b7c08f66] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 567.648698] env[61839]: ERROR nova.compute.manager [instance: 9594f132-d558-4c75-872f-b1d1b7c08f66] with excutils.save_and_reraise_exception(): [ 567.648698] env[61839]: ERROR nova.compute.manager [instance: 9594f132-d558-4c75-872f-b1d1b7c08f66] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 567.649094] env[61839]: ERROR nova.compute.manager [instance: 9594f132-d558-4c75-872f-b1d1b7c08f66] self.force_reraise() [ 567.649094] env[61839]: ERROR nova.compute.manager [instance: 9594f132-d558-4c75-872f-b1d1b7c08f66] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 567.649094] env[61839]: ERROR nova.compute.manager [instance: 9594f132-d558-4c75-872f-b1d1b7c08f66] raise self.value [ 567.649094] env[61839]: ERROR nova.compute.manager [instance: 9594f132-d558-4c75-872f-b1d1b7c08f66] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 567.649094] env[61839]: ERROR nova.compute.manager [instance: 9594f132-d558-4c75-872f-b1d1b7c08f66] updated_port = self._update_port( [ 567.649094] env[61839]: ERROR nova.compute.manager [instance: 9594f132-d558-4c75-872f-b1d1b7c08f66] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 567.649094] env[61839]: ERROR nova.compute.manager [instance: 9594f132-d558-4c75-872f-b1d1b7c08f66] _ensure_no_port_binding_failure(port) [ 567.649094] env[61839]: ERROR nova.compute.manager [instance: 9594f132-d558-4c75-872f-b1d1b7c08f66] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 567.649094] env[61839]: ERROR nova.compute.manager [instance: 9594f132-d558-4c75-872f-b1d1b7c08f66] raise exception.PortBindingFailed(port_id=port['id']) [ 567.649094] env[61839]: ERROR nova.compute.manager [instance: 9594f132-d558-4c75-872f-b1d1b7c08f66] nova.exception.PortBindingFailed: Binding failed for port 9c3b8ee6-c7b3-473b-99ac-0212bfa57f3e, please check neutron logs for more information. [ 567.649094] env[61839]: ERROR nova.compute.manager [instance: 9594f132-d558-4c75-872f-b1d1b7c08f66] [ 567.649477] env[61839]: DEBUG nova.compute.utils [None req-e2e7743f-119e-4c3a-a685-1f1242f17cae tempest-VolumesAssistedSnapshotsTest-1863052206 tempest-VolumesAssistedSnapshotsTest-1863052206-project-member] [instance: 9594f132-d558-4c75-872f-b1d1b7c08f66] Binding failed for port 9c3b8ee6-c7b3-473b-99ac-0212bfa57f3e, please check neutron logs for more information. {{(pid=61839) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 567.652925] env[61839]: DEBUG oslo_concurrency.lockutils [None req-540fdcb6-3f39-4f2c-bc6d-6e0b40f83f5f tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 24.937s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 567.652925] env[61839]: DEBUG nova.objects.instance [None req-540fdcb6-3f39-4f2c-bc6d-6e0b40f83f5f tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] [instance: 211e8267-3c33-42c8-852f-1c20d7987453] Trying to apply a migration context that does not seem to be set for this instance {{(pid=61839) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 567.658453] env[61839]: DEBUG nova.compute.manager [None req-e2e7743f-119e-4c3a-a685-1f1242f17cae tempest-VolumesAssistedSnapshotsTest-1863052206 tempest-VolumesAssistedSnapshotsTest-1863052206-project-member] [instance: 9594f132-d558-4c75-872f-b1d1b7c08f66] Build of instance 9594f132-d558-4c75-872f-b1d1b7c08f66 was re-scheduled: Binding failed for port 9c3b8ee6-c7b3-473b-99ac-0212bfa57f3e, please check neutron logs for more information. 
{{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 567.658453] env[61839]: DEBUG nova.compute.manager [None req-e2e7743f-119e-4c3a-a685-1f1242f17cae tempest-VolumesAssistedSnapshotsTest-1863052206 tempest-VolumesAssistedSnapshotsTest-1863052206-project-member] [instance: 9594f132-d558-4c75-872f-b1d1b7c08f66] Unplugging VIFs for instance {{(pid=61839) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 567.658800] env[61839]: DEBUG oslo_concurrency.lockutils [None req-e2e7743f-119e-4c3a-a685-1f1242f17cae tempest-VolumesAssistedSnapshotsTest-1863052206 tempest-VolumesAssistedSnapshotsTest-1863052206-project-member] Acquiring lock "refresh_cache-9594f132-d558-4c75-872f-b1d1b7c08f66" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 567.658875] env[61839]: DEBUG oslo_concurrency.lockutils [None req-e2e7743f-119e-4c3a-a685-1f1242f17cae tempest-VolumesAssistedSnapshotsTest-1863052206 tempest-VolumesAssistedSnapshotsTest-1863052206-project-member] Acquired lock "refresh_cache-9594f132-d558-4c75-872f-b1d1b7c08f66" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 567.658985] env[61839]: DEBUG nova.network.neutron [None req-e2e7743f-119e-4c3a-a685-1f1242f17cae tempest-VolumesAssistedSnapshotsTest-1863052206 tempest-VolumesAssistedSnapshotsTest-1863052206-project-member] [instance: 9594f132-d558-4c75-872f-b1d1b7c08f66] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 567.662301] env[61839]: DEBUG nova.compute.manager [None req-baec407e-7ec5-474d-acf6-1dc7891e7887 tempest-ImagesOneServerNegativeTestJSON-734658384 tempest-ImagesOneServerNegativeTestJSON-734658384-project-member] [instance: 0b78efda-51c7-4d51-be31-005ff0d44ede] Start spawning the instance on the hypervisor. 
{{(pid=61839) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 567.682588] env[61839]: DEBUG oslo_concurrency.lockutils [None req-b7e16cf6-cab6-4bac-b016-9355722d13b1 tempest-ServerPasswordTestJSON-1977854233 tempest-ServerPasswordTestJSON-1977854233-project-member] Acquiring lock "2670f16e-4c44-4b88-937e-9e491f599acb" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 567.682893] env[61839]: DEBUG oslo_concurrency.lockutils [None req-b7e16cf6-cab6-4bac-b016-9355722d13b1 tempest-ServerPasswordTestJSON-1977854233 tempest-ServerPasswordTestJSON-1977854233-project-member] Lock "2670f16e-4c44-4b88-937e-9e491f599acb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 567.701817] env[61839]: DEBUG nova.virt.hardware [None req-baec407e-7ec5-474d-acf6-1dc7891e7887 tempest-ImagesOneServerNegativeTestJSON-734658384 tempest-ImagesOneServerNegativeTestJSON-734658384-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-18T16:51:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-18T16:50:57Z,direct_url=,disk_format='vmdk',id=e497cc62-282a-4a70-9770-22d80d8a1013,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a98e034276e44534a9feace637762da7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-18T16:50:58Z,virtual_size=,visibility=), allow threads: False {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 567.701817] env[61839]: DEBUG nova.virt.hardware [None req-baec407e-7ec5-474d-acf6-1dc7891e7887 tempest-ImagesOneServerNegativeTestJSON-734658384 tempest-ImagesOneServerNegativeTestJSON-734658384-project-member] Flavor limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 567.701817] env[61839]: DEBUG nova.virt.hardware [None req-baec407e-7ec5-474d-acf6-1dc7891e7887 tempest-ImagesOneServerNegativeTestJSON-734658384 tempest-ImagesOneServerNegativeTestJSON-734658384-project-member] Image limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 567.702148] env[61839]: DEBUG nova.virt.hardware [None req-baec407e-7ec5-474d-acf6-1dc7891e7887 tempest-ImagesOneServerNegativeTestJSON-734658384 tempest-ImagesOneServerNegativeTestJSON-734658384-project-member] Flavor pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 567.702148] env[61839]: DEBUG nova.virt.hardware [None req-baec407e-7ec5-474d-acf6-1dc7891e7887 tempest-ImagesOneServerNegativeTestJSON-734658384 tempest-ImagesOneServerNegativeTestJSON-734658384-project-member] Image pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 567.702148] env[61839]: DEBUG nova.virt.hardware [None req-baec407e-7ec5-474d-acf6-1dc7891e7887 
tempest-ImagesOneServerNegativeTestJSON-734658384 tempest-ImagesOneServerNegativeTestJSON-734658384-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 567.702148] env[61839]: DEBUG nova.virt.hardware [None req-baec407e-7ec5-474d-acf6-1dc7891e7887 tempest-ImagesOneServerNegativeTestJSON-734658384 tempest-ImagesOneServerNegativeTestJSON-734658384-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 567.702148] env[61839]: DEBUG nova.virt.hardware [None req-baec407e-7ec5-474d-acf6-1dc7891e7887 tempest-ImagesOneServerNegativeTestJSON-734658384 tempest-ImagesOneServerNegativeTestJSON-734658384-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 567.702325] env[61839]: DEBUG nova.virt.hardware [None req-baec407e-7ec5-474d-acf6-1dc7891e7887 tempest-ImagesOneServerNegativeTestJSON-734658384 tempest-ImagesOneServerNegativeTestJSON-734658384-project-member] Got 1 possible topologies {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 567.702325] env[61839]: DEBUG nova.virt.hardware [None req-baec407e-7ec5-474d-acf6-1dc7891e7887 tempest-ImagesOneServerNegativeTestJSON-734658384 tempest-ImagesOneServerNegativeTestJSON-734658384-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 567.702454] env[61839]: DEBUG nova.virt.hardware [None req-baec407e-7ec5-474d-acf6-1dc7891e7887 tempest-ImagesOneServerNegativeTestJSON-734658384 tempest-ImagesOneServerNegativeTestJSON-734658384-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 567.704564] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03956ec0-fb55-408f-a834-54109cc1cba3 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.714552] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07c51fcd-8494-4c02-9bc2-584e3009a08f {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.769459] env[61839]: DEBUG oslo_concurrency.lockutils [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] Releasing lock "refresh_cache-29dcaaa2-04fe-4835-acc9-41c433e6165f" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 567.769459] env[61839]: DEBUG nova.compute.manager [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] [instance: 29dcaaa2-04fe-4835-acc9-41c433e6165f] Start destroying the instance on the hypervisor. 
{{(pid=61839) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 567.769621] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] [instance: 29dcaaa2-04fe-4835-acc9-41c433e6165f] Destroying instance {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 567.769933] env[61839]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-39f06900-739f-4d08-a424-63c773e635bf {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.779343] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2a48f6b-cc4d-466e-acde-b88e7b3ef1a5 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.791958] env[61839]: ERROR nova.compute.manager [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 0a7d0ef0-1a14-4cc0-8b69-de52e22b919e, please check neutron logs for more information. [ 567.791958] env[61839]: ERROR nova.compute.manager Traceback (most recent call last): [ 567.791958] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 567.791958] env[61839]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 567.791958] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 567.791958] env[61839]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 567.791958] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 567.791958] env[61839]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 567.791958] env[61839]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 567.791958] env[61839]: ERROR nova.compute.manager self.force_reraise() [ 567.791958] env[61839]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 567.791958] env[61839]: ERROR nova.compute.manager raise self.value [ 567.791958] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 567.791958] env[61839]: ERROR nova.compute.manager updated_port = self._update_port( [ 567.791958] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 567.791958] env[61839]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 567.792634] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 567.792634] env[61839]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 567.792634] env[61839]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 0a7d0ef0-1a14-4cc0-8b69-de52e22b919e, please check neutron logs for more information. 
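The _ensure_no_port_binding_failure frame at the bottom of this traceback is where Nova converts Neutron's failed binding into the PortBindingFailed raised throughout this run. A minimal sketch of that check, assuming Neutron's usual 'binding_failed' sentinel for the port's binding:vif_type attribute (the constant name below is illustrative, not copied from Nova):

    from nova import exception

    VIF_TYPE_BINDING_FAILED = 'binding_failed'  # assumed Neutron sentinel value

    def ensure_no_port_binding_failure(port):
        # Raise as soon as Neutron reports it could not bind the port; the
        # caller (_update_ports_for_instance) re-raises this up the stack.
        if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
            raise exception.PortBindingFailed(port_id=port['id'])

The bare eventlet copy of the same traceback is repeated directly below.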
[ 567.792634] env[61839]: ERROR nova.compute.manager [ 567.792634] env[61839]: Traceback (most recent call last): [ 567.792634] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 567.792634] env[61839]: listener.cb(fileno) [ 567.792634] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 567.792634] env[61839]: result = function(*args, **kwargs) [ 567.792634] env[61839]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 567.792634] env[61839]: return func(*args, **kwargs) [ 567.792634] env[61839]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 567.792634] env[61839]: raise e [ 567.792634] env[61839]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 567.792634] env[61839]: nwinfo = self.network_api.allocate_for_instance( [ 567.792634] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 567.792634] env[61839]: created_port_ids = self._update_ports_for_instance( [ 567.792634] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 567.792634] env[61839]: with excutils.save_and_reraise_exception(): [ 567.792634] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 567.792634] env[61839]: self.force_reraise() [ 567.792634] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 567.792634] env[61839]: raise self.value [ 567.792634] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 567.792634] env[61839]: updated_port = self._update_port( [ 567.792634] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 567.792634] env[61839]: _ensure_no_port_binding_failure(port) [ 567.792634] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 567.792634] env[61839]: raise exception.PortBindingFailed(port_id=port['id']) [ 567.796536] env[61839]: nova.exception.PortBindingFailed: Binding failed for port 0a7d0ef0-1a14-4cc0-8b69-de52e22b919e, please check neutron logs for more information. [ 567.796536] env[61839]: Removing descriptor: 20 [ 567.796536] env[61839]: ERROR nova.compute.manager [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] [instance: ec27ab37-2351-4ad1-b41f-8de9bfab8b9d] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 0a7d0ef0-1a14-4cc0-8b69-de52e22b919e, please check neutron logs for more information. 
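Both copies of the traceback bottom out in _allocate_network_async: network allocation runs in a greenthread, and the spawn path only observes the failure when it first iterates the deferred network_info (the model.py _sync_wrapper and wait frames in the per-instance traceback that follows). A rough sketch of that deferred pattern, using illustrative names rather than Nova's actual async wrapper class:

    import eventlet

    class DeferredNetworkInfo:
        """Toy stand-in for nova.network.model's async network-info wrapper."""

        def __init__(self, allocate_fn, *args, **kwargs):
            # Allocation starts immediately in a greenthread ...
            self._gt = eventlet.spawn(allocate_fn, *args, **kwargs)
            self._nw_info = None

        def wait(self):
            # ... but a PortBindingFailed raised there only surfaces here,
            # when the consumer calls wait(); GreenThread.wait() re-raises.
            if self._nw_info is None:
                self._nw_info = self._gt.wait()
            return self._nw_info

        def __iter__(self):
            # get_vif_info's "for vif in network_info" loop lands here.
            return iter(self.wait())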
[ 567.796536] env[61839]: ERROR nova.compute.manager [instance: ec27ab37-2351-4ad1-b41f-8de9bfab8b9d] Traceback (most recent call last): [ 567.796536] env[61839]: ERROR nova.compute.manager [instance: ec27ab37-2351-4ad1-b41f-8de9bfab8b9d] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 567.796536] env[61839]: ERROR nova.compute.manager [instance: ec27ab37-2351-4ad1-b41f-8de9bfab8b9d] yield resources [ 567.796536] env[61839]: ERROR nova.compute.manager [instance: ec27ab37-2351-4ad1-b41f-8de9bfab8b9d] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 567.796536] env[61839]: ERROR nova.compute.manager [instance: ec27ab37-2351-4ad1-b41f-8de9bfab8b9d] self.driver.spawn(context, instance, image_meta, [ 567.796536] env[61839]: ERROR nova.compute.manager [instance: ec27ab37-2351-4ad1-b41f-8de9bfab8b9d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 567.796536] env[61839]: ERROR nova.compute.manager [instance: ec27ab37-2351-4ad1-b41f-8de9bfab8b9d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 567.796536] env[61839]: ERROR nova.compute.manager [instance: ec27ab37-2351-4ad1-b41f-8de9bfab8b9d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 567.796536] env[61839]: ERROR nova.compute.manager [instance: ec27ab37-2351-4ad1-b41f-8de9bfab8b9d] vm_ref = self.build_virtual_machine(instance, [ 567.797634] env[61839]: ERROR nova.compute.manager [instance: ec27ab37-2351-4ad1-b41f-8de9bfab8b9d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 567.797634] env[61839]: ERROR nova.compute.manager [instance: ec27ab37-2351-4ad1-b41f-8de9bfab8b9d] vif_infos = vmwarevif.get_vif_info(self._session, [ 567.797634] env[61839]: ERROR nova.compute.manager [instance: ec27ab37-2351-4ad1-b41f-8de9bfab8b9d] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 567.797634] env[61839]: ERROR nova.compute.manager [instance: ec27ab37-2351-4ad1-b41f-8de9bfab8b9d] for vif in network_info: [ 567.797634] env[61839]: ERROR nova.compute.manager [instance: ec27ab37-2351-4ad1-b41f-8de9bfab8b9d] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 567.797634] env[61839]: ERROR nova.compute.manager [instance: ec27ab37-2351-4ad1-b41f-8de9bfab8b9d] return self._sync_wrapper(fn, *args, **kwargs) [ 567.797634] env[61839]: ERROR nova.compute.manager [instance: ec27ab37-2351-4ad1-b41f-8de9bfab8b9d] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 567.797634] env[61839]: ERROR nova.compute.manager [instance: ec27ab37-2351-4ad1-b41f-8de9bfab8b9d] self.wait() [ 567.797634] env[61839]: ERROR nova.compute.manager [instance: ec27ab37-2351-4ad1-b41f-8de9bfab8b9d] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 567.797634] env[61839]: ERROR nova.compute.manager [instance: ec27ab37-2351-4ad1-b41f-8de9bfab8b9d] self[:] = self._gt.wait() [ 567.797634] env[61839]: ERROR nova.compute.manager [instance: ec27ab37-2351-4ad1-b41f-8de9bfab8b9d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 567.797634] env[61839]: ERROR nova.compute.manager [instance: ec27ab37-2351-4ad1-b41f-8de9bfab8b9d] return self._exit_event.wait() [ 567.797634] env[61839]: ERROR nova.compute.manager [instance: ec27ab37-2351-4ad1-b41f-8de9bfab8b9d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 567.798483] env[61839]: ERROR 
nova.compute.manager [instance: ec27ab37-2351-4ad1-b41f-8de9bfab8b9d] result = hub.switch() [ 567.798483] env[61839]: ERROR nova.compute.manager [instance: ec27ab37-2351-4ad1-b41f-8de9bfab8b9d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 567.798483] env[61839]: ERROR nova.compute.manager [instance: ec27ab37-2351-4ad1-b41f-8de9bfab8b9d] return self.greenlet.switch() [ 567.798483] env[61839]: ERROR nova.compute.manager [instance: ec27ab37-2351-4ad1-b41f-8de9bfab8b9d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 567.798483] env[61839]: ERROR nova.compute.manager [instance: ec27ab37-2351-4ad1-b41f-8de9bfab8b9d] result = function(*args, **kwargs) [ 567.798483] env[61839]: ERROR nova.compute.manager [instance: ec27ab37-2351-4ad1-b41f-8de9bfab8b9d] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 567.798483] env[61839]: ERROR nova.compute.manager [instance: ec27ab37-2351-4ad1-b41f-8de9bfab8b9d] return func(*args, **kwargs) [ 567.798483] env[61839]: ERROR nova.compute.manager [instance: ec27ab37-2351-4ad1-b41f-8de9bfab8b9d] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 567.798483] env[61839]: ERROR nova.compute.manager [instance: ec27ab37-2351-4ad1-b41f-8de9bfab8b9d] raise e [ 567.798483] env[61839]: ERROR nova.compute.manager [instance: ec27ab37-2351-4ad1-b41f-8de9bfab8b9d] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 567.798483] env[61839]: ERROR nova.compute.manager [instance: ec27ab37-2351-4ad1-b41f-8de9bfab8b9d] nwinfo = self.network_api.allocate_for_instance( [ 567.798483] env[61839]: ERROR nova.compute.manager [instance: ec27ab37-2351-4ad1-b41f-8de9bfab8b9d] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 567.798483] env[61839]: ERROR nova.compute.manager [instance: ec27ab37-2351-4ad1-b41f-8de9bfab8b9d] created_port_ids = self._update_ports_for_instance( [ 567.798883] env[61839]: ERROR nova.compute.manager [instance: ec27ab37-2351-4ad1-b41f-8de9bfab8b9d] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 567.798883] env[61839]: ERROR nova.compute.manager [instance: ec27ab37-2351-4ad1-b41f-8de9bfab8b9d] with excutils.save_and_reraise_exception(): [ 567.798883] env[61839]: ERROR nova.compute.manager [instance: ec27ab37-2351-4ad1-b41f-8de9bfab8b9d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 567.798883] env[61839]: ERROR nova.compute.manager [instance: ec27ab37-2351-4ad1-b41f-8de9bfab8b9d] self.force_reraise() [ 567.798883] env[61839]: ERROR nova.compute.manager [instance: ec27ab37-2351-4ad1-b41f-8de9bfab8b9d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 567.798883] env[61839]: ERROR nova.compute.manager [instance: ec27ab37-2351-4ad1-b41f-8de9bfab8b9d] raise self.value [ 567.798883] env[61839]: ERROR nova.compute.manager [instance: ec27ab37-2351-4ad1-b41f-8de9bfab8b9d] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 567.798883] env[61839]: ERROR nova.compute.manager [instance: ec27ab37-2351-4ad1-b41f-8de9bfab8b9d] updated_port = self._update_port( [ 567.798883] env[61839]: ERROR nova.compute.manager [instance: ec27ab37-2351-4ad1-b41f-8de9bfab8b9d] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 567.798883] 
env[61839]: ERROR nova.compute.manager [instance: ec27ab37-2351-4ad1-b41f-8de9bfab8b9d] _ensure_no_port_binding_failure(port) [ 567.798883] env[61839]: ERROR nova.compute.manager [instance: ec27ab37-2351-4ad1-b41f-8de9bfab8b9d] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 567.798883] env[61839]: ERROR nova.compute.manager [instance: ec27ab37-2351-4ad1-b41f-8de9bfab8b9d] raise exception.PortBindingFailed(port_id=port['id']) [ 567.799257] env[61839]: ERROR nova.compute.manager [instance: ec27ab37-2351-4ad1-b41f-8de9bfab8b9d] nova.exception.PortBindingFailed: Binding failed for port 0a7d0ef0-1a14-4cc0-8b69-de52e22b919e, please check neutron logs for more information. [ 567.799257] env[61839]: ERROR nova.compute.manager [instance: ec27ab37-2351-4ad1-b41f-8de9bfab8b9d] [ 567.799257] env[61839]: INFO nova.compute.manager [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] [instance: ec27ab37-2351-4ad1-b41f-8de9bfab8b9d] Terminating instance [ 567.799257] env[61839]: DEBUG oslo_concurrency.lockutils [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] Acquiring lock "refresh_cache-ec27ab37-2351-4ad1-b41f-8de9bfab8b9d" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 567.800151] env[61839]: DEBUG oslo_concurrency.lockutils [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] Acquired lock "refresh_cache-ec27ab37-2351-4ad1-b41f-8de9bfab8b9d" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 567.800428] env[61839]: DEBUG nova.network.neutron [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] [instance: ec27ab37-2351-4ad1-b41f-8de9bfab8b9d] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 567.805179] env[61839]: WARNING nova.virt.vmwareapi.vmops [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] [instance: 29dcaaa2-04fe-4835-acc9-41c433e6165f] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 29dcaaa2-04fe-4835-acc9-41c433e6165f could not be found. [ 567.806236] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] [instance: 29dcaaa2-04fe-4835-acc9-41c433e6165f] Instance destroyed {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 567.806236] env[61839]: INFO nova.compute.manager [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] [instance: 29dcaaa2-04fe-4835-acc9-41c433e6165f] Took 0.04 seconds to destroy the instance on the hypervisor. 
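For instance 29dcaaa2 the backend lookup (SearchIndex.FindAllByUuid) found nothing, so vmops logged the InstanceNotFound as a warning and reported the instance destroyed anyway, letting cleanup proceed to network deallocation. A minimal sketch of that tolerant-destroy pattern, with the vmops helper treated as an assumed interface rather than Nova's exact signature:

    from oslo_log import log as logging

    from nova import exception

    LOG = logging.getLogger(__name__)

    def destroy_on_hypervisor(vmops, instance):
        # Missing-on-backend is expected when a spawn failed before a VM was
        # ever created, so it must not abort the cleanup path.
        try:
            vmops.destroy(instance)
        except exception.InstanceNotFound:
            LOG.warning('Instance does not exist on backend: %s', instance.uuid)
        # Either way the instance is treated as destroyed and
        # deallocate_for_instance() runs next, as the records below show.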
[ 567.806236] env[61839]: DEBUG oslo.service.loopingcall [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 567.806655] env[61839]: DEBUG nova.compute.manager [-] [instance: 29dcaaa2-04fe-4835-acc9-41c433e6165f] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 567.806707] env[61839]: DEBUG nova.network.neutron [-] [instance: 29dcaaa2-04fe-4835-acc9-41c433e6165f] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 567.865037] env[61839]: DEBUG nova.network.neutron [-] [instance: 29dcaaa2-04fe-4835-acc9-41c433e6165f] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 567.866725] env[61839]: DEBUG nova.network.neutron [None req-baec407e-7ec5-474d-acf6-1dc7891e7887 tempest-ImagesOneServerNegativeTestJSON-734658384 tempest-ImagesOneServerNegativeTestJSON-734658384-project-member] [instance: 0b78efda-51c7-4d51-be31-005ff0d44ede] Successfully created port: e2a37b48-e4f8-4dc0-ac05-2fc0ce8bc49f {{(pid=61839) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 567.937740] env[61839]: INFO nova.compute.manager [-] [instance: 2032b746-2161-4487-ae4c-6159313241f4] Took 1.03 seconds to deallocate network for instance. [ 567.941332] env[61839]: DEBUG nova.compute.claims [None req-0477abbc-1d43-4051-b701-9bb77cd00e21 tempest-ServerRescueTestJSON-1450719199 tempest-ServerRescueTestJSON-1450719199-project-member] [instance: 2032b746-2161-4487-ae4c-6159313241f4] Aborting claim: {{(pid=61839) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 567.941332] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0477abbc-1d43-4051-b701-9bb77cd00e21 tempest-ServerRescueTestJSON-1450719199 tempest-ServerRescueTestJSON-1450719199-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 568.188797] env[61839]: DEBUG nova.compute.manager [req-afb50811-a640-4b0c-88d8-964d6e56e2a1 req-ad78475c-f6d2-48ef-9ffe-1899bf5f3e96 service nova] [instance: d0822954-42c2-4003-baf4-97bc2ce65768] Received event network-vif-deleted-9a01ee46-d236-4b9b-8c66-098e5658a725 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 568.189112] env[61839]: DEBUG nova.compute.manager [req-afb50811-a640-4b0c-88d8-964d6e56e2a1 req-ad78475c-f6d2-48ef-9ffe-1899bf5f3e96 service nova] [instance: 2032b746-2161-4487-ae4c-6159313241f4] Received event network-changed-02380c98-1108-4354-a18d-f9cabaed08c3 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 568.189881] env[61839]: DEBUG nova.compute.manager [req-afb50811-a640-4b0c-88d8-964d6e56e2a1 req-ad78475c-f6d2-48ef-9ffe-1899bf5f3e96 service nova] [instance: 2032b746-2161-4487-ae4c-6159313241f4] Refreshing instance network info cache due to event network-changed-02380c98-1108-4354-a18d-f9cabaed08c3. 
{{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 568.189881] env[61839]: DEBUG oslo_concurrency.lockutils [req-afb50811-a640-4b0c-88d8-964d6e56e2a1 req-ad78475c-f6d2-48ef-9ffe-1899bf5f3e96 service nova] Acquiring lock "refresh_cache-2032b746-2161-4487-ae4c-6159313241f4" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 568.189881] env[61839]: DEBUG oslo_concurrency.lockutils [req-afb50811-a640-4b0c-88d8-964d6e56e2a1 req-ad78475c-f6d2-48ef-9ffe-1899bf5f3e96 service nova] Acquired lock "refresh_cache-2032b746-2161-4487-ae4c-6159313241f4" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 568.189881] env[61839]: DEBUG nova.network.neutron [req-afb50811-a640-4b0c-88d8-964d6e56e2a1 req-ad78475c-f6d2-48ef-9ffe-1899bf5f3e96 service nova] [instance: 2032b746-2161-4487-ae4c-6159313241f4] Refreshing network info cache for port 02380c98-1108-4354-a18d-f9cabaed08c3 {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 568.192482] env[61839]: DEBUG nova.compute.manager [req-068d0b93-13cf-4b94-9429-a2ff5aa64024 req-04d6850f-cc0f-4889-8cc3-5148b7a0e621 service nova] [instance: 29dcaaa2-04fe-4835-acc9-41c433e6165f] Received event network-changed-73783eb6-0ed5-45d6-b54a-aad8df2a83fd {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 568.192691] env[61839]: DEBUG nova.compute.manager [req-068d0b93-13cf-4b94-9429-a2ff5aa64024 req-04d6850f-cc0f-4889-8cc3-5148b7a0e621 service nova] [instance: 29dcaaa2-04fe-4835-acc9-41c433e6165f] Refreshing instance network info cache due to event network-changed-73783eb6-0ed5-45d6-b54a-aad8df2a83fd. {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 568.192890] env[61839]: DEBUG oslo_concurrency.lockutils [req-068d0b93-13cf-4b94-9429-a2ff5aa64024 req-04d6850f-cc0f-4889-8cc3-5148b7a0e621 service nova] Acquiring lock "refresh_cache-29dcaaa2-04fe-4835-acc9-41c433e6165f" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 568.193045] env[61839]: DEBUG oslo_concurrency.lockutils [req-068d0b93-13cf-4b94-9429-a2ff5aa64024 req-04d6850f-cc0f-4889-8cc3-5148b7a0e621 service nova] Acquired lock "refresh_cache-29dcaaa2-04fe-4835-acc9-41c433e6165f" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 568.193227] env[61839]: DEBUG nova.network.neutron [req-068d0b93-13cf-4b94-9429-a2ff5aa64024 req-04d6850f-cc0f-4889-8cc3-5148b7a0e621 service nova] [instance: 29dcaaa2-04fe-4835-acc9-41c433e6165f] Refreshing network info cache for port 73783eb6-0ed5-45d6-b54a-aad8df2a83fd {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 568.212135] env[61839]: DEBUG nova.network.neutron [None req-e2e7743f-119e-4c3a-a685-1f1242f17cae tempest-VolumesAssistedSnapshotsTest-1863052206 tempest-VolumesAssistedSnapshotsTest-1863052206-project-member] [instance: 9594f132-d558-4c75-872f-b1d1b7c08f66] Instance cache missing network info. 
{{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 568.367127] env[61839]: DEBUG nova.network.neutron [-] [instance: 29dcaaa2-04fe-4835-acc9-41c433e6165f] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 568.396445] env[61839]: DEBUG nova.network.neutron [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] [instance: ec27ab37-2351-4ad1-b41f-8de9bfab8b9d] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 568.557055] env[61839]: DEBUG oslo_concurrency.lockutils [None req-fcabcba5-2e8f-494e-bc5c-79e52812344e tempest-ServerActionsTestJSON-1845148809 tempest-ServerActionsTestJSON-1845148809-project-member] Acquiring lock "2432a14e-ec45-452c-9592-de690dbc102e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 568.558665] env[61839]: DEBUG oslo_concurrency.lockutils [None req-fcabcba5-2e8f-494e-bc5c-79e52812344e tempest-ServerActionsTestJSON-1845148809 tempest-ServerActionsTestJSON-1845148809-project-member] Lock "2432a14e-ec45-452c-9592-de690dbc102e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 568.612219] env[61839]: DEBUG nova.network.neutron [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] [instance: ec27ab37-2351-4ad1-b41f-8de9bfab8b9d] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 568.664471] env[61839]: DEBUG oslo_concurrency.lockutils [None req-540fdcb6-3f39-4f2c-bc6d-6e0b40f83f5f tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.014s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 568.665663] env[61839]: DEBUG oslo_concurrency.lockutils [None req-4a74a28c-d68f-49fe-a9f7-0d900d2e4df2 tempest-ServersWithSpecificFlavorTestJSON-2009030967 tempest-ServersWithSpecificFlavorTestJSON-2009030967-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 22.637s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 568.730741] env[61839]: DEBUG nova.network.neutron [req-afb50811-a640-4b0c-88d8-964d6e56e2a1 req-ad78475c-f6d2-48ef-9ffe-1899bf5f3e96 service nova] [instance: 2032b746-2161-4487-ae4c-6159313241f4] Instance cache missing network info. 
{{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 568.776345] env[61839]: DEBUG nova.network.neutron [None req-e2e7743f-119e-4c3a-a685-1f1242f17cae tempest-VolumesAssistedSnapshotsTest-1863052206 tempest-VolumesAssistedSnapshotsTest-1863052206-project-member] [instance: 9594f132-d558-4c75-872f-b1d1b7c08f66] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 568.780786] env[61839]: DEBUG nova.network.neutron [req-068d0b93-13cf-4b94-9429-a2ff5aa64024 req-04d6850f-cc0f-4889-8cc3-5148b7a0e621 service nova] [instance: 29dcaaa2-04fe-4835-acc9-41c433e6165f] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 568.870787] env[61839]: INFO nova.compute.manager [-] [instance: 29dcaaa2-04fe-4835-acc9-41c433e6165f] Took 1.06 seconds to deallocate network for instance. [ 568.874084] env[61839]: DEBUG nova.compute.claims [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] [instance: 29dcaaa2-04fe-4835-acc9-41c433e6165f] Aborting claim: {{(pid=61839) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 568.875015] env[61839]: DEBUG oslo_concurrency.lockutils [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 568.991947] env[61839]: DEBUG nova.network.neutron [req-afb50811-a640-4b0c-88d8-964d6e56e2a1 req-ad78475c-f6d2-48ef-9ffe-1899bf5f3e96 service nova] [instance: 2032b746-2161-4487-ae4c-6159313241f4] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 569.114851] env[61839]: DEBUG oslo_concurrency.lockutils [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] Releasing lock "refresh_cache-ec27ab37-2351-4ad1-b41f-8de9bfab8b9d" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 569.115562] env[61839]: DEBUG nova.compute.manager [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] [instance: ec27ab37-2351-4ad1-b41f-8de9bfab8b9d] Start destroying the instance on the hypervisor. 
{{(pid=61839) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 569.115772] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] [instance: ec27ab37-2351-4ad1-b41f-8de9bfab8b9d] Destroying instance {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 569.116107] env[61839]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-56bece09-2abc-4e4f-9c7e-1eb5768b90d7 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 569.128408] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53a4fc53-695d-42f0-a033-7c183b3f36bb {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 569.141300] env[61839]: DEBUG nova.network.neutron [req-068d0b93-13cf-4b94-9429-a2ff5aa64024 req-04d6850f-cc0f-4889-8cc3-5148b7a0e621 service nova] [instance: 29dcaaa2-04fe-4835-acc9-41c433e6165f] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 569.156364] env[61839]: WARNING nova.virt.vmwareapi.vmops [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] [instance: ec27ab37-2351-4ad1-b41f-8de9bfab8b9d] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance ec27ab37-2351-4ad1-b41f-8de9bfab8b9d could not be found. [ 569.156748] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] [instance: ec27ab37-2351-4ad1-b41f-8de9bfab8b9d] Instance destroyed {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 569.158025] env[61839]: INFO nova.compute.manager [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] [instance: ec27ab37-2351-4ad1-b41f-8de9bfab8b9d] Took 0.04 seconds to destroy the instance on the hypervisor. [ 569.158025] env[61839]: DEBUG oslo.service.loopingcall [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 569.158914] env[61839]: DEBUG nova.compute.manager [-] [instance: ec27ab37-2351-4ad1-b41f-8de9bfab8b9d] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 569.159064] env[61839]: DEBUG nova.network.neutron [-] [instance: ec27ab37-2351-4ad1-b41f-8de9bfab8b9d] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 569.213801] env[61839]: DEBUG nova.network.neutron [-] [instance: ec27ab37-2351-4ad1-b41f-8de9bfab8b9d] Instance cache missing network info. 
{{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 569.279752] env[61839]: DEBUG oslo_concurrency.lockutils [None req-e2e7743f-119e-4c3a-a685-1f1242f17cae tempest-VolumesAssistedSnapshotsTest-1863052206 tempest-VolumesAssistedSnapshotsTest-1863052206-project-member] Releasing lock "refresh_cache-9594f132-d558-4c75-872f-b1d1b7c08f66" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 569.280053] env[61839]: DEBUG nova.compute.manager [None req-e2e7743f-119e-4c3a-a685-1f1242f17cae tempest-VolumesAssistedSnapshotsTest-1863052206 tempest-VolumesAssistedSnapshotsTest-1863052206-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61839) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 569.280188] env[61839]: DEBUG nova.compute.manager [None req-e2e7743f-119e-4c3a-a685-1f1242f17cae tempest-VolumesAssistedSnapshotsTest-1863052206 tempest-VolumesAssistedSnapshotsTest-1863052206-project-member] [instance: 9594f132-d558-4c75-872f-b1d1b7c08f66] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 569.280356] env[61839]: DEBUG nova.network.neutron [None req-e2e7743f-119e-4c3a-a685-1f1242f17cae tempest-VolumesAssistedSnapshotsTest-1863052206 tempest-VolumesAssistedSnapshotsTest-1863052206-project-member] [instance: 9594f132-d558-4c75-872f-b1d1b7c08f66] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 569.345652] env[61839]: DEBUG nova.network.neutron [None req-e2e7743f-119e-4c3a-a685-1f1242f17cae tempest-VolumesAssistedSnapshotsTest-1863052206 tempest-VolumesAssistedSnapshotsTest-1863052206-project-member] [instance: 9594f132-d558-4c75-872f-b1d1b7c08f66] Instance cache missing network info. 
{{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 569.494382] env[61839]: DEBUG oslo_concurrency.lockutils [req-afb50811-a640-4b0c-88d8-964d6e56e2a1 req-ad78475c-f6d2-48ef-9ffe-1899bf5f3e96 service nova] Releasing lock "refresh_cache-2032b746-2161-4487-ae4c-6159313241f4" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 569.494674] env[61839]: DEBUG nova.compute.manager [req-afb50811-a640-4b0c-88d8-964d6e56e2a1 req-ad78475c-f6d2-48ef-9ffe-1899bf5f3e96 service nova] [instance: 2032b746-2161-4487-ae4c-6159313241f4] Received event network-vif-deleted-02380c98-1108-4354-a18d-f9cabaed08c3 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 569.589273] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-effa358e-2fc9-4d8a-8888-cd51fc1658e7 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 569.600514] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02179a7b-7eb2-46b3-8a7d-036f9afc8b94 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 569.638592] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c27a4c9-5658-4eef-8952-2667c8051284 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 569.644427] env[61839]: DEBUG oslo_concurrency.lockutils [req-068d0b93-13cf-4b94-9429-a2ff5aa64024 req-04d6850f-cc0f-4889-8cc3-5148b7a0e621 service nova] Releasing lock "refresh_cache-29dcaaa2-04fe-4835-acc9-41c433e6165f" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 569.648765] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-518bd3c1-6d95-4da6-82e0-e320c75d99cb {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 569.666593] env[61839]: DEBUG nova.compute.provider_tree [None req-4a74a28c-d68f-49fe-a9f7-0d900d2e4df2 tempest-ServersWithSpecificFlavorTestJSON-2009030967 tempest-ServersWithSpecificFlavorTestJSON-2009030967-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 569.717162] env[61839]: DEBUG nova.network.neutron [-] [instance: ec27ab37-2351-4ad1-b41f-8de9bfab8b9d] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 569.853428] env[61839]: DEBUG nova.network.neutron [None req-e2e7743f-119e-4c3a-a685-1f1242f17cae tempest-VolumesAssistedSnapshotsTest-1863052206 tempest-VolumesAssistedSnapshotsTest-1863052206-project-member] [instance: 9594f132-d558-4c75-872f-b1d1b7c08f66] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 570.171116] env[61839]: DEBUG nova.scheduler.client.report [None req-4a74a28c-d68f-49fe-a9f7-0d900d2e4df2 tempest-ServersWithSpecificFlavorTestJSON-2009030967 tempest-ServersWithSpecificFlavorTestJSON-2009030967-project-member] Inventory has not changed for provider 
cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 570.223657] env[61839]: INFO nova.compute.manager [-] [instance: ec27ab37-2351-4ad1-b41f-8de9bfab8b9d] Took 1.06 seconds to deallocate network for instance. [ 570.226333] env[61839]: DEBUG nova.compute.claims [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] [instance: ec27ab37-2351-4ad1-b41f-8de9bfab8b9d] Aborting claim: {{(pid=61839) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 570.226333] env[61839]: DEBUG oslo_concurrency.lockutils [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 570.358635] env[61839]: INFO nova.compute.manager [None req-e2e7743f-119e-4c3a-a685-1f1242f17cae tempest-VolumesAssistedSnapshotsTest-1863052206 tempest-VolumesAssistedSnapshotsTest-1863052206-project-member] [instance: 9594f132-d558-4c75-872f-b1d1b7c08f66] Took 1.08 seconds to deallocate network for instance. [ 570.680680] env[61839]: DEBUG oslo_concurrency.lockutils [None req-4a74a28c-d68f-49fe-a9f7-0d900d2e4df2 tempest-ServersWithSpecificFlavorTestJSON-2009030967 tempest-ServersWithSpecificFlavorTestJSON-2009030967-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.015s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 570.681208] env[61839]: ERROR nova.compute.manager [None req-4a74a28c-d68f-49fe-a9f7-0d900d2e4df2 tempest-ServersWithSpecificFlavorTestJSON-2009030967 tempest-ServersWithSpecificFlavorTestJSON-2009030967-project-member] [instance: d0ed5c81-e05a-41a3-9e45-ae0a2a235f16] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 1e5d8b9c-cb09-4618-9e5b-c0f9a0e2583c, please check neutron logs for more information. 
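The inventory record above is enough to derive what the scheduler can place on this node: per resource class, usable capacity is (total - reserved) * allocation_ratio, with max_unit capping any single allocation. A worked example over the logged values (standard Placement arithmetic, not Nova's exact code; the PortBindingFailed traceback for instance d0ed5c81 continues directly below):

    # Inventory as logged for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        usable = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(rc, usable)
    # -> VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0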
[ 570.681208] env[61839]: ERROR nova.compute.manager [instance: d0ed5c81-e05a-41a3-9e45-ae0a2a235f16] Traceback (most recent call last): [ 570.681208] env[61839]: ERROR nova.compute.manager [instance: d0ed5c81-e05a-41a3-9e45-ae0a2a235f16] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 570.681208] env[61839]: ERROR nova.compute.manager [instance: d0ed5c81-e05a-41a3-9e45-ae0a2a235f16] self.driver.spawn(context, instance, image_meta, [ 570.681208] env[61839]: ERROR nova.compute.manager [instance: d0ed5c81-e05a-41a3-9e45-ae0a2a235f16] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 570.681208] env[61839]: ERROR nova.compute.manager [instance: d0ed5c81-e05a-41a3-9e45-ae0a2a235f16] self._vmops.spawn(context, instance, image_meta, injected_files, [ 570.681208] env[61839]: ERROR nova.compute.manager [instance: d0ed5c81-e05a-41a3-9e45-ae0a2a235f16] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 570.681208] env[61839]: ERROR nova.compute.manager [instance: d0ed5c81-e05a-41a3-9e45-ae0a2a235f16] vm_ref = self.build_virtual_machine(instance, [ 570.681208] env[61839]: ERROR nova.compute.manager [instance: d0ed5c81-e05a-41a3-9e45-ae0a2a235f16] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 570.681208] env[61839]: ERROR nova.compute.manager [instance: d0ed5c81-e05a-41a3-9e45-ae0a2a235f16] vif_infos = vmwarevif.get_vif_info(self._session, [ 570.681208] env[61839]: ERROR nova.compute.manager [instance: d0ed5c81-e05a-41a3-9e45-ae0a2a235f16] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 570.681896] env[61839]: ERROR nova.compute.manager [instance: d0ed5c81-e05a-41a3-9e45-ae0a2a235f16] for vif in network_info: [ 570.681896] env[61839]: ERROR nova.compute.manager [instance: d0ed5c81-e05a-41a3-9e45-ae0a2a235f16] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 570.681896] env[61839]: ERROR nova.compute.manager [instance: d0ed5c81-e05a-41a3-9e45-ae0a2a235f16] return self._sync_wrapper(fn, *args, **kwargs) [ 570.681896] env[61839]: ERROR nova.compute.manager [instance: d0ed5c81-e05a-41a3-9e45-ae0a2a235f16] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 570.681896] env[61839]: ERROR nova.compute.manager [instance: d0ed5c81-e05a-41a3-9e45-ae0a2a235f16] self.wait() [ 570.681896] env[61839]: ERROR nova.compute.manager [instance: d0ed5c81-e05a-41a3-9e45-ae0a2a235f16] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 570.681896] env[61839]: ERROR nova.compute.manager [instance: d0ed5c81-e05a-41a3-9e45-ae0a2a235f16] self[:] = self._gt.wait() [ 570.681896] env[61839]: ERROR nova.compute.manager [instance: d0ed5c81-e05a-41a3-9e45-ae0a2a235f16] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 570.681896] env[61839]: ERROR nova.compute.manager [instance: d0ed5c81-e05a-41a3-9e45-ae0a2a235f16] return self._exit_event.wait() [ 570.681896] env[61839]: ERROR nova.compute.manager [instance: d0ed5c81-e05a-41a3-9e45-ae0a2a235f16] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 570.681896] env[61839]: ERROR nova.compute.manager [instance: d0ed5c81-e05a-41a3-9e45-ae0a2a235f16] result = hub.switch() [ 570.681896] env[61839]: ERROR nova.compute.manager [instance: d0ed5c81-e05a-41a3-9e45-ae0a2a235f16] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
570.681896] env[61839]: ERROR nova.compute.manager [instance: d0ed5c81-e05a-41a3-9e45-ae0a2a235f16] return self.greenlet.switch() [ 570.682324] env[61839]: ERROR nova.compute.manager [instance: d0ed5c81-e05a-41a3-9e45-ae0a2a235f16] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 570.682324] env[61839]: ERROR nova.compute.manager [instance: d0ed5c81-e05a-41a3-9e45-ae0a2a235f16] result = function(*args, **kwargs) [ 570.682324] env[61839]: ERROR nova.compute.manager [instance: d0ed5c81-e05a-41a3-9e45-ae0a2a235f16] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 570.682324] env[61839]: ERROR nova.compute.manager [instance: d0ed5c81-e05a-41a3-9e45-ae0a2a235f16] return func(*args, **kwargs) [ 570.682324] env[61839]: ERROR nova.compute.manager [instance: d0ed5c81-e05a-41a3-9e45-ae0a2a235f16] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 570.682324] env[61839]: ERROR nova.compute.manager [instance: d0ed5c81-e05a-41a3-9e45-ae0a2a235f16] raise e [ 570.682324] env[61839]: ERROR nova.compute.manager [instance: d0ed5c81-e05a-41a3-9e45-ae0a2a235f16] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 570.682324] env[61839]: ERROR nova.compute.manager [instance: d0ed5c81-e05a-41a3-9e45-ae0a2a235f16] nwinfo = self.network_api.allocate_for_instance( [ 570.682324] env[61839]: ERROR nova.compute.manager [instance: d0ed5c81-e05a-41a3-9e45-ae0a2a235f16] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 570.682324] env[61839]: ERROR nova.compute.manager [instance: d0ed5c81-e05a-41a3-9e45-ae0a2a235f16] created_port_ids = self._update_ports_for_instance( [ 570.682324] env[61839]: ERROR nova.compute.manager [instance: d0ed5c81-e05a-41a3-9e45-ae0a2a235f16] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 570.682324] env[61839]: ERROR nova.compute.manager [instance: d0ed5c81-e05a-41a3-9e45-ae0a2a235f16] with excutils.save_and_reraise_exception(): [ 570.682324] env[61839]: ERROR nova.compute.manager [instance: d0ed5c81-e05a-41a3-9e45-ae0a2a235f16] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 570.682719] env[61839]: ERROR nova.compute.manager [instance: d0ed5c81-e05a-41a3-9e45-ae0a2a235f16] self.force_reraise() [ 570.682719] env[61839]: ERROR nova.compute.manager [instance: d0ed5c81-e05a-41a3-9e45-ae0a2a235f16] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 570.682719] env[61839]: ERROR nova.compute.manager [instance: d0ed5c81-e05a-41a3-9e45-ae0a2a235f16] raise self.value [ 570.682719] env[61839]: ERROR nova.compute.manager [instance: d0ed5c81-e05a-41a3-9e45-ae0a2a235f16] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 570.682719] env[61839]: ERROR nova.compute.manager [instance: d0ed5c81-e05a-41a3-9e45-ae0a2a235f16] updated_port = self._update_port( [ 570.682719] env[61839]: ERROR nova.compute.manager [instance: d0ed5c81-e05a-41a3-9e45-ae0a2a235f16] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 570.682719] env[61839]: ERROR nova.compute.manager [instance: d0ed5c81-e05a-41a3-9e45-ae0a2a235f16] _ensure_no_port_binding_failure(port) [ 570.682719] env[61839]: ERROR nova.compute.manager [instance: d0ed5c81-e05a-41a3-9e45-ae0a2a235f16] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 570.682719] env[61839]: ERROR nova.compute.manager [instance: d0ed5c81-e05a-41a3-9e45-ae0a2a235f16] raise exception.PortBindingFailed(port_id=port['id']) [ 570.682719] env[61839]: ERROR nova.compute.manager [instance: d0ed5c81-e05a-41a3-9e45-ae0a2a235f16] nova.exception.PortBindingFailed: Binding failed for port 1e5d8b9c-cb09-4618-9e5b-c0f9a0e2583c, please check neutron logs for more information. [ 570.682719] env[61839]: ERROR nova.compute.manager [instance: d0ed5c81-e05a-41a3-9e45-ae0a2a235f16] [ 570.683178] env[61839]: DEBUG nova.compute.utils [None req-4a74a28c-d68f-49fe-a9f7-0d900d2e4df2 tempest-ServersWithSpecificFlavorTestJSON-2009030967 tempest-ServersWithSpecificFlavorTestJSON-2009030967-project-member] [instance: d0ed5c81-e05a-41a3-9e45-ae0a2a235f16] Binding failed for port 1e5d8b9c-cb09-4618-9e5b-c0f9a0e2583c, please check neutron logs for more information. {{(pid=61839) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 570.683300] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a1353c68-52e0-45ee-a66a-5ba6b6d870a4 tempest-InstanceActionsTestJSON-1284580064 tempest-InstanceActionsTestJSON-1284580064-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.298s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 570.684733] env[61839]: INFO nova.compute.claims [None req-a1353c68-52e0-45ee-a66a-5ba6b6d870a4 tempest-InstanceActionsTestJSON-1284580064 tempest-InstanceActionsTestJSON-1284580064-project-member] [instance: ae1917f8-29af-43cc-8397-3b9072acee6c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 570.690151] env[61839]: DEBUG nova.compute.manager [None req-4a74a28c-d68f-49fe-a9f7-0d900d2e4df2 tempest-ServersWithSpecificFlavorTestJSON-2009030967 tempest-ServersWithSpecificFlavorTestJSON-2009030967-project-member] [instance: d0ed5c81-e05a-41a3-9e45-ae0a2a235f16] Build of instance d0ed5c81-e05a-41a3-9e45-ae0a2a235f16 was re-scheduled: Binding failed for port 1e5d8b9c-cb09-4618-9e5b-c0f9a0e2583c, please check neutron logs for more information. 
{{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 570.690754] env[61839]: DEBUG nova.compute.manager [None req-4a74a28c-d68f-49fe-a9f7-0d900d2e4df2 tempest-ServersWithSpecificFlavorTestJSON-2009030967 tempest-ServersWithSpecificFlavorTestJSON-2009030967-project-member] [instance: d0ed5c81-e05a-41a3-9e45-ae0a2a235f16] Unplugging VIFs for instance {{(pid=61839) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 570.690989] env[61839]: DEBUG oslo_concurrency.lockutils [None req-4a74a28c-d68f-49fe-a9f7-0d900d2e4df2 tempest-ServersWithSpecificFlavorTestJSON-2009030967 tempest-ServersWithSpecificFlavorTestJSON-2009030967-project-member] Acquiring lock "refresh_cache-d0ed5c81-e05a-41a3-9e45-ae0a2a235f16" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 570.691154] env[61839]: DEBUG oslo_concurrency.lockutils [None req-4a74a28c-d68f-49fe-a9f7-0d900d2e4df2 tempest-ServersWithSpecificFlavorTestJSON-2009030967 tempest-ServersWithSpecificFlavorTestJSON-2009030967-project-member] Acquired lock "refresh_cache-d0ed5c81-e05a-41a3-9e45-ae0a2a235f16" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 570.691331] env[61839]: DEBUG nova.network.neutron [None req-4a74a28c-d68f-49fe-a9f7-0d900d2e4df2 tempest-ServersWithSpecificFlavorTestJSON-2009030967 tempest-ServersWithSpecificFlavorTestJSON-2009030967-project-member] [instance: d0ed5c81-e05a-41a3-9e45-ae0a2a235f16] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 570.885027] env[61839]: ERROR nova.compute.manager [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 6e1dec40-0217-4ce2-92ae-fb5211d70403, please check neutron logs for more information. 
[ 570.885027] env[61839]: ERROR nova.compute.manager Traceback (most recent call last): [ 570.885027] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 570.885027] env[61839]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 570.885027] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 570.885027] env[61839]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 570.885027] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 570.885027] env[61839]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 570.885027] env[61839]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 570.885027] env[61839]: ERROR nova.compute.manager self.force_reraise() [ 570.885027] env[61839]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 570.885027] env[61839]: ERROR nova.compute.manager raise self.value [ 570.885027] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 570.885027] env[61839]: ERROR nova.compute.manager updated_port = self._update_port( [ 570.885027] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 570.885027] env[61839]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 570.885885] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 570.885885] env[61839]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 570.885885] env[61839]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 6e1dec40-0217-4ce2-92ae-fb5211d70403, please check neutron logs for more information. 
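The repeated __exit__ and force_reraise frames in these tracebacks come from oslo.utils' save_and_reraise_exception context manager, which lets _update_ports_for_instance clean up already-created ports without losing the original exception. A minimal usage sketch; update_port and delete_port are placeholder helpers, not Nova's:

    from oslo_utils import excutils

    def update_port(port):
        """Placeholder for the Neutron port-update call."""
        return port

    def delete_port(port):
        """Placeholder rollback helper."""

    def update_ports_for_instance(ports):
        created = []
        for port in ports:
            try:
                created.append(update_port(port))
            except Exception:
                with excutils.save_and_reraise_exception():
                    # Cleanup runs while the original exception is saved; on
                    # exit force_reraise() re-raises it, which is the
                    # "raise self.value" frame seen in the logs.
                    for p in created:
                        delete_port(p)
        return created

The bare eventlet copy of the same traceback follows.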
[ 570.885885] env[61839]: ERROR nova.compute.manager [ 570.885885] env[61839]: Traceback (most recent call last): [ 570.885885] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 570.885885] env[61839]: listener.cb(fileno) [ 570.885885] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 570.885885] env[61839]: result = function(*args, **kwargs) [ 570.885885] env[61839]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 570.885885] env[61839]: return func(*args, **kwargs) [ 570.885885] env[61839]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 570.885885] env[61839]: raise e [ 570.885885] env[61839]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 570.885885] env[61839]: nwinfo = self.network_api.allocate_for_instance( [ 570.885885] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 570.885885] env[61839]: created_port_ids = self._update_ports_for_instance( [ 570.885885] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 570.885885] env[61839]: with excutils.save_and_reraise_exception(): [ 570.885885] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 570.885885] env[61839]: self.force_reraise() [ 570.885885] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 570.885885] env[61839]: raise self.value [ 570.885885] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 570.885885] env[61839]: updated_port = self._update_port( [ 570.885885] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 570.885885] env[61839]: _ensure_no_port_binding_failure(port) [ 570.885885] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 570.885885] env[61839]: raise exception.PortBindingFailed(port_id=port['id']) [ 570.886873] env[61839]: nova.exception.PortBindingFailed: Binding failed for port 6e1dec40-0217-4ce2-92ae-fb5211d70403, please check neutron logs for more information. 
[ 570.886873] env[61839]: Removing descriptor: 15 [ 570.886873] env[61839]: DEBUG oslo_concurrency.lockutils [None req-dd05ef5d-2024-4ee1-8efc-49a773076423 tempest-ServerAddressesTestJSON-1080876972 tempest-ServerAddressesTestJSON-1080876972-project-member] Acquiring lock "df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 570.886873] env[61839]: DEBUG oslo_concurrency.lockutils [None req-dd05ef5d-2024-4ee1-8efc-49a773076423 tempest-ServerAddressesTestJSON-1080876972 tempest-ServerAddressesTestJSON-1080876972-project-member] Lock "df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 570.886873] env[61839]: ERROR nova.compute.manager [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] [instance: 89843511-d201-431b-918d-e789e38e4f68] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 6e1dec40-0217-4ce2-92ae-fb5211d70403, please check neutron logs for more information. [ 570.886873] env[61839]: ERROR nova.compute.manager [instance: 89843511-d201-431b-918d-e789e38e4f68] Traceback (most recent call last): [ 570.886873] env[61839]: ERROR nova.compute.manager [instance: 89843511-d201-431b-918d-e789e38e4f68] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 570.886873] env[61839]: ERROR nova.compute.manager [instance: 89843511-d201-431b-918d-e789e38e4f68] yield resources [ 570.887205] env[61839]: ERROR nova.compute.manager [instance: 89843511-d201-431b-918d-e789e38e4f68] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 570.887205] env[61839]: ERROR nova.compute.manager [instance: 89843511-d201-431b-918d-e789e38e4f68] self.driver.spawn(context, instance, image_meta, [ 570.887205] env[61839]: ERROR nova.compute.manager [instance: 89843511-d201-431b-918d-e789e38e4f68] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 570.887205] env[61839]: ERROR nova.compute.manager [instance: 89843511-d201-431b-918d-e789e38e4f68] self._vmops.spawn(context, instance, image_meta, injected_files, [ 570.887205] env[61839]: ERROR nova.compute.manager [instance: 89843511-d201-431b-918d-e789e38e4f68] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 570.887205] env[61839]: ERROR nova.compute.manager [instance: 89843511-d201-431b-918d-e789e38e4f68] vm_ref = self.build_virtual_machine(instance, [ 570.887205] env[61839]: ERROR nova.compute.manager [instance: 89843511-d201-431b-918d-e789e38e4f68] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 570.887205] env[61839]: ERROR nova.compute.manager [instance: 89843511-d201-431b-918d-e789e38e4f68] vif_infos = vmwarevif.get_vif_info(self._session, [ 570.887205] env[61839]: ERROR nova.compute.manager [instance: 89843511-d201-431b-918d-e789e38e4f68] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 570.887205] env[61839]: ERROR nova.compute.manager [instance: 89843511-d201-431b-918d-e789e38e4f68] for vif in network_info:
[ 570.887205] env[61839]: ERROR nova.compute.manager [instance: 89843511-d201-431b-918d-e789e38e4f68] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 570.887205] env[61839]: ERROR nova.compute.manager [instance: 89843511-d201-431b-918d-e789e38e4f68] return self._sync_wrapper(fn, *args, **kwargs) [ 570.887205] env[61839]: ERROR nova.compute.manager [instance: 89843511-d201-431b-918d-e789e38e4f68] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 570.887658] env[61839]: ERROR nova.compute.manager [instance: 89843511-d201-431b-918d-e789e38e4f68] self.wait() [ 570.887658] env[61839]: ERROR nova.compute.manager [instance: 89843511-d201-431b-918d-e789e38e4f68] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 570.887658] env[61839]: ERROR nova.compute.manager [instance: 89843511-d201-431b-918d-e789e38e4f68] self[:] = self._gt.wait() [ 570.887658] env[61839]: ERROR nova.compute.manager [instance: 89843511-d201-431b-918d-e789e38e4f68] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 570.887658] env[61839]: ERROR nova.compute.manager [instance: 89843511-d201-431b-918d-e789e38e4f68] return self._exit_event.wait() [ 570.887658] env[61839]: ERROR nova.compute.manager [instance: 89843511-d201-431b-918d-e789e38e4f68] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 570.887658] env[61839]: ERROR nova.compute.manager [instance: 89843511-d201-431b-918d-e789e38e4f68] result = hub.switch() [ 570.887658] env[61839]: ERROR nova.compute.manager [instance: 89843511-d201-431b-918d-e789e38e4f68] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 570.887658] env[61839]: ERROR nova.compute.manager [instance: 89843511-d201-431b-918d-e789e38e4f68] return self.greenlet.switch() [ 570.887658] env[61839]: ERROR nova.compute.manager [instance: 89843511-d201-431b-918d-e789e38e4f68] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 570.887658] env[61839]: ERROR nova.compute.manager [instance: 89843511-d201-431b-918d-e789e38e4f68] result = function(*args, **kwargs) [ 570.887658] env[61839]: ERROR nova.compute.manager [instance: 89843511-d201-431b-918d-e789e38e4f68] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 570.887658] env[61839]: ERROR nova.compute.manager [instance: 89843511-d201-431b-918d-e789e38e4f68] return func(*args, **kwargs) [ 570.888041] env[61839]: ERROR nova.compute.manager [instance: 89843511-d201-431b-918d-e789e38e4f68] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 570.888041] env[61839]: ERROR nova.compute.manager [instance: 89843511-d201-431b-918d-e789e38e4f68] raise e [ 570.888041] env[61839]: ERROR nova.compute.manager [instance: 89843511-d201-431b-918d-e789e38e4f68] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 570.888041] env[61839]: ERROR nova.compute.manager [instance: 89843511-d201-431b-918d-e789e38e4f68] nwinfo = self.network_api.allocate_for_instance( [ 570.888041] env[61839]: ERROR nova.compute.manager [instance: 89843511-d201-431b-918d-e789e38e4f68] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 570.888041] env[61839]: ERROR nova.compute.manager [instance: 89843511-d201-431b-918d-e789e38e4f68] created_port_ids = self._update_ports_for_instance(
[ 570.888041] env[61839]: ERROR nova.compute.manager [instance: 89843511-d201-431b-918d-e789e38e4f68] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 570.888041] env[61839]: ERROR nova.compute.manager [instance: 89843511-d201-431b-918d-e789e38e4f68] with excutils.save_and_reraise_exception(): [ 570.888041] env[61839]: ERROR nova.compute.manager [instance: 89843511-d201-431b-918d-e789e38e4f68] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 570.888041] env[61839]: ERROR nova.compute.manager [instance: 89843511-d201-431b-918d-e789e38e4f68] self.force_reraise() [ 570.888041] env[61839]: ERROR nova.compute.manager [instance: 89843511-d201-431b-918d-e789e38e4f68] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 570.888041] env[61839]: ERROR nova.compute.manager [instance: 89843511-d201-431b-918d-e789e38e4f68] raise self.value [ 570.888041] env[61839]: ERROR nova.compute.manager [instance: 89843511-d201-431b-918d-e789e38e4f68] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 570.888413] env[61839]: ERROR nova.compute.manager [instance: 89843511-d201-431b-918d-e789e38e4f68] updated_port = self._update_port( [ 570.888413] env[61839]: ERROR nova.compute.manager [instance: 89843511-d201-431b-918d-e789e38e4f68] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 570.888413] env[61839]: ERROR nova.compute.manager [instance: 89843511-d201-431b-918d-e789e38e4f68] _ensure_no_port_binding_failure(port) [ 570.888413] env[61839]: ERROR nova.compute.manager [instance: 89843511-d201-431b-918d-e789e38e4f68] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 570.888413] env[61839]: ERROR nova.compute.manager [instance: 89843511-d201-431b-918d-e789e38e4f68] raise exception.PortBindingFailed(port_id=port['id']) [ 570.888413] env[61839]: ERROR nova.compute.manager [instance: 89843511-d201-431b-918d-e789e38e4f68] nova.exception.PortBindingFailed: Binding failed for port 6e1dec40-0217-4ce2-92ae-fb5211d70403, please check neutron logs for more information.
[ 570.888413] env[61839]: ERROR nova.compute.manager [instance: 89843511-d201-431b-918d-e789e38e4f68] [ 570.888413] env[61839]: INFO nova.compute.manager [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] [instance: 89843511-d201-431b-918d-e789e38e4f68] Terminating instance [ 570.890142] env[61839]: DEBUG oslo_concurrency.lockutils [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] Acquiring lock "refresh_cache-89843511-d201-431b-918d-e789e38e4f68" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 570.890142] env[61839]: DEBUG oslo_concurrency.lockutils [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] Acquired lock "refresh_cache-89843511-d201-431b-918d-e789e38e4f68" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 570.890142] env[61839]: DEBUG nova.network.neutron [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] [instance: 89843511-d201-431b-918d-e789e38e4f68] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 571.255335] env[61839]: DEBUG nova.network.neutron [None req-4a74a28c-d68f-49fe-a9f7-0d900d2e4df2 tempest-ServersWithSpecificFlavorTestJSON-2009030967 tempest-ServersWithSpecificFlavorTestJSON-2009030967-project-member] [instance: d0ed5c81-e05a-41a3-9e45-ae0a2a235f16] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 571.300044] env[61839]: DEBUG nova.compute.manager [req-8e3e701e-eeec-435c-b798-08cc607539d4 req-3a2a2cfc-169e-435b-a185-375f893206dd service nova] [instance: ec27ab37-2351-4ad1-b41f-8de9bfab8b9d] Received event network-changed-0a7d0ef0-1a14-4cc0-8b69-de52e22b919e {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 571.300044] env[61839]: DEBUG nova.compute.manager [req-8e3e701e-eeec-435c-b798-08cc607539d4 req-3a2a2cfc-169e-435b-a185-375f893206dd service nova] [instance: ec27ab37-2351-4ad1-b41f-8de9bfab8b9d] Refreshing instance network info cache due to event network-changed-0a7d0ef0-1a14-4cc0-8b69-de52e22b919e. {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}}
[ 571.300044] env[61839]: DEBUG oslo_concurrency.lockutils [req-8e3e701e-eeec-435c-b798-08cc607539d4 req-3a2a2cfc-169e-435b-a185-375f893206dd service nova] Acquiring lock "refresh_cache-ec27ab37-2351-4ad1-b41f-8de9bfab8b9d" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 571.300044] env[61839]: DEBUG oslo_concurrency.lockutils [req-8e3e701e-eeec-435c-b798-08cc607539d4 req-3a2a2cfc-169e-435b-a185-375f893206dd service nova] Acquired lock "refresh_cache-ec27ab37-2351-4ad1-b41f-8de9bfab8b9d" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 571.300044] env[61839]: DEBUG nova.network.neutron [req-8e3e701e-eeec-435c-b798-08cc607539d4 req-3a2a2cfc-169e-435b-a185-375f893206dd service nova] [instance: ec27ab37-2351-4ad1-b41f-8de9bfab8b9d] Refreshing network info cache for port 0a7d0ef0-1a14-4cc0-8b69-de52e22b919e {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 571.318552] env[61839]: DEBUG nova.compute.manager [req-bd536317-f02c-45bd-b8e8-a08060507158 req-5cd975f4-74d2-4289-a365-5cc8a24fcb18 service nova] [instance: 29dcaaa2-04fe-4835-acc9-41c433e6165f] Received event network-vif-deleted-73783eb6-0ed5-45d6-b54a-aad8df2a83fd {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 571.395041] env[61839]: INFO nova.scheduler.client.report [None req-e2e7743f-119e-4c3a-a685-1f1242f17cae tempest-VolumesAssistedSnapshotsTest-1863052206 tempest-VolumesAssistedSnapshotsTest-1863052206-project-member] Deleted allocations for instance 9594f132-d558-4c75-872f-b1d1b7c08f66 [ 571.438067] env[61839]: DEBUG nova.network.neutron [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] [instance: 89843511-d201-431b-918d-e789e38e4f68] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 571.575409] env[61839]: DEBUG nova.network.neutron [None req-4a74a28c-d68f-49fe-a9f7-0d900d2e4df2 tempest-ServersWithSpecificFlavorTestJSON-2009030967 tempest-ServersWithSpecificFlavorTestJSON-2009030967-project-member] [instance: d0ed5c81-e05a-41a3-9e45-ae0a2a235f16] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 571.691677] env[61839]: DEBUG oslo_concurrency.lockutils [None req-06f607c6-5c8c-4086-84f6-74587be86126 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] Acquiring lock "81ba4888-4b21-410f-ab86-a3068995836f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 571.691983] env[61839]: DEBUG oslo_concurrency.lockutils [None req-06f607c6-5c8c-4086-84f6-74587be86126 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] Lock "81ba4888-4b21-410f-ab86-a3068995836f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 571.746177] env[61839]: DEBUG nova.network.neutron [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] [instance: 89843511-d201-431b-918d-e789e38e4f68] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 571.803405] env[61839]: ERROR nova.compute.manager [None req-baec407e-7ec5-474d-acf6-1dc7891e7887 tempest-ImagesOneServerNegativeTestJSON-734658384 tempest-ImagesOneServerNegativeTestJSON-734658384-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port e2a37b48-e4f8-4dc0-ac05-2fc0ce8bc49f, please check neutron logs for more information.
[ 571.803405] env[61839]: ERROR nova.compute.manager Traceback (most recent call last): [ 571.803405] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 571.803405] env[61839]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 571.803405] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 571.803405] env[61839]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 571.803405] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 571.803405] env[61839]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 571.803405] env[61839]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 571.803405] env[61839]: ERROR nova.compute.manager self.force_reraise() [ 571.803405] env[61839]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 571.803405] env[61839]: ERROR nova.compute.manager raise self.value [ 571.803405] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 571.803405] env[61839]: ERROR nova.compute.manager updated_port = self._update_port( [ 571.803405] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 571.803405] env[61839]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 571.803979] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 571.803979] env[61839]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 571.803979] env[61839]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port e2a37b48-e4f8-4dc0-ac05-2fc0ce8bc49f, please check neutron logs for more information. 
[ 571.803979] env[61839]: ERROR nova.compute.manager [ 571.803979] env[61839]: Traceback (most recent call last): [ 571.804163] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 571.804163] env[61839]: listener.cb(fileno) [ 571.804163] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 571.804163] env[61839]: result = function(*args, **kwargs) [ 571.804163] env[61839]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 571.804163] env[61839]: return func(*args, **kwargs) [ 571.804163] env[61839]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 571.804163] env[61839]: raise e [ 571.804163] env[61839]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 571.804163] env[61839]: nwinfo = self.network_api.allocate_for_instance( [ 571.804163] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 571.804163] env[61839]: created_port_ids = self._update_ports_for_instance( [ 571.804163] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 571.804163] env[61839]: with excutils.save_and_reraise_exception(): [ 571.804163] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 571.804163] env[61839]: self.force_reraise() [ 571.804163] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 571.804163] env[61839]: raise self.value [ 571.804163] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 571.804163] env[61839]: updated_port = self._update_port( [ 571.804163] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 571.804163] env[61839]: _ensure_no_port_binding_failure(port) [ 571.804163] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 571.804163] env[61839]: raise exception.PortBindingFailed(port_id=port['id']) [ 571.804163] env[61839]: nova.exception.PortBindingFailed: Binding failed for port e2a37b48-e4f8-4dc0-ac05-2fc0ce8bc49f, please check neutron logs for more information. [ 571.804163] env[61839]: Removing descriptor: 17 [ 571.807846] env[61839]: ERROR nova.compute.manager [None req-baec407e-7ec5-474d-acf6-1dc7891e7887 tempest-ImagesOneServerNegativeTestJSON-734658384 tempest-ImagesOneServerNegativeTestJSON-734658384-project-member] [instance: 0b78efda-51c7-4d51-be31-005ff0d44ede] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port e2a37b48-e4f8-4dc0-ac05-2fc0ce8bc49f, please check neutron logs for more information. 
[ 571.807846] env[61839]: ERROR nova.compute.manager [instance: 0b78efda-51c7-4d51-be31-005ff0d44ede] Traceback (most recent call last): [ 571.807846] env[61839]: ERROR nova.compute.manager [instance: 0b78efda-51c7-4d51-be31-005ff0d44ede] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 571.807846] env[61839]: ERROR nova.compute.manager [instance: 0b78efda-51c7-4d51-be31-005ff0d44ede] yield resources [ 571.807846] env[61839]: ERROR nova.compute.manager [instance: 0b78efda-51c7-4d51-be31-005ff0d44ede] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 571.807846] env[61839]: ERROR nova.compute.manager [instance: 0b78efda-51c7-4d51-be31-005ff0d44ede] self.driver.spawn(context, instance, image_meta, [ 571.807846] env[61839]: ERROR nova.compute.manager [instance: 0b78efda-51c7-4d51-be31-005ff0d44ede] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 571.807846] env[61839]: ERROR nova.compute.manager [instance: 0b78efda-51c7-4d51-be31-005ff0d44ede] self._vmops.spawn(context, instance, image_meta, injected_files, [ 571.807846] env[61839]: ERROR nova.compute.manager [instance: 0b78efda-51c7-4d51-be31-005ff0d44ede] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 571.807846] env[61839]: ERROR nova.compute.manager [instance: 0b78efda-51c7-4d51-be31-005ff0d44ede] vm_ref = self.build_virtual_machine(instance, [ 571.807846] env[61839]: ERROR nova.compute.manager [instance: 0b78efda-51c7-4d51-be31-005ff0d44ede] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 571.808265] env[61839]: ERROR nova.compute.manager [instance: 0b78efda-51c7-4d51-be31-005ff0d44ede] vif_infos = vmwarevif.get_vif_info(self._session, [ 571.808265] env[61839]: ERROR nova.compute.manager [instance: 0b78efda-51c7-4d51-be31-005ff0d44ede] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 571.808265] env[61839]: ERROR nova.compute.manager [instance: 0b78efda-51c7-4d51-be31-005ff0d44ede] for vif in network_info: [ 571.808265] env[61839]: ERROR nova.compute.manager [instance: 0b78efda-51c7-4d51-be31-005ff0d44ede] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 571.808265] env[61839]: ERROR nova.compute.manager [instance: 0b78efda-51c7-4d51-be31-005ff0d44ede] return self._sync_wrapper(fn, *args, **kwargs) [ 571.808265] env[61839]: ERROR nova.compute.manager [instance: 0b78efda-51c7-4d51-be31-005ff0d44ede] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 571.808265] env[61839]: ERROR nova.compute.manager [instance: 0b78efda-51c7-4d51-be31-005ff0d44ede] self.wait() [ 571.808265] env[61839]: ERROR nova.compute.manager [instance: 0b78efda-51c7-4d51-be31-005ff0d44ede] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 571.808265] env[61839]: ERROR nova.compute.manager [instance: 0b78efda-51c7-4d51-be31-005ff0d44ede] self[:] = self._gt.wait() [ 571.808265] env[61839]: ERROR nova.compute.manager [instance: 0b78efda-51c7-4d51-be31-005ff0d44ede] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 571.808265] env[61839]: ERROR nova.compute.manager [instance: 0b78efda-51c7-4d51-be31-005ff0d44ede] return self._exit_event.wait() [ 571.808265] env[61839]: ERROR nova.compute.manager [instance: 0b78efda-51c7-4d51-be31-005ff0d44ede] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 571.808265] env[61839]: ERROR nova.compute.manager [instance: 0b78efda-51c7-4d51-be31-005ff0d44ede] result = hub.switch() [ 571.808704] env[61839]: ERROR nova.compute.manager [instance: 0b78efda-51c7-4d51-be31-005ff0d44ede] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 571.808704] env[61839]: ERROR nova.compute.manager [instance: 0b78efda-51c7-4d51-be31-005ff0d44ede] return self.greenlet.switch() [ 571.808704] env[61839]: ERROR nova.compute.manager [instance: 0b78efda-51c7-4d51-be31-005ff0d44ede] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 571.808704] env[61839]: ERROR nova.compute.manager [instance: 0b78efda-51c7-4d51-be31-005ff0d44ede] result = function(*args, **kwargs) [ 571.808704] env[61839]: ERROR nova.compute.manager [instance: 0b78efda-51c7-4d51-be31-005ff0d44ede] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 571.808704] env[61839]: ERROR nova.compute.manager [instance: 0b78efda-51c7-4d51-be31-005ff0d44ede] return func(*args, **kwargs) [ 571.808704] env[61839]: ERROR nova.compute.manager [instance: 0b78efda-51c7-4d51-be31-005ff0d44ede] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 571.808704] env[61839]: ERROR nova.compute.manager [instance: 0b78efda-51c7-4d51-be31-005ff0d44ede] raise e [ 571.808704] env[61839]: ERROR nova.compute.manager [instance: 0b78efda-51c7-4d51-be31-005ff0d44ede] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 571.808704] env[61839]: ERROR nova.compute.manager [instance: 0b78efda-51c7-4d51-be31-005ff0d44ede] nwinfo = self.network_api.allocate_for_instance( [ 571.808704] env[61839]: ERROR nova.compute.manager [instance: 0b78efda-51c7-4d51-be31-005ff0d44ede] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 571.808704] env[61839]: ERROR nova.compute.manager [instance: 0b78efda-51c7-4d51-be31-005ff0d44ede] created_port_ids = self._update_ports_for_instance( [ 571.808704] env[61839]: ERROR nova.compute.manager [instance: 0b78efda-51c7-4d51-be31-005ff0d44ede] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 571.809158] env[61839]: ERROR nova.compute.manager [instance: 0b78efda-51c7-4d51-be31-005ff0d44ede] with excutils.save_and_reraise_exception(): [ 571.809158] env[61839]: ERROR nova.compute.manager [instance: 0b78efda-51c7-4d51-be31-005ff0d44ede] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 571.809158] env[61839]: ERROR nova.compute.manager [instance: 0b78efda-51c7-4d51-be31-005ff0d44ede] self.force_reraise() [ 571.809158] env[61839]: ERROR nova.compute.manager [instance: 0b78efda-51c7-4d51-be31-005ff0d44ede] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 571.809158] env[61839]: ERROR nova.compute.manager [instance: 0b78efda-51c7-4d51-be31-005ff0d44ede] raise self.value [ 571.809158] env[61839]: ERROR nova.compute.manager [instance: 0b78efda-51c7-4d51-be31-005ff0d44ede] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 571.809158] env[61839]: ERROR nova.compute.manager [instance: 0b78efda-51c7-4d51-be31-005ff0d44ede] updated_port = self._update_port( [ 571.809158] env[61839]: ERROR nova.compute.manager [instance: 0b78efda-51c7-4d51-be31-005ff0d44ede] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port
[ 571.809158] env[61839]: ERROR nova.compute.manager [instance: 0b78efda-51c7-4d51-be31-005ff0d44ede] _ensure_no_port_binding_failure(port) [ 571.809158] env[61839]: ERROR nova.compute.manager [instance: 0b78efda-51c7-4d51-be31-005ff0d44ede] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 571.809158] env[61839]: ERROR nova.compute.manager [instance: 0b78efda-51c7-4d51-be31-005ff0d44ede] raise exception.PortBindingFailed(port_id=port['id']) [ 571.809158] env[61839]: ERROR nova.compute.manager [instance: 0b78efda-51c7-4d51-be31-005ff0d44ede] nova.exception.PortBindingFailed: Binding failed for port e2a37b48-e4f8-4dc0-ac05-2fc0ce8bc49f, please check neutron logs for more information. [ 571.809158] env[61839]: ERROR nova.compute.manager [instance: 0b78efda-51c7-4d51-be31-005ff0d44ede] [ 571.809593] env[61839]: INFO nova.compute.manager [None req-baec407e-7ec5-474d-acf6-1dc7891e7887 tempest-ImagesOneServerNegativeTestJSON-734658384 tempest-ImagesOneServerNegativeTestJSON-734658384-project-member] [instance: 0b78efda-51c7-4d51-be31-005ff0d44ede] Terminating instance [ 571.816093] env[61839]: DEBUG oslo_concurrency.lockutils [None req-baec407e-7ec5-474d-acf6-1dc7891e7887 tempest-ImagesOneServerNegativeTestJSON-734658384 tempest-ImagesOneServerNegativeTestJSON-734658384-project-member] Acquiring lock "refresh_cache-0b78efda-51c7-4d51-be31-005ff0d44ede" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 571.816093] env[61839]: DEBUG oslo_concurrency.lockutils [None req-baec407e-7ec5-474d-acf6-1dc7891e7887 tempest-ImagesOneServerNegativeTestJSON-734658384 tempest-ImagesOneServerNegativeTestJSON-734658384-project-member] Acquired lock "refresh_cache-0b78efda-51c7-4d51-be31-005ff0d44ede" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 571.816093] env[61839]: DEBUG nova.network.neutron [None req-baec407e-7ec5-474d-acf6-1dc7891e7887 tempest-ImagesOneServerNegativeTestJSON-734658384 tempest-ImagesOneServerNegativeTestJSON-734658384-project-member] [instance: 0b78efda-51c7-4d51-be31-005ff0d44ede] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 571.835472] env[61839]: DEBUG nova.network.neutron [req-8e3e701e-eeec-435c-b798-08cc607539d4 req-3a2a2cfc-169e-435b-a185-375f893206dd service nova] [instance: ec27ab37-2351-4ad1-b41f-8de9bfab8b9d] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 571.902758] env[61839]: DEBUG oslo_concurrency.lockutils [None req-e2e7743f-119e-4c3a-a685-1f1242f17cae tempest-VolumesAssistedSnapshotsTest-1863052206 tempest-VolumesAssistedSnapshotsTest-1863052206-project-member] Lock "9594f132-d558-4c75-872f-b1d1b7c08f66" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 49.081s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 572.025234] env[61839]: DEBUG nova.network.neutron [req-8e3e701e-eeec-435c-b798-08cc607539d4 req-3a2a2cfc-169e-435b-a185-375f893206dd service nova] [instance: ec27ab37-2351-4ad1-b41f-8de9bfab8b9d] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 572.068912] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8c086b0-0906-40c4-85b7-00700c33455e {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.078480] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-010c46a6-e0a5-496d-b288-b7a15cbd72c0 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.081489] env[61839]: DEBUG oslo_concurrency.lockutils [None req-4a74a28c-d68f-49fe-a9f7-0d900d2e4df2 tempest-ServersWithSpecificFlavorTestJSON-2009030967 tempest-ServersWithSpecificFlavorTestJSON-2009030967-project-member] Releasing lock "refresh_cache-d0ed5c81-e05a-41a3-9e45-ae0a2a235f16" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 572.083299] env[61839]: DEBUG nova.compute.manager [None req-4a74a28c-d68f-49fe-a9f7-0d900d2e4df2 tempest-ServersWithSpecificFlavorTestJSON-2009030967 tempest-ServersWithSpecificFlavorTestJSON-2009030967-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61839) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 572.083299] env[61839]: DEBUG nova.compute.manager [None req-4a74a28c-d68f-49fe-a9f7-0d900d2e4df2 tempest-ServersWithSpecificFlavorTestJSON-2009030967 tempest-ServersWithSpecificFlavorTestJSON-2009030967-project-member] [instance: d0ed5c81-e05a-41a3-9e45-ae0a2a235f16] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 572.083299] env[61839]: DEBUG nova.network.neutron [None req-4a74a28c-d68f-49fe-a9f7-0d900d2e4df2 tempest-ServersWithSpecificFlavorTestJSON-2009030967 tempest-ServersWithSpecificFlavorTestJSON-2009030967-project-member] [instance: d0ed5c81-e05a-41a3-9e45-ae0a2a235f16] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 572.111768] env[61839]: DEBUG nova.network.neutron [None req-4a74a28c-d68f-49fe-a9f7-0d900d2e4df2 tempest-ServersWithSpecificFlavorTestJSON-2009030967 tempest-ServersWithSpecificFlavorTestJSON-2009030967-project-member] [instance: d0ed5c81-e05a-41a3-9e45-ae0a2a235f16] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 572.113455] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99c8f2a7-21d2-4719-af3c-02220647d3c3 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.122102] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1d9cdf3-9c06-4d6f-80bb-f7be499da98b {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.138420] env[61839]: DEBUG nova.compute.provider_tree [None req-a1353c68-52e0-45ee-a66a-5ba6b6d870a4 tempest-InstanceActionsTestJSON-1284580064 tempest-InstanceActionsTestJSON-1284580064-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 572.249586] env[61839]: DEBUG oslo_concurrency.lockutils [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] Releasing lock "refresh_cache-89843511-d201-431b-918d-e789e38e4f68" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 572.249586] env[61839]: DEBUG nova.compute.manager [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] [instance: 89843511-d201-431b-918d-e789e38e4f68] Start destroying the instance on the hypervisor. {{(pid=61839) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 572.250643] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] [instance: 89843511-d201-431b-918d-e789e38e4f68] Destroying instance {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 572.250643] env[61839]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3f02e50c-1cca-449e-97cc-a13f6e77a93d {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.258945] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfd0e672-33e0-442e-bb30-7181630ef1c9 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.280838] env[61839]: WARNING nova.virt.vmwareapi.vmops [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] [instance: 89843511-d201-431b-918d-e789e38e4f68] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 89843511-d201-431b-918d-e789e38e4f68 could not be found.
[ 572.281054] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] [instance: 89843511-d201-431b-918d-e789e38e4f68] Instance destroyed {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 572.281236] env[61839]: INFO nova.compute.manager [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] [instance: 89843511-d201-431b-918d-e789e38e4f68] Took 0.03 seconds to destroy the instance on the hypervisor. [ 572.281509] env[61839]: DEBUG oslo.service.loopingcall [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 572.281720] env[61839]: DEBUG nova.compute.manager [-] [instance: 89843511-d201-431b-918d-e789e38e4f68] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 572.281807] env[61839]: DEBUG nova.network.neutron [-] [instance: 89843511-d201-431b-918d-e789e38e4f68] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 572.300029] env[61839]: DEBUG nova.network.neutron [-] [instance: 89843511-d201-431b-918d-e789e38e4f68] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 572.332495] env[61839]: DEBUG nova.network.neutron [None req-baec407e-7ec5-474d-acf6-1dc7891e7887 tempest-ImagesOneServerNegativeTestJSON-734658384 tempest-ImagesOneServerNegativeTestJSON-734658384-project-member] [instance: 0b78efda-51c7-4d51-be31-005ff0d44ede] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 572.405394] env[61839]: DEBUG nova.compute.manager [None req-6d60a164-1a17-4d70-8bc1-56b11131db97 tempest-InstanceActionsV221TestJSON-240629897 tempest-InstanceActionsV221TestJSON-240629897-project-member] [instance: 1edc2966-2edc-453e-a80d-c4139d910a6d] Starting instance... {{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}}
[ 572.478752] env[61839]: DEBUG nova.network.neutron [None req-baec407e-7ec5-474d-acf6-1dc7891e7887 tempest-ImagesOneServerNegativeTestJSON-734658384 tempest-ImagesOneServerNegativeTestJSON-734658384-project-member] [instance: 0b78efda-51c7-4d51-be31-005ff0d44ede] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 572.528896] env[61839]: DEBUG oslo_concurrency.lockutils [req-8e3e701e-eeec-435c-b798-08cc607539d4 req-3a2a2cfc-169e-435b-a185-375f893206dd service nova] Releasing lock "refresh_cache-ec27ab37-2351-4ad1-b41f-8de9bfab8b9d" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 572.529241] env[61839]: DEBUG nova.compute.manager [req-8e3e701e-eeec-435c-b798-08cc607539d4 req-3a2a2cfc-169e-435b-a185-375f893206dd service nova] [instance: ec27ab37-2351-4ad1-b41f-8de9bfab8b9d] Received event network-vif-deleted-0a7d0ef0-1a14-4cc0-8b69-de52e22b919e {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 572.529431] env[61839]: DEBUG nova.compute.manager [req-8e3e701e-eeec-435c-b798-08cc607539d4 req-3a2a2cfc-169e-435b-a185-375f893206dd service nova] [instance: 89843511-d201-431b-918d-e789e38e4f68] Received event network-changed-6e1dec40-0217-4ce2-92ae-fb5211d70403 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 572.531681] env[61839]: DEBUG nova.compute.manager [req-8e3e701e-eeec-435c-b798-08cc607539d4 req-3a2a2cfc-169e-435b-a185-375f893206dd service nova] [instance: 89843511-d201-431b-918d-e789e38e4f68] Refreshing instance network info cache due to event network-changed-6e1dec40-0217-4ce2-92ae-fb5211d70403. {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}}
[ 572.534194] env[61839]: DEBUG oslo_concurrency.lockutils [req-8e3e701e-eeec-435c-b798-08cc607539d4 req-3a2a2cfc-169e-435b-a185-375f893206dd service nova] Acquiring lock "refresh_cache-89843511-d201-431b-918d-e789e38e4f68" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 572.534414] env[61839]: DEBUG oslo_concurrency.lockutils [req-8e3e701e-eeec-435c-b798-08cc607539d4 req-3a2a2cfc-169e-435b-a185-375f893206dd service nova] Acquired lock "refresh_cache-89843511-d201-431b-918d-e789e38e4f68" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 572.534607] env[61839]: DEBUG nova.network.neutron [req-8e3e701e-eeec-435c-b798-08cc607539d4 req-3a2a2cfc-169e-435b-a185-375f893206dd service nova] [instance: 89843511-d201-431b-918d-e789e38e4f68] Refreshing network info cache for port 6e1dec40-0217-4ce2-92ae-fb5211d70403 {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 572.619256] env[61839]: DEBUG nova.network.neutron [None req-4a74a28c-d68f-49fe-a9f7-0d900d2e4df2 tempest-ServersWithSpecificFlavorTestJSON-2009030967 tempest-ServersWithSpecificFlavorTestJSON-2009030967-project-member] [instance: d0ed5c81-e05a-41a3-9e45-ae0a2a235f16] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 572.641780] env[61839]: DEBUG nova.scheduler.client.report [None req-a1353c68-52e0-45ee-a66a-5ba6b6d870a4 tempest-InstanceActionsTestJSON-1284580064 tempest-InstanceActionsTestJSON-1284580064-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 572.749033] env[61839]: DEBUG oslo_concurrency.lockutils [None req-9e174940-155b-4c43-946d-348c70405c68 tempest-ServersTestJSON-188267051 tempest-ServersTestJSON-188267051-project-member] Acquiring lock "2619b21e-084f-4003-af13-80382bfb1e2f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 572.749033] env[61839]: DEBUG oslo_concurrency.lockutils [None req-9e174940-155b-4c43-946d-348c70405c68 tempest-ServersTestJSON-188267051 tempest-ServersTestJSON-188267051-project-member] Lock "2619b21e-084f-4003-af13-80382bfb1e2f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 572.803628] env[61839]: DEBUG nova.network.neutron [-] [instance: 89843511-d201-431b-918d-e789e38e4f68] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 572.932035] env[61839]: DEBUG oslo_concurrency.lockutils [None req-6d60a164-1a17-4d70-8bc1-56b11131db97 tempest-InstanceActionsV221TestJSON-240629897 tempest-InstanceActionsV221TestJSON-240629897-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 572.981712] env[61839]: DEBUG oslo_concurrency.lockutils [None req-baec407e-7ec5-474d-acf6-1dc7891e7887 tempest-ImagesOneServerNegativeTestJSON-734658384 tempest-ImagesOneServerNegativeTestJSON-734658384-project-member] Releasing lock "refresh_cache-0b78efda-51c7-4d51-be31-005ff0d44ede" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 572.981877] env[61839]: DEBUG nova.compute.manager [None req-baec407e-7ec5-474d-acf6-1dc7891e7887 tempest-ImagesOneServerNegativeTestJSON-734658384 tempest-ImagesOneServerNegativeTestJSON-734658384-project-member] [instance: 0b78efda-51c7-4d51-be31-005ff0d44ede] Start destroying the instance on the hypervisor. {{(pid=61839) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 572.982928] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-baec407e-7ec5-474d-acf6-1dc7891e7887 tempest-ImagesOneServerNegativeTestJSON-734658384 tempest-ImagesOneServerNegativeTestJSON-734658384-project-member] [instance: 0b78efda-51c7-4d51-be31-005ff0d44ede] Destroying instance {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 572.982928] env[61839]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b231f196-2ab7-42f8-83dd-516d97fe8746 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.991812] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b17df74-8695-46fe-b32a-b1dde4d20c70 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.017111] env[61839]: WARNING nova.virt.vmwareapi.vmops [None req-baec407e-7ec5-474d-acf6-1dc7891e7887 tempest-ImagesOneServerNegativeTestJSON-734658384 tempest-ImagesOneServerNegativeTestJSON-734658384-project-member] [instance: 0b78efda-51c7-4d51-be31-005ff0d44ede] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 0b78efda-51c7-4d51-be31-005ff0d44ede could not be found. [ 573.017362] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-baec407e-7ec5-474d-acf6-1dc7891e7887 tempest-ImagesOneServerNegativeTestJSON-734658384 tempest-ImagesOneServerNegativeTestJSON-734658384-project-member] [instance: 0b78efda-51c7-4d51-be31-005ff0d44ede] Instance destroyed {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 573.017554] env[61839]: INFO nova.compute.manager [None req-baec407e-7ec5-474d-acf6-1dc7891e7887 tempest-ImagesOneServerNegativeTestJSON-734658384 tempest-ImagesOneServerNegativeTestJSON-734658384-project-member] [instance: 0b78efda-51c7-4d51-be31-005ff0d44ede] Took 0.04 seconds to destroy the instance on the hypervisor. [ 573.017834] env[61839]: DEBUG oslo.service.loopingcall [None req-baec407e-7ec5-474d-acf6-1dc7891e7887 tempest-ImagesOneServerNegativeTestJSON-734658384 tempest-ImagesOneServerNegativeTestJSON-734658384-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 573.018114] env[61839]: DEBUG nova.compute.manager [-] [instance: 0b78efda-51c7-4d51-be31-005ff0d44ede] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 573.018159] env[61839]: DEBUG nova.network.neutron [-] [instance: 0b78efda-51c7-4d51-be31-005ff0d44ede] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 573.059388] env[61839]: DEBUG nova.network.neutron [-] [instance: 0b78efda-51c7-4d51-be31-005ff0d44ede] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 573.065872] env[61839]: DEBUG nova.network.neutron [req-8e3e701e-eeec-435c-b798-08cc607539d4 req-3a2a2cfc-169e-435b-a185-375f893206dd service nova] [instance: 89843511-d201-431b-918d-e789e38e4f68] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 573.124737] env[61839]: INFO nova.compute.manager [None req-4a74a28c-d68f-49fe-a9f7-0d900d2e4df2 tempest-ServersWithSpecificFlavorTestJSON-2009030967 tempest-ServersWithSpecificFlavorTestJSON-2009030967-project-member] [instance: d0ed5c81-e05a-41a3-9e45-ae0a2a235f16] Took 1.04 seconds to deallocate network for instance. [ 573.150959] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a1353c68-52e0-45ee-a66a-5ba6b6d870a4 tempest-InstanceActionsTestJSON-1284580064 tempest-InstanceActionsTestJSON-1284580064-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.468s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 573.151527] env[61839]: DEBUG nova.compute.manager [None req-a1353c68-52e0-45ee-a66a-5ba6b6d870a4 tempest-InstanceActionsTestJSON-1284580064 tempest-InstanceActionsTestJSON-1284580064-project-member] [instance: ae1917f8-29af-43cc-8397-3b9072acee6c] Start building networks asynchronously for instance. {{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 573.154200] env[61839]: DEBUG oslo_concurrency.lockutils [None req-285e2342-47a9-4a7d-855a-b08b1b98031b tempest-MigrationsAdminTest-1877210458 tempest-MigrationsAdminTest-1877210458-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 23.901s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 573.220951] env[61839]: DEBUG nova.network.neutron [req-8e3e701e-eeec-435c-b798-08cc607539d4 req-3a2a2cfc-169e-435b-a185-375f893206dd service nova] [instance: 89843511-d201-431b-918d-e789e38e4f68] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 573.307144] env[61839]: INFO nova.compute.manager [-] [instance: 89843511-d201-431b-918d-e789e38e4f68] Took 1.02 seconds to deallocate network for instance.
[ 573.310026] env[61839]: DEBUG nova.compute.claims [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] [instance: 89843511-d201-431b-918d-e789e38e4f68] Aborting claim: {{(pid=61839) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 573.310026] env[61839]: DEBUG oslo_concurrency.lockutils [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 573.373321] env[61839]: DEBUG oslo_concurrency.lockutils [None req-7cd3e466-5165-49d9-934b-74ecfbe9a0b9 tempest-ServersV294TestFqdnHostnames-1206786293 tempest-ServersV294TestFqdnHostnames-1206786293-project-member] Acquiring lock "0ab450ab-6416-464d-8140-a8c320abb69c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 573.373321] env[61839]: DEBUG oslo_concurrency.lockutils [None req-7cd3e466-5165-49d9-934b-74ecfbe9a0b9 tempest-ServersV294TestFqdnHostnames-1206786293 tempest-ServersV294TestFqdnHostnames-1206786293-project-member] Lock "0ab450ab-6416-464d-8140-a8c320abb69c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 573.561978] env[61839]: DEBUG nova.network.neutron [-] [instance: 0b78efda-51c7-4d51-be31-005ff0d44ede] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 573.659961] env[61839]: DEBUG nova.compute.utils [None req-a1353c68-52e0-45ee-a66a-5ba6b6d870a4 tempest-InstanceActionsTestJSON-1284580064 tempest-InstanceActionsTestJSON-1284580064-project-member] Using /dev/sd instead of None {{(pid=61839) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 573.672428] env[61839]: DEBUG nova.compute.manager [None req-a1353c68-52e0-45ee-a66a-5ba6b6d870a4 tempest-InstanceActionsTestJSON-1284580064 tempest-InstanceActionsTestJSON-1284580064-project-member] [instance: ae1917f8-29af-43cc-8397-3b9072acee6c] Allocating IP information in the background. {{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}}
[ 573.672700] env[61839]: DEBUG nova.network.neutron [None req-a1353c68-52e0-45ee-a66a-5ba6b6d870a4 tempest-InstanceActionsTestJSON-1284580064 tempest-InstanceActionsTestJSON-1284580064-project-member] [instance: ae1917f8-29af-43cc-8397-3b9072acee6c] allocate_for_instance() {{(pid=61839) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 573.726430] env[61839]: DEBUG oslo_concurrency.lockutils [req-8e3e701e-eeec-435c-b798-08cc607539d4 req-3a2a2cfc-169e-435b-a185-375f893206dd service nova] Releasing lock "refresh_cache-89843511-d201-431b-918d-e789e38e4f68" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 573.764827] env[61839]: DEBUG nova.policy [None req-a1353c68-52e0-45ee-a66a-5ba6b6d870a4 tempest-InstanceActionsTestJSON-1284580064 tempest-InstanceActionsTestJSON-1284580064-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3c5797efc45b46949228984c402eedf6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'faf034f98a6e4b928bb109e7b1cade8a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61839) authorize /opt/stack/nova/nova/policy.py:201}} [ 573.904131] env[61839]: DEBUG nova.compute.manager [req-c74d016f-27a0-4c76-85d6-6d10fceaf937 req-9a762ef1-457d-481d-a706-44e9d94ea361 service nova] [instance: 89843511-d201-431b-918d-e789e38e4f68] Received event network-vif-deleted-6e1dec40-0217-4ce2-92ae-fb5211d70403 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 573.904131] env[61839]: DEBUG nova.compute.manager [req-c74d016f-27a0-4c76-85d6-6d10fceaf937 req-9a762ef1-457d-481d-a706-44e9d94ea361 service nova] [instance: 0b78efda-51c7-4d51-be31-005ff0d44ede] Received event network-changed-e2a37b48-e4f8-4dc0-ac05-2fc0ce8bc49f {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 573.904131] env[61839]: DEBUG nova.compute.manager [req-c74d016f-27a0-4c76-85d6-6d10fceaf937 req-9a762ef1-457d-481d-a706-44e9d94ea361 service nova] [instance: 0b78efda-51c7-4d51-be31-005ff0d44ede] Refreshing instance network info cache due to event network-changed-e2a37b48-e4f8-4dc0-ac05-2fc0ce8bc49f. {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}}
[ 573.904131] env[61839]: DEBUG oslo_concurrency.lockutils [req-c74d016f-27a0-4c76-85d6-6d10fceaf937 req-9a762ef1-457d-481d-a706-44e9d94ea361 service nova] Acquiring lock "refresh_cache-0b78efda-51c7-4d51-be31-005ff0d44ede" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 573.904131] env[61839]: DEBUG oslo_concurrency.lockutils [req-c74d016f-27a0-4c76-85d6-6d10fceaf937 req-9a762ef1-457d-481d-a706-44e9d94ea361 service nova] Acquired lock "refresh_cache-0b78efda-51c7-4d51-be31-005ff0d44ede" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 573.906113] env[61839]: DEBUG nova.network.neutron [req-c74d016f-27a0-4c76-85d6-6d10fceaf937 req-9a762ef1-457d-481d-a706-44e9d94ea361 service nova] [instance: 0b78efda-51c7-4d51-be31-005ff0d44ede] Refreshing network info cache for port e2a37b48-e4f8-4dc0-ac05-2fc0ce8bc49f {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 574.070282] env[61839]: INFO nova.compute.manager [-] [instance: 0b78efda-51c7-4d51-be31-005ff0d44ede] Took 1.05 seconds to deallocate network for instance. [ 574.073413] env[61839]: DEBUG nova.compute.claims [None req-baec407e-7ec5-474d-acf6-1dc7891e7887 tempest-ImagesOneServerNegativeTestJSON-734658384 tempest-ImagesOneServerNegativeTestJSON-734658384-project-member] [instance: 0b78efda-51c7-4d51-be31-005ff0d44ede] Aborting claim: {{(pid=61839) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 574.073655] env[61839]: DEBUG oslo_concurrency.lockutils [None req-baec407e-7ec5-474d-acf6-1dc7891e7887 tempest-ImagesOneServerNegativeTestJSON-734658384 tempest-ImagesOneServerNegativeTestJSON-734658384-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 574.100169] env[61839]: DEBUG oslo_concurrency.lockutils [None req-66411a82-44a4-4e09-b7e5-387750763c23 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Acquiring lock "3ea7af26-14b2-4371-a4f4-48afc190d4bc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 574.100169] env[61839]: DEBUG oslo_concurrency.lockutils [None req-66411a82-44a4-4e09-b7e5-387750763c23 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Lock "3ea7af26-14b2-4371-a4f4-48afc190d4bc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 574.177435] env[61839]: DEBUG nova.compute.manager [None req-a1353c68-52e0-45ee-a66a-5ba6b6d870a4 tempest-InstanceActionsTestJSON-1284580064 tempest-InstanceActionsTestJSON-1284580064-project-member] [instance: ae1917f8-29af-43cc-8397-3b9072acee6c] Start building block device mappings for instance. {{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}}
[ 574.203941] env[61839]: INFO nova.scheduler.client.report [None req-4a74a28c-d68f-49fe-a9f7-0d900d2e4df2 tempest-ServersWithSpecificFlavorTestJSON-2009030967 tempest-ServersWithSpecificFlavorTestJSON-2009030967-project-member] Deleted allocations for instance d0ed5c81-e05a-41a3-9e45-ae0a2a235f16 [ 574.221063] env[61839]: DEBUG nova.network.neutron [None req-a1353c68-52e0-45ee-a66a-5ba6b6d870a4 tempest-InstanceActionsTestJSON-1284580064 tempest-InstanceActionsTestJSON-1284580064-project-member] [instance: ae1917f8-29af-43cc-8397-3b9072acee6c] Successfully created port: 788a594f-0cb1-4669-b641-d1f34ed13cc8 {{(pid=61839) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 574.245730] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08c43849-4bff-46ed-bb85-e2bf9d74a25b {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.255679] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1768b71e-81ad-4760-a2f3-548cdbd66672 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.294583] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-748ef368-4aa4-4a8d-9a66-0ab79d48eb06 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.302690] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3925127-8328-40a4-b901-d4a1e8389b08 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.316679] env[61839]: DEBUG nova.compute.provider_tree [None req-285e2342-47a9-4a7d-855a-b08b1b98031b tempest-MigrationsAdminTest-1877210458 tempest-MigrationsAdminTest-1877210458-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 574.450504] env[61839]: DEBUG nova.network.neutron [req-c74d016f-27a0-4c76-85d6-6d10fceaf937 req-9a762ef1-457d-481d-a706-44e9d94ea361 service nova] [instance: 0b78efda-51c7-4d51-be31-005ff0d44ede] Instance cache missing network info.
{{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 574.659756] env[61839]: DEBUG nova.network.neutron [req-c74d016f-27a0-4c76-85d6-6d10fceaf937 req-9a762ef1-457d-481d-a706-44e9d94ea361 service nova] [instance: 0b78efda-51c7-4d51-be31-005ff0d44ede] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 574.720502] env[61839]: DEBUG oslo_concurrency.lockutils [None req-4a74a28c-d68f-49fe-a9f7-0d900d2e4df2 tempest-ServersWithSpecificFlavorTestJSON-2009030967 tempest-ServersWithSpecificFlavorTestJSON-2009030967-project-member] Lock "d0ed5c81-e05a-41a3-9e45-ae0a2a235f16" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 45.490s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 574.819700] env[61839]: DEBUG nova.scheduler.client.report [None req-285e2342-47a9-4a7d-855a-b08b1b98031b tempest-MigrationsAdminTest-1877210458 tempest-MigrationsAdminTest-1877210458-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 575.165315] env[61839]: DEBUG oslo_concurrency.lockutils [req-c74d016f-27a0-4c76-85d6-6d10fceaf937 req-9a762ef1-457d-481d-a706-44e9d94ea361 service nova] Releasing lock "refresh_cache-0b78efda-51c7-4d51-be31-005ff0d44ede" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 575.165315] env[61839]: DEBUG nova.compute.manager [req-c74d016f-27a0-4c76-85d6-6d10fceaf937 req-9a762ef1-457d-481d-a706-44e9d94ea361 service nova] [instance: 0b78efda-51c7-4d51-be31-005ff0d44ede] Received event network-vif-deleted-e2a37b48-e4f8-4dc0-ac05-2fc0ce8bc49f {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 575.187646] env[61839]: DEBUG nova.compute.manager [None req-a1353c68-52e0-45ee-a66a-5ba6b6d870a4 tempest-InstanceActionsTestJSON-1284580064 tempest-InstanceActionsTestJSON-1284580064-project-member] [instance: ae1917f8-29af-43cc-8397-3b9072acee6c] Start spawning the instance on the hypervisor. 
{{(pid=61839) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 575.216348] env[61839]: DEBUG nova.virt.hardware [None req-a1353c68-52e0-45ee-a66a-5ba6b6d870a4 tempest-InstanceActionsTestJSON-1284580064 tempest-InstanceActionsTestJSON-1284580064-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-18T16:51:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-18T16:50:57Z,direct_url=,disk_format='vmdk',id=e497cc62-282a-4a70-9770-22d80d8a1013,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a98e034276e44534a9feace637762da7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-18T16:50:58Z,virtual_size=,visibility=), allow threads: False {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 575.216600] env[61839]: DEBUG nova.virt.hardware [None req-a1353c68-52e0-45ee-a66a-5ba6b6d870a4 tempest-InstanceActionsTestJSON-1284580064 tempest-InstanceActionsTestJSON-1284580064-project-member] Flavor limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 575.217216] env[61839]: DEBUG nova.virt.hardware [None req-a1353c68-52e0-45ee-a66a-5ba6b6d870a4 tempest-InstanceActionsTestJSON-1284580064 tempest-InstanceActionsTestJSON-1284580064-project-member] Image limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 575.217216] env[61839]: DEBUG nova.virt.hardware [None req-a1353c68-52e0-45ee-a66a-5ba6b6d870a4 tempest-InstanceActionsTestJSON-1284580064 tempest-InstanceActionsTestJSON-1284580064-project-member] Flavor pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 575.217216] env[61839]: DEBUG nova.virt.hardware [None req-a1353c68-52e0-45ee-a66a-5ba6b6d870a4 tempest-InstanceActionsTestJSON-1284580064 tempest-InstanceActionsTestJSON-1284580064-project-member] Image pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 575.217216] env[61839]: DEBUG nova.virt.hardware [None req-a1353c68-52e0-45ee-a66a-5ba6b6d870a4 tempest-InstanceActionsTestJSON-1284580064 tempest-InstanceActionsTestJSON-1284580064-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 575.218751] env[61839]: DEBUG nova.virt.hardware [None req-a1353c68-52e0-45ee-a66a-5ba6b6d870a4 tempest-InstanceActionsTestJSON-1284580064 tempest-InstanceActionsTestJSON-1284580064-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 575.218980] env[61839]: DEBUG nova.virt.hardware [None req-a1353c68-52e0-45ee-a66a-5ba6b6d870a4 tempest-InstanceActionsTestJSON-1284580064 tempest-InstanceActionsTestJSON-1284580064-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 575.219217] env[61839]: DEBUG 
nova.virt.hardware [None req-a1353c68-52e0-45ee-a66a-5ba6b6d870a4 tempest-InstanceActionsTestJSON-1284580064 tempest-InstanceActionsTestJSON-1284580064-project-member] Got 1 possible topologies {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 575.219476] env[61839]: DEBUG nova.virt.hardware [None req-a1353c68-52e0-45ee-a66a-5ba6b6d870a4 tempest-InstanceActionsTestJSON-1284580064 tempest-InstanceActionsTestJSON-1284580064-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 575.220033] env[61839]: DEBUG nova.virt.hardware [None req-a1353c68-52e0-45ee-a66a-5ba6b6d870a4 tempest-InstanceActionsTestJSON-1284580064 tempest-InstanceActionsTestJSON-1284580064-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 575.221553] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a081a5a8-1f15-4998-8f72-2a649349b0dd {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.227756] env[61839]: DEBUG nova.compute.manager [None req-35b58d39-5733-4319-be91-b1dc8063c680 tempest-FloatingIPsAssociationNegativeTestJSON-254000745 tempest-FloatingIPsAssociationNegativeTestJSON-254000745-project-member] [instance: 9e14bd1a-d6c2-4f4a-8919-27647ae5742b] Starting instance... {{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 575.238017] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d5b0c5e-372b-493f-94e8-82e7b26f7faf {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.329122] env[61839]: DEBUG oslo_concurrency.lockutils [None req-285e2342-47a9-4a7d-855a-b08b1b98031b tempest-MigrationsAdminTest-1877210458 tempest-MigrationsAdminTest-1877210458-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.175s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 575.329821] env[61839]: ERROR nova.compute.manager [None req-285e2342-47a9-4a7d-855a-b08b1b98031b tempest-MigrationsAdminTest-1877210458 tempest-MigrationsAdminTest-1877210458-project-member] [instance: 248ced8d-af78-4400-b9eb-449851b1bfa3] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 52531475-8b0c-433d-85e9-c0b56de31422, please check neutron logs for more information. 
[ 575.329821] env[61839]: ERROR nova.compute.manager [instance: 248ced8d-af78-4400-b9eb-449851b1bfa3] Traceback (most recent call last): [ 575.329821] env[61839]: ERROR nova.compute.manager [instance: 248ced8d-af78-4400-b9eb-449851b1bfa3] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 575.329821] env[61839]: ERROR nova.compute.manager [instance: 248ced8d-af78-4400-b9eb-449851b1bfa3] self.driver.spawn(context, instance, image_meta, [ 575.329821] env[61839]: ERROR nova.compute.manager [instance: 248ced8d-af78-4400-b9eb-449851b1bfa3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 575.329821] env[61839]: ERROR nova.compute.manager [instance: 248ced8d-af78-4400-b9eb-449851b1bfa3] self._vmops.spawn(context, instance, image_meta, injected_files, [ 575.329821] env[61839]: ERROR nova.compute.manager [instance: 248ced8d-af78-4400-b9eb-449851b1bfa3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 575.329821] env[61839]: ERROR nova.compute.manager [instance: 248ced8d-af78-4400-b9eb-449851b1bfa3] vm_ref = self.build_virtual_machine(instance, [ 575.329821] env[61839]: ERROR nova.compute.manager [instance: 248ced8d-af78-4400-b9eb-449851b1bfa3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 575.329821] env[61839]: ERROR nova.compute.manager [instance: 248ced8d-af78-4400-b9eb-449851b1bfa3] vif_infos = vmwarevif.get_vif_info(self._session, [ 575.329821] env[61839]: ERROR nova.compute.manager [instance: 248ced8d-af78-4400-b9eb-449851b1bfa3] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 575.330274] env[61839]: ERROR nova.compute.manager [instance: 248ced8d-af78-4400-b9eb-449851b1bfa3] for vif in network_info: [ 575.330274] env[61839]: ERROR nova.compute.manager [instance: 248ced8d-af78-4400-b9eb-449851b1bfa3] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 575.330274] env[61839]: ERROR nova.compute.manager [instance: 248ced8d-af78-4400-b9eb-449851b1bfa3] return self._sync_wrapper(fn, *args, **kwargs) [ 575.330274] env[61839]: ERROR nova.compute.manager [instance: 248ced8d-af78-4400-b9eb-449851b1bfa3] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 575.330274] env[61839]: ERROR nova.compute.manager [instance: 248ced8d-af78-4400-b9eb-449851b1bfa3] self.wait() [ 575.330274] env[61839]: ERROR nova.compute.manager [instance: 248ced8d-af78-4400-b9eb-449851b1bfa3] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 575.330274] env[61839]: ERROR nova.compute.manager [instance: 248ced8d-af78-4400-b9eb-449851b1bfa3] self[:] = self._gt.wait() [ 575.330274] env[61839]: ERROR nova.compute.manager [instance: 248ced8d-af78-4400-b9eb-449851b1bfa3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 575.330274] env[61839]: ERROR nova.compute.manager [instance: 248ced8d-af78-4400-b9eb-449851b1bfa3] return self._exit_event.wait() [ 575.330274] env[61839]: ERROR nova.compute.manager [instance: 248ced8d-af78-4400-b9eb-449851b1bfa3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 575.330274] env[61839]: ERROR nova.compute.manager [instance: 248ced8d-af78-4400-b9eb-449851b1bfa3] result = hub.switch() [ 575.330274] env[61839]: ERROR nova.compute.manager [instance: 248ced8d-af78-4400-b9eb-449851b1bfa3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
575.330274] env[61839]: ERROR nova.compute.manager [instance: 248ced8d-af78-4400-b9eb-449851b1bfa3] return self.greenlet.switch() [ 575.330693] env[61839]: ERROR nova.compute.manager [instance: 248ced8d-af78-4400-b9eb-449851b1bfa3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 575.330693] env[61839]: ERROR nova.compute.manager [instance: 248ced8d-af78-4400-b9eb-449851b1bfa3] result = function(*args, **kwargs) [ 575.330693] env[61839]: ERROR nova.compute.manager [instance: 248ced8d-af78-4400-b9eb-449851b1bfa3] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 575.330693] env[61839]: ERROR nova.compute.manager [instance: 248ced8d-af78-4400-b9eb-449851b1bfa3] return func(*args, **kwargs) [ 575.330693] env[61839]: ERROR nova.compute.manager [instance: 248ced8d-af78-4400-b9eb-449851b1bfa3] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 575.330693] env[61839]: ERROR nova.compute.manager [instance: 248ced8d-af78-4400-b9eb-449851b1bfa3] raise e [ 575.330693] env[61839]: ERROR nova.compute.manager [instance: 248ced8d-af78-4400-b9eb-449851b1bfa3] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 575.330693] env[61839]: ERROR nova.compute.manager [instance: 248ced8d-af78-4400-b9eb-449851b1bfa3] nwinfo = self.network_api.allocate_for_instance( [ 575.330693] env[61839]: ERROR nova.compute.manager [instance: 248ced8d-af78-4400-b9eb-449851b1bfa3] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 575.330693] env[61839]: ERROR nova.compute.manager [instance: 248ced8d-af78-4400-b9eb-449851b1bfa3] created_port_ids = self._update_ports_for_instance( [ 575.330693] env[61839]: ERROR nova.compute.manager [instance: 248ced8d-af78-4400-b9eb-449851b1bfa3] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 575.330693] env[61839]: ERROR nova.compute.manager [instance: 248ced8d-af78-4400-b9eb-449851b1bfa3] with excutils.save_and_reraise_exception(): [ 575.330693] env[61839]: ERROR nova.compute.manager [instance: 248ced8d-af78-4400-b9eb-449851b1bfa3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 575.331119] env[61839]: ERROR nova.compute.manager [instance: 248ced8d-af78-4400-b9eb-449851b1bfa3] self.force_reraise() [ 575.331119] env[61839]: ERROR nova.compute.manager [instance: 248ced8d-af78-4400-b9eb-449851b1bfa3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 575.331119] env[61839]: ERROR nova.compute.manager [instance: 248ced8d-af78-4400-b9eb-449851b1bfa3] raise self.value [ 575.331119] env[61839]: ERROR nova.compute.manager [instance: 248ced8d-af78-4400-b9eb-449851b1bfa3] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 575.331119] env[61839]: ERROR nova.compute.manager [instance: 248ced8d-af78-4400-b9eb-449851b1bfa3] updated_port = self._update_port( [ 575.331119] env[61839]: ERROR nova.compute.manager [instance: 248ced8d-af78-4400-b9eb-449851b1bfa3] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 575.331119] env[61839]: ERROR nova.compute.manager [instance: 248ced8d-af78-4400-b9eb-449851b1bfa3] _ensure_no_port_binding_failure(port) [ 575.331119] env[61839]: ERROR nova.compute.manager [instance: 248ced8d-af78-4400-b9eb-449851b1bfa3] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 575.331119] env[61839]: ERROR nova.compute.manager [instance: 248ced8d-af78-4400-b9eb-449851b1bfa3] raise exception.PortBindingFailed(port_id=port['id']) [ 575.331119] env[61839]: ERROR nova.compute.manager [instance: 248ced8d-af78-4400-b9eb-449851b1bfa3] nova.exception.PortBindingFailed: Binding failed for port 52531475-8b0c-433d-85e9-c0b56de31422, please check neutron logs for more information. [ 575.331119] env[61839]: ERROR nova.compute.manager [instance: 248ced8d-af78-4400-b9eb-449851b1bfa3] [ 575.331552] env[61839]: DEBUG nova.compute.utils [None req-285e2342-47a9-4a7d-855a-b08b1b98031b tempest-MigrationsAdminTest-1877210458 tempest-MigrationsAdminTest-1877210458-project-member] [instance: 248ced8d-af78-4400-b9eb-449851b1bfa3] Binding failed for port 52531475-8b0c-433d-85e9-c0b56de31422, please check neutron logs for more information. {{(pid=61839) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 575.331778] env[61839]: DEBUG oslo_concurrency.lockutils [None req-6b23937a-feae-42bc-8255-54b6d335ea09 tempest-ServersAdmin275Test-1522278051 tempest-ServersAdmin275Test-1522278051-project-admin] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 20.227s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 575.331956] env[61839]: DEBUG nova.objects.instance [None req-6b23937a-feae-42bc-8255-54b6d335ea09 tempest-ServersAdmin275Test-1522278051 tempest-ServersAdmin275Test-1522278051-project-admin] [instance: 211e8267-3c33-42c8-852f-1c20d7987453] Trying to apply a migration context that does not seem to be set for this instance {{(pid=61839) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 575.335097] env[61839]: DEBUG nova.compute.manager [None req-285e2342-47a9-4a7d-855a-b08b1b98031b tempest-MigrationsAdminTest-1877210458 tempest-MigrationsAdminTest-1877210458-project-member] [instance: 248ced8d-af78-4400-b9eb-449851b1bfa3] Build of instance 248ced8d-af78-4400-b9eb-449851b1bfa3 was re-scheduled: Binding failed for port 52531475-8b0c-433d-85e9-c0b56de31422, please check neutron logs for more information. 
{{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 575.335581] env[61839]: DEBUG nova.compute.manager [None req-285e2342-47a9-4a7d-855a-b08b1b98031b tempest-MigrationsAdminTest-1877210458 tempest-MigrationsAdminTest-1877210458-project-member] [instance: 248ced8d-af78-4400-b9eb-449851b1bfa3] Unplugging VIFs for instance {{(pid=61839) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 575.335958] env[61839]: DEBUG oslo_concurrency.lockutils [None req-285e2342-47a9-4a7d-855a-b08b1b98031b tempest-MigrationsAdminTest-1877210458 tempest-MigrationsAdminTest-1877210458-project-member] Acquiring lock "refresh_cache-248ced8d-af78-4400-b9eb-449851b1bfa3" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 575.336170] env[61839]: DEBUG oslo_concurrency.lockutils [None req-285e2342-47a9-4a7d-855a-b08b1b98031b tempest-MigrationsAdminTest-1877210458 tempest-MigrationsAdminTest-1877210458-project-member] Acquired lock "refresh_cache-248ced8d-af78-4400-b9eb-449851b1bfa3" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 575.336362] env[61839]: DEBUG nova.network.neutron [None req-285e2342-47a9-4a7d-855a-b08b1b98031b tempest-MigrationsAdminTest-1877210458 tempest-MigrationsAdminTest-1877210458-project-member] [instance: 248ced8d-af78-4400-b9eb-449851b1bfa3] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 575.750091] env[61839]: DEBUG oslo_concurrency.lockutils [None req-35b58d39-5733-4319-be91-b1dc8063c680 tempest-FloatingIPsAssociationNegativeTestJSON-254000745 tempest-FloatingIPsAssociationNegativeTestJSON-254000745-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 575.871291] env[61839]: DEBUG nova.network.neutron [None req-285e2342-47a9-4a7d-855a-b08b1b98031b tempest-MigrationsAdminTest-1877210458 tempest-MigrationsAdminTest-1877210458-project-member] [instance: 248ced8d-af78-4400-b9eb-449851b1bfa3] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 576.023744] env[61839]: ERROR nova.compute.manager [None req-a1353c68-52e0-45ee-a66a-5ba6b6d870a4 tempest-InstanceActionsTestJSON-1284580064 tempest-InstanceActionsTestJSON-1284580064-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 788a594f-0cb1-4669-b641-d1f34ed13cc8, please check neutron logs for more information. 
[ 576.023744] env[61839]: ERROR nova.compute.manager Traceback (most recent call last): [ 576.023744] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 576.023744] env[61839]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 576.023744] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 576.023744] env[61839]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 576.023744] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 576.023744] env[61839]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 576.023744] env[61839]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 576.023744] env[61839]: ERROR nova.compute.manager self.force_reraise() [ 576.023744] env[61839]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 576.023744] env[61839]: ERROR nova.compute.manager raise self.value [ 576.023744] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 576.023744] env[61839]: ERROR nova.compute.manager updated_port = self._update_port( [ 576.023744] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 576.023744] env[61839]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 576.024256] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 576.024256] env[61839]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 576.024256] env[61839]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 788a594f-0cb1-4669-b641-d1f34ed13cc8, please check neutron logs for more information. 
[ 576.024256] env[61839]: ERROR nova.compute.manager [ 576.024256] env[61839]: Traceback (most recent call last): [ 576.024256] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 576.024256] env[61839]: listener.cb(fileno) [ 576.024256] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 576.024256] env[61839]: result = function(*args, **kwargs) [ 576.024256] env[61839]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 576.024256] env[61839]: return func(*args, **kwargs) [ 576.024256] env[61839]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 576.024256] env[61839]: raise e [ 576.024256] env[61839]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 576.024256] env[61839]: nwinfo = self.network_api.allocate_for_instance( [ 576.024256] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 576.024256] env[61839]: created_port_ids = self._update_ports_for_instance( [ 576.024256] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 576.024256] env[61839]: with excutils.save_and_reraise_exception(): [ 576.024256] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 576.024256] env[61839]: self.force_reraise() [ 576.024256] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 576.024256] env[61839]: raise self.value [ 576.024256] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 576.024256] env[61839]: updated_port = self._update_port( [ 576.024256] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 576.024256] env[61839]: _ensure_no_port_binding_failure(port) [ 576.024256] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 576.024256] env[61839]: raise exception.PortBindingFailed(port_id=port['id']) [ 576.025078] env[61839]: nova.exception.PortBindingFailed: Binding failed for port 788a594f-0cb1-4669-b641-d1f34ed13cc8, please check neutron logs for more information. [ 576.025078] env[61839]: Removing descriptor: 21 [ 576.025078] env[61839]: ERROR nova.compute.manager [None req-a1353c68-52e0-45ee-a66a-5ba6b6d870a4 tempest-InstanceActionsTestJSON-1284580064 tempest-InstanceActionsTestJSON-1284580064-project-member] [instance: ae1917f8-29af-43cc-8397-3b9072acee6c] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 788a594f-0cb1-4669-b641-d1f34ed13cc8, please check neutron logs for more information. 
[ 576.025078] env[61839]: ERROR nova.compute.manager [instance: ae1917f8-29af-43cc-8397-3b9072acee6c] Traceback (most recent call last): [ 576.025078] env[61839]: ERROR nova.compute.manager [instance: ae1917f8-29af-43cc-8397-3b9072acee6c] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 576.025078] env[61839]: ERROR nova.compute.manager [instance: ae1917f8-29af-43cc-8397-3b9072acee6c] yield resources [ 576.025078] env[61839]: ERROR nova.compute.manager [instance: ae1917f8-29af-43cc-8397-3b9072acee6c] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 576.025078] env[61839]: ERROR nova.compute.manager [instance: ae1917f8-29af-43cc-8397-3b9072acee6c] self.driver.spawn(context, instance, image_meta, [ 576.025078] env[61839]: ERROR nova.compute.manager [instance: ae1917f8-29af-43cc-8397-3b9072acee6c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 576.025078] env[61839]: ERROR nova.compute.manager [instance: ae1917f8-29af-43cc-8397-3b9072acee6c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 576.025078] env[61839]: ERROR nova.compute.manager [instance: ae1917f8-29af-43cc-8397-3b9072acee6c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 576.025078] env[61839]: ERROR nova.compute.manager [instance: ae1917f8-29af-43cc-8397-3b9072acee6c] vm_ref = self.build_virtual_machine(instance, [ 576.025843] env[61839]: ERROR nova.compute.manager [instance: ae1917f8-29af-43cc-8397-3b9072acee6c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 576.025843] env[61839]: ERROR nova.compute.manager [instance: ae1917f8-29af-43cc-8397-3b9072acee6c] vif_infos = vmwarevif.get_vif_info(self._session, [ 576.025843] env[61839]: ERROR nova.compute.manager [instance: ae1917f8-29af-43cc-8397-3b9072acee6c] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 576.025843] env[61839]: ERROR nova.compute.manager [instance: ae1917f8-29af-43cc-8397-3b9072acee6c] for vif in network_info: [ 576.025843] env[61839]: ERROR nova.compute.manager [instance: ae1917f8-29af-43cc-8397-3b9072acee6c] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 576.025843] env[61839]: ERROR nova.compute.manager [instance: ae1917f8-29af-43cc-8397-3b9072acee6c] return self._sync_wrapper(fn, *args, **kwargs) [ 576.025843] env[61839]: ERROR nova.compute.manager [instance: ae1917f8-29af-43cc-8397-3b9072acee6c] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 576.025843] env[61839]: ERROR nova.compute.manager [instance: ae1917f8-29af-43cc-8397-3b9072acee6c] self.wait() [ 576.025843] env[61839]: ERROR nova.compute.manager [instance: ae1917f8-29af-43cc-8397-3b9072acee6c] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 576.025843] env[61839]: ERROR nova.compute.manager [instance: ae1917f8-29af-43cc-8397-3b9072acee6c] self[:] = self._gt.wait() [ 576.025843] env[61839]: ERROR nova.compute.manager [instance: ae1917f8-29af-43cc-8397-3b9072acee6c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 576.025843] env[61839]: ERROR nova.compute.manager [instance: ae1917f8-29af-43cc-8397-3b9072acee6c] return self._exit_event.wait() [ 576.025843] env[61839]: ERROR nova.compute.manager [instance: ae1917f8-29af-43cc-8397-3b9072acee6c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 576.026235] env[61839]: ERROR 
nova.compute.manager [instance: ae1917f8-29af-43cc-8397-3b9072acee6c] result = hub.switch() [ 576.026235] env[61839]: ERROR nova.compute.manager [instance: ae1917f8-29af-43cc-8397-3b9072acee6c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 576.026235] env[61839]: ERROR nova.compute.manager [instance: ae1917f8-29af-43cc-8397-3b9072acee6c] return self.greenlet.switch() [ 576.026235] env[61839]: ERROR nova.compute.manager [instance: ae1917f8-29af-43cc-8397-3b9072acee6c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 576.026235] env[61839]: ERROR nova.compute.manager [instance: ae1917f8-29af-43cc-8397-3b9072acee6c] result = function(*args, **kwargs) [ 576.026235] env[61839]: ERROR nova.compute.manager [instance: ae1917f8-29af-43cc-8397-3b9072acee6c] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 576.026235] env[61839]: ERROR nova.compute.manager [instance: ae1917f8-29af-43cc-8397-3b9072acee6c] return func(*args, **kwargs) [ 576.026235] env[61839]: ERROR nova.compute.manager [instance: ae1917f8-29af-43cc-8397-3b9072acee6c] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 576.026235] env[61839]: ERROR nova.compute.manager [instance: ae1917f8-29af-43cc-8397-3b9072acee6c] raise e [ 576.026235] env[61839]: ERROR nova.compute.manager [instance: ae1917f8-29af-43cc-8397-3b9072acee6c] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 576.026235] env[61839]: ERROR nova.compute.manager [instance: ae1917f8-29af-43cc-8397-3b9072acee6c] nwinfo = self.network_api.allocate_for_instance( [ 576.026235] env[61839]: ERROR nova.compute.manager [instance: ae1917f8-29af-43cc-8397-3b9072acee6c] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 576.026235] env[61839]: ERROR nova.compute.manager [instance: ae1917f8-29af-43cc-8397-3b9072acee6c] created_port_ids = self._update_ports_for_instance( [ 576.026631] env[61839]: ERROR nova.compute.manager [instance: ae1917f8-29af-43cc-8397-3b9072acee6c] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 576.026631] env[61839]: ERROR nova.compute.manager [instance: ae1917f8-29af-43cc-8397-3b9072acee6c] with excutils.save_and_reraise_exception(): [ 576.026631] env[61839]: ERROR nova.compute.manager [instance: ae1917f8-29af-43cc-8397-3b9072acee6c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 576.026631] env[61839]: ERROR nova.compute.manager [instance: ae1917f8-29af-43cc-8397-3b9072acee6c] self.force_reraise() [ 576.026631] env[61839]: ERROR nova.compute.manager [instance: ae1917f8-29af-43cc-8397-3b9072acee6c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 576.026631] env[61839]: ERROR nova.compute.manager [instance: ae1917f8-29af-43cc-8397-3b9072acee6c] raise self.value [ 576.026631] env[61839]: ERROR nova.compute.manager [instance: ae1917f8-29af-43cc-8397-3b9072acee6c] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 576.026631] env[61839]: ERROR nova.compute.manager [instance: ae1917f8-29af-43cc-8397-3b9072acee6c] updated_port = self._update_port( [ 576.026631] env[61839]: ERROR nova.compute.manager [instance: ae1917f8-29af-43cc-8397-3b9072acee6c] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 576.026631] 
env[61839]: ERROR nova.compute.manager [instance: ae1917f8-29af-43cc-8397-3b9072acee6c] _ensure_no_port_binding_failure(port) [ 576.026631] env[61839]: ERROR nova.compute.manager [instance: ae1917f8-29af-43cc-8397-3b9072acee6c] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 576.026631] env[61839]: ERROR nova.compute.manager [instance: ae1917f8-29af-43cc-8397-3b9072acee6c] raise exception.PortBindingFailed(port_id=port['id']) [ 576.027438] env[61839]: ERROR nova.compute.manager [instance: ae1917f8-29af-43cc-8397-3b9072acee6c] nova.exception.PortBindingFailed: Binding failed for port 788a594f-0cb1-4669-b641-d1f34ed13cc8, please check neutron logs for more information. [ 576.027438] env[61839]: ERROR nova.compute.manager [instance: ae1917f8-29af-43cc-8397-3b9072acee6c] [ 576.027438] env[61839]: INFO nova.compute.manager [None req-a1353c68-52e0-45ee-a66a-5ba6b6d870a4 tempest-InstanceActionsTestJSON-1284580064 tempest-InstanceActionsTestJSON-1284580064-project-member] [instance: ae1917f8-29af-43cc-8397-3b9072acee6c] Terminating instance [ 576.027891] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a1353c68-52e0-45ee-a66a-5ba6b6d870a4 tempest-InstanceActionsTestJSON-1284580064 tempest-InstanceActionsTestJSON-1284580064-project-member] Acquiring lock "refresh_cache-ae1917f8-29af-43cc-8397-3b9072acee6c" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 576.028234] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a1353c68-52e0-45ee-a66a-5ba6b6d870a4 tempest-InstanceActionsTestJSON-1284580064 tempest-InstanceActionsTestJSON-1284580064-project-member] Acquired lock "refresh_cache-ae1917f8-29af-43cc-8397-3b9072acee6c" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 576.028234] env[61839]: DEBUG nova.network.neutron [None req-a1353c68-52e0-45ee-a66a-5ba6b6d870a4 tempest-InstanceActionsTestJSON-1284580064 tempest-InstanceActionsTestJSON-1284580064-project-member] [instance: ae1917f8-29af-43cc-8397-3b9072acee6c] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 576.029793] env[61839]: DEBUG nova.network.neutron [None req-285e2342-47a9-4a7d-855a-b08b1b98031b tempest-MigrationsAdminTest-1877210458 tempest-MigrationsAdminTest-1877210458-project-member] [instance: 248ced8d-af78-4400-b9eb-449851b1bfa3] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 576.217215] env[61839]: DEBUG nova.compute.manager [req-da54d842-d891-46dc-8905-9662468dbf50 req-b4d9b821-fe47-4ec4-a096-c208dcc27149 service nova] [instance: ae1917f8-29af-43cc-8397-3b9072acee6c] Received event network-changed-788a594f-0cb1-4669-b641-d1f34ed13cc8 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 576.218194] env[61839]: DEBUG nova.compute.manager [req-da54d842-d891-46dc-8905-9662468dbf50 req-b4d9b821-fe47-4ec4-a096-c208dcc27149 service nova] [instance: ae1917f8-29af-43cc-8397-3b9072acee6c] Refreshing instance network info cache due to event network-changed-788a594f-0cb1-4669-b641-d1f34ed13cc8. 
{{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 576.219528] env[61839]: DEBUG oslo_concurrency.lockutils [req-da54d842-d891-46dc-8905-9662468dbf50 req-b4d9b821-fe47-4ec4-a096-c208dcc27149 service nova] Acquiring lock "refresh_cache-ae1917f8-29af-43cc-8397-3b9072acee6c" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 576.342880] env[61839]: DEBUG oslo_concurrency.lockutils [None req-6b23937a-feae-42bc-8255-54b6d335ea09 tempest-ServersAdmin275Test-1522278051 tempest-ServersAdmin275Test-1522278051-project-admin] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.011s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 576.343988] env[61839]: DEBUG oslo_concurrency.lockutils [None req-62980ca7-dd2c-495e-b429-ffc9668e86f8 tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 15.049s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 576.344231] env[61839]: DEBUG nova.objects.instance [None req-62980ca7-dd2c-495e-b429-ffc9668e86f8 tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] Lazy-loading 'resources' on Instance uuid 41823a25-5ff2-4838-854d-5bada8e5daca {{(pid=61839) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 576.534176] env[61839]: DEBUG oslo_concurrency.lockutils [None req-285e2342-47a9-4a7d-855a-b08b1b98031b tempest-MigrationsAdminTest-1877210458 tempest-MigrationsAdminTest-1877210458-project-member] Releasing lock "refresh_cache-248ced8d-af78-4400-b9eb-449851b1bfa3" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 576.534410] env[61839]: DEBUG nova.compute.manager [None req-285e2342-47a9-4a7d-855a-b08b1b98031b tempest-MigrationsAdminTest-1877210458 tempest-MigrationsAdminTest-1877210458-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61839) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 576.534574] env[61839]: DEBUG nova.compute.manager [None req-285e2342-47a9-4a7d-855a-b08b1b98031b tempest-MigrationsAdminTest-1877210458 tempest-MigrationsAdminTest-1877210458-project-member] [instance: 248ced8d-af78-4400-b9eb-449851b1bfa3] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 576.534740] env[61839]: DEBUG nova.network.neutron [None req-285e2342-47a9-4a7d-855a-b08b1b98031b tempest-MigrationsAdminTest-1877210458 tempest-MigrationsAdminTest-1877210458-project-member] [instance: 248ced8d-af78-4400-b9eb-449851b1bfa3] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 576.584891] env[61839]: DEBUG nova.network.neutron [None req-285e2342-47a9-4a7d-855a-b08b1b98031b tempest-MigrationsAdminTest-1877210458 tempest-MigrationsAdminTest-1877210458-project-member] [instance: 248ced8d-af78-4400-b9eb-449851b1bfa3] Instance cache missing network info. 
{{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 576.590081] env[61839]: DEBUG nova.network.neutron [None req-a1353c68-52e0-45ee-a66a-5ba6b6d870a4 tempest-InstanceActionsTestJSON-1284580064 tempest-InstanceActionsTestJSON-1284580064-project-member] [instance: ae1917f8-29af-43cc-8397-3b9072acee6c] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 576.807883] env[61839]: DEBUG nova.network.neutron [None req-a1353c68-52e0-45ee-a66a-5ba6b6d870a4 tempest-InstanceActionsTestJSON-1284580064 tempest-InstanceActionsTestJSON-1284580064-project-member] [instance: ae1917f8-29af-43cc-8397-3b9072acee6c] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 577.090453] env[61839]: DEBUG nova.network.neutron [None req-285e2342-47a9-4a7d-855a-b08b1b98031b tempest-MigrationsAdminTest-1877210458 tempest-MigrationsAdminTest-1877210458-project-member] [instance: 248ced8d-af78-4400-b9eb-449851b1bfa3] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 577.266909] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77c89073-86b4-47a3-b6e6-0d5b746984b4 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.275707] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-352a5449-3b14-44ca-afab-db6310471964 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.306877] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc2a48f2-2eb7-41c6-a08c-3b3e12f689e2 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.311467] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a1353c68-52e0-45ee-a66a-5ba6b6d870a4 tempest-InstanceActionsTestJSON-1284580064 tempest-InstanceActionsTestJSON-1284580064-project-member] Releasing lock "refresh_cache-ae1917f8-29af-43cc-8397-3b9072acee6c" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 577.311777] env[61839]: DEBUG nova.compute.manager [None req-a1353c68-52e0-45ee-a66a-5ba6b6d870a4 tempest-InstanceActionsTestJSON-1284580064 tempest-InstanceActionsTestJSON-1284580064-project-member] [instance: ae1917f8-29af-43cc-8397-3b9072acee6c] Start destroying the instance on the hypervisor. 
{{(pid=61839) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 577.311996] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-a1353c68-52e0-45ee-a66a-5ba6b6d870a4 tempest-InstanceActionsTestJSON-1284580064 tempest-InstanceActionsTestJSON-1284580064-project-member] [instance: ae1917f8-29af-43cc-8397-3b9072acee6c] Destroying instance {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 577.315174] env[61839]: DEBUG oslo_concurrency.lockutils [req-da54d842-d891-46dc-8905-9662468dbf50 req-b4d9b821-fe47-4ec4-a096-c208dcc27149 service nova] Acquired lock "refresh_cache-ae1917f8-29af-43cc-8397-3b9072acee6c" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 577.315374] env[61839]: DEBUG nova.network.neutron [req-da54d842-d891-46dc-8905-9662468dbf50 req-b4d9b821-fe47-4ec4-a096-c208dcc27149 service nova] [instance: ae1917f8-29af-43cc-8397-3b9072acee6c] Refreshing network info cache for port 788a594f-0cb1-4669-b641-d1f34ed13cc8 {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 577.316647] env[61839]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3a15e66f-f17a-41f2-830f-6efc686cecab {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.319787] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-514458b7-953c-48fb-9f3f-bcd5d2cae00f {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.346200] env[61839]: DEBUG nova.compute.provider_tree [None req-62980ca7-dd2c-495e-b429-ffc9668e86f8 tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 577.355905] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16733449-37a6-48f1-88e4-d070c5e49fe6 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.379464] env[61839]: DEBUG nova.scheduler.client.report [None req-62980ca7-dd2c-495e-b429-ffc9668e86f8 tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 577.400731] env[61839]: WARNING nova.virt.vmwareapi.vmops [None req-a1353c68-52e0-45ee-a66a-5ba6b6d870a4 tempest-InstanceActionsTestJSON-1284580064 tempest-InstanceActionsTestJSON-1284580064-project-member] [instance: ae1917f8-29af-43cc-8397-3b9072acee6c] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance ae1917f8-29af-43cc-8397-3b9072acee6c could not be found. 
[ 577.401069] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-a1353c68-52e0-45ee-a66a-5ba6b6d870a4 tempest-InstanceActionsTestJSON-1284580064 tempest-InstanceActionsTestJSON-1284580064-project-member] [instance: ae1917f8-29af-43cc-8397-3b9072acee6c] Instance destroyed {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 577.401380] env[61839]: INFO nova.compute.manager [None req-a1353c68-52e0-45ee-a66a-5ba6b6d870a4 tempest-InstanceActionsTestJSON-1284580064 tempest-InstanceActionsTestJSON-1284580064-project-member] [instance: ae1917f8-29af-43cc-8397-3b9072acee6c] Took 0.09 seconds to destroy the instance on the hypervisor. [ 577.401785] env[61839]: DEBUG oslo.service.loopingcall [None req-a1353c68-52e0-45ee-a66a-5ba6b6d870a4 tempest-InstanceActionsTestJSON-1284580064 tempest-InstanceActionsTestJSON-1284580064-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 577.402473] env[61839]: DEBUG nova.compute.manager [-] [instance: ae1917f8-29af-43cc-8397-3b9072acee6c] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 577.402619] env[61839]: DEBUG nova.network.neutron [-] [instance: ae1917f8-29af-43cc-8397-3b9072acee6c] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 577.424302] env[61839]: DEBUG nova.network.neutron [-] [instance: ae1917f8-29af-43cc-8397-3b9072acee6c] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 577.594837] env[61839]: INFO nova.compute.manager [None req-285e2342-47a9-4a7d-855a-b08b1b98031b tempest-MigrationsAdminTest-1877210458 tempest-MigrationsAdminTest-1877210458-project-member] [instance: 248ced8d-af78-4400-b9eb-449851b1bfa3] Took 1.06 seconds to deallocate network for instance. [ 577.853026] env[61839]: DEBUG nova.network.neutron [req-da54d842-d891-46dc-8905-9662468dbf50 req-b4d9b821-fe47-4ec4-a096-c208dcc27149 service nova] [instance: ae1917f8-29af-43cc-8397-3b9072acee6c] Instance cache missing network info. 
{{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 577.886802] env[61839]: DEBUG oslo_concurrency.lockutils [None req-62980ca7-dd2c-495e-b429-ffc9668e86f8 tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.543s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 577.890345] env[61839]: DEBUG oslo_concurrency.lockutils [None req-1b91a866-cb6a-4f66-9111-9caa2300b964 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 16.073s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 577.890345] env[61839]: DEBUG nova.objects.instance [None req-1b91a866-cb6a-4f66-9111-9caa2300b964 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Lazy-loading 'resources' on Instance uuid 211e8267-3c33-42c8-852f-1c20d7987453 {{(pid=61839) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 577.922703] env[61839]: INFO nova.scheduler.client.report [None req-62980ca7-dd2c-495e-b429-ffc9668e86f8 tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] Deleted allocations for instance 41823a25-5ff2-4838-854d-5bada8e5daca [ 577.927137] env[61839]: DEBUG nova.network.neutron [-] [instance: ae1917f8-29af-43cc-8397-3b9072acee6c] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 578.083681] env[61839]: DEBUG nova.network.neutron [req-da54d842-d891-46dc-8905-9662468dbf50 req-b4d9b821-fe47-4ec4-a096-c208dcc27149 service nova] [instance: ae1917f8-29af-43cc-8397-3b9072acee6c] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 578.434740] env[61839]: INFO nova.compute.manager [-] [instance: ae1917f8-29af-43cc-8397-3b9072acee6c] Took 1.03 seconds to deallocate network for instance. 
[ 578.436556] env[61839]: DEBUG oslo_concurrency.lockutils [None req-62980ca7-dd2c-495e-b429-ffc9668e86f8 tempest-ServerDiagnosticsV248Test-524179873 tempest-ServerDiagnosticsV248Test-524179873-project-member] Lock "41823a25-5ff2-4838-854d-5bada8e5daca" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 21.301s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 578.438979] env[61839]: DEBUG nova.compute.claims [None req-a1353c68-52e0-45ee-a66a-5ba6b6d870a4 tempest-InstanceActionsTestJSON-1284580064 tempest-InstanceActionsTestJSON-1284580064-project-member] [instance: ae1917f8-29af-43cc-8397-3b9072acee6c] Aborting claim: {{(pid=61839) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 578.444608] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a1353c68-52e0-45ee-a66a-5ba6b6d870a4 tempest-InstanceActionsTestJSON-1284580064 tempest-InstanceActionsTestJSON-1284580064-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 578.525183] env[61839]: DEBUG nova.compute.manager [req-cfe8dfd9-f650-4eee-ac11-490f6c2b59ec req-130ea296-a788-47d8-9709-783595e2ac30 service nova] [instance: ae1917f8-29af-43cc-8397-3b9072acee6c] Received event network-vif-deleted-788a594f-0cb1-4669-b641-d1f34ed13cc8 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 578.586854] env[61839]: DEBUG oslo_concurrency.lockutils [req-da54d842-d891-46dc-8905-9662468dbf50 req-b4d9b821-fe47-4ec4-a096-c208dcc27149 service nova] Releasing lock "refresh_cache-ae1917f8-29af-43cc-8397-3b9072acee6c" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 578.636714] env[61839]: INFO nova.scheduler.client.report [None req-285e2342-47a9-4a7d-855a-b08b1b98031b tempest-MigrationsAdminTest-1877210458 tempest-MigrationsAdminTest-1877210458-project-member] Deleted allocations for instance 248ced8d-af78-4400-b9eb-449851b1bfa3 [ 578.922872] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17d3669a-3d45-410f-9852-6ecb4d4c08d3 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.935072] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acd7f95d-49b8-4942-8c65-21ce266ff5cf {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.975423] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1afc1b0d-6cf3-4448-aa31-4491feb7c682 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.984744] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc457127-6087-4093-a051-9b9aebbff1b8 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.996301] env[61839]: DEBUG nova.compute.provider_tree [None req-1b91a866-cb6a-4f66-9111-9caa2300b964 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Inventory has not changed in ProviderTree for provider: 
cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 579.143458] env[61839]: DEBUG oslo_concurrency.lockutils [None req-285e2342-47a9-4a7d-855a-b08b1b98031b tempest-MigrationsAdminTest-1877210458 tempest-MigrationsAdminTest-1877210458-project-member] Lock "248ced8d-af78-4400-b9eb-449851b1bfa3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 46.585s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 579.502019] env[61839]: DEBUG nova.scheduler.client.report [None req-1b91a866-cb6a-4f66-9111-9caa2300b964 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 579.646240] env[61839]: DEBUG nova.compute.manager [None req-96726cf8-093a-469e-ae4c-d851d34fa48d tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] [instance: d95b34bb-bf0f-4a43-a5ad-6ae7770b606c] Starting instance... {{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 580.006028] env[61839]: DEBUG oslo_concurrency.lockutils [None req-1b91a866-cb6a-4f66-9111-9caa2300b964 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.116s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 580.009853] env[61839]: DEBUG oslo_concurrency.lockutils [None req-26fc30cc-4ad1-4009-975c-8d3de6d95ff0 tempest-ServerExternalEventsTest-1581584313 tempest-ServerExternalEventsTest-1581584313-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.158s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 580.039967] env[61839]: INFO nova.scheduler.client.report [None req-1b91a866-cb6a-4f66-9111-9caa2300b964 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Deleted allocations for instance 211e8267-3c33-42c8-852f-1c20d7987453 [ 580.183968] env[61839]: DEBUG oslo_concurrency.lockutils [None req-96726cf8-093a-469e-ae4c-d851d34fa48d tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 580.559574] env[61839]: DEBUG oslo_concurrency.lockutils [None req-1b91a866-cb6a-4f66-9111-9caa2300b964 tempest-ServersAdmin275Test-1612766627 tempest-ServersAdmin275Test-1612766627-project-member] Lock "211e8267-3c33-42c8-852f-1c20d7987453" "released" by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 22.811s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 581.035369] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d727697-2865-4b33-b489-ea6e862256b3 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.045628] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa6db4b2-d228-43b4-985b-364544137d7c {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.085541] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-270e8467-f5e3-484d-ba1f-0023aa27666b {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.092665] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f277f802-39d2-41bb-9747-3c7dd40a014d {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.108942] env[61839]: DEBUG nova.compute.provider_tree [None req-26fc30cc-4ad1-4009-975c-8d3de6d95ff0 tempest-ServerExternalEventsTest-1581584313 tempest-ServerExternalEventsTest-1581584313-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 581.613960] env[61839]: DEBUG nova.scheduler.client.report [None req-26fc30cc-4ad1-4009-975c-8d3de6d95ff0 tempest-ServerExternalEventsTest-1581584313 tempest-ServerExternalEventsTest-1581584313-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 582.123260] env[61839]: DEBUG oslo_concurrency.lockutils [None req-26fc30cc-4ad1-4009-975c-8d3de6d95ff0 tempest-ServerExternalEventsTest-1581584313 tempest-ServerExternalEventsTest-1581584313-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.111s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 582.123260] env[61839]: ERROR nova.compute.manager [None req-26fc30cc-4ad1-4009-975c-8d3de6d95ff0 tempest-ServerExternalEventsTest-1581584313 tempest-ServerExternalEventsTest-1581584313-project-member] [instance: d0822954-42c2-4003-baf4-97bc2ce65768] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 9a01ee46-d236-4b9b-8c66-098e5658a725, please check neutron logs for more information. 
[ 582.123260] env[61839]: ERROR nova.compute.manager [instance: d0822954-42c2-4003-baf4-97bc2ce65768] Traceback (most recent call last): [ 582.123260] env[61839]: ERROR nova.compute.manager [instance: d0822954-42c2-4003-baf4-97bc2ce65768] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 582.123260] env[61839]: ERROR nova.compute.manager [instance: d0822954-42c2-4003-baf4-97bc2ce65768] self.driver.spawn(context, instance, image_meta, [ 582.123260] env[61839]: ERROR nova.compute.manager [instance: d0822954-42c2-4003-baf4-97bc2ce65768] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 582.123260] env[61839]: ERROR nova.compute.manager [instance: d0822954-42c2-4003-baf4-97bc2ce65768] self._vmops.spawn(context, instance, image_meta, injected_files, [ 582.123260] env[61839]: ERROR nova.compute.manager [instance: d0822954-42c2-4003-baf4-97bc2ce65768] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 582.123260] env[61839]: ERROR nova.compute.manager [instance: d0822954-42c2-4003-baf4-97bc2ce65768] vm_ref = self.build_virtual_machine(instance, [ 582.123637] env[61839]: ERROR nova.compute.manager [instance: d0822954-42c2-4003-baf4-97bc2ce65768] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 582.123637] env[61839]: ERROR nova.compute.manager [instance: d0822954-42c2-4003-baf4-97bc2ce65768] vif_infos = vmwarevif.get_vif_info(self._session, [ 582.123637] env[61839]: ERROR nova.compute.manager [instance: d0822954-42c2-4003-baf4-97bc2ce65768] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 582.123637] env[61839]: ERROR nova.compute.manager [instance: d0822954-42c2-4003-baf4-97bc2ce65768] for vif in network_info: [ 582.123637] env[61839]: ERROR nova.compute.manager [instance: d0822954-42c2-4003-baf4-97bc2ce65768] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 582.123637] env[61839]: ERROR nova.compute.manager [instance: d0822954-42c2-4003-baf4-97bc2ce65768] return self._sync_wrapper(fn, *args, **kwargs) [ 582.123637] env[61839]: ERROR nova.compute.manager [instance: d0822954-42c2-4003-baf4-97bc2ce65768] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 582.123637] env[61839]: ERROR nova.compute.manager [instance: d0822954-42c2-4003-baf4-97bc2ce65768] self.wait() [ 582.123637] env[61839]: ERROR nova.compute.manager [instance: d0822954-42c2-4003-baf4-97bc2ce65768] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 582.123637] env[61839]: ERROR nova.compute.manager [instance: d0822954-42c2-4003-baf4-97bc2ce65768] self[:] = self._gt.wait() [ 582.123637] env[61839]: ERROR nova.compute.manager [instance: d0822954-42c2-4003-baf4-97bc2ce65768] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 582.123637] env[61839]: ERROR nova.compute.manager [instance: d0822954-42c2-4003-baf4-97bc2ce65768] return self._exit_event.wait() [ 582.123637] env[61839]: ERROR nova.compute.manager [instance: d0822954-42c2-4003-baf4-97bc2ce65768] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 582.124049] env[61839]: ERROR nova.compute.manager [instance: d0822954-42c2-4003-baf4-97bc2ce65768] result = hub.switch() [ 582.124049] env[61839]: ERROR nova.compute.manager [instance: d0822954-42c2-4003-baf4-97bc2ce65768] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
582.124049] env[61839]: ERROR nova.compute.manager [instance: d0822954-42c2-4003-baf4-97bc2ce65768] return self.greenlet.switch() [ 582.124049] env[61839]: ERROR nova.compute.manager [instance: d0822954-42c2-4003-baf4-97bc2ce65768] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 582.124049] env[61839]: ERROR nova.compute.manager [instance: d0822954-42c2-4003-baf4-97bc2ce65768] result = function(*args, **kwargs) [ 582.124049] env[61839]: ERROR nova.compute.manager [instance: d0822954-42c2-4003-baf4-97bc2ce65768] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 582.124049] env[61839]: ERROR nova.compute.manager [instance: d0822954-42c2-4003-baf4-97bc2ce65768] return func(*args, **kwargs) [ 582.124049] env[61839]: ERROR nova.compute.manager [instance: d0822954-42c2-4003-baf4-97bc2ce65768] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 582.124049] env[61839]: ERROR nova.compute.manager [instance: d0822954-42c2-4003-baf4-97bc2ce65768] raise e [ 582.124049] env[61839]: ERROR nova.compute.manager [instance: d0822954-42c2-4003-baf4-97bc2ce65768] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 582.124049] env[61839]: ERROR nova.compute.manager [instance: d0822954-42c2-4003-baf4-97bc2ce65768] nwinfo = self.network_api.allocate_for_instance( [ 582.124049] env[61839]: ERROR nova.compute.manager [instance: d0822954-42c2-4003-baf4-97bc2ce65768] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 582.124049] env[61839]: ERROR nova.compute.manager [instance: d0822954-42c2-4003-baf4-97bc2ce65768] created_port_ids = self._update_ports_for_instance( [ 582.124424] env[61839]: ERROR nova.compute.manager [instance: d0822954-42c2-4003-baf4-97bc2ce65768] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 582.124424] env[61839]: ERROR nova.compute.manager [instance: d0822954-42c2-4003-baf4-97bc2ce65768] with excutils.save_and_reraise_exception(): [ 582.124424] env[61839]: ERROR nova.compute.manager [instance: d0822954-42c2-4003-baf4-97bc2ce65768] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 582.124424] env[61839]: ERROR nova.compute.manager [instance: d0822954-42c2-4003-baf4-97bc2ce65768] self.force_reraise() [ 582.124424] env[61839]: ERROR nova.compute.manager [instance: d0822954-42c2-4003-baf4-97bc2ce65768] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 582.124424] env[61839]: ERROR nova.compute.manager [instance: d0822954-42c2-4003-baf4-97bc2ce65768] raise self.value [ 582.124424] env[61839]: ERROR nova.compute.manager [instance: d0822954-42c2-4003-baf4-97bc2ce65768] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 582.124424] env[61839]: ERROR nova.compute.manager [instance: d0822954-42c2-4003-baf4-97bc2ce65768] updated_port = self._update_port( [ 582.124424] env[61839]: ERROR nova.compute.manager [instance: d0822954-42c2-4003-baf4-97bc2ce65768] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 582.124424] env[61839]: ERROR nova.compute.manager [instance: d0822954-42c2-4003-baf4-97bc2ce65768] _ensure_no_port_binding_failure(port) [ 582.124424] env[61839]: ERROR nova.compute.manager [instance: d0822954-42c2-4003-baf4-97bc2ce65768] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 582.124424] env[61839]: ERROR nova.compute.manager [instance: d0822954-42c2-4003-baf4-97bc2ce65768] raise exception.PortBindingFailed(port_id=port['id']) [ 582.124813] env[61839]: ERROR nova.compute.manager [instance: d0822954-42c2-4003-baf4-97bc2ce65768] nova.exception.PortBindingFailed: Binding failed for port 9a01ee46-d236-4b9b-8c66-098e5658a725, please check neutron logs for more information. [ 582.124813] env[61839]: ERROR nova.compute.manager [instance: d0822954-42c2-4003-baf4-97bc2ce65768] [ 582.124813] env[61839]: DEBUG nova.compute.utils [None req-26fc30cc-4ad1-4009-975c-8d3de6d95ff0 tempest-ServerExternalEventsTest-1581584313 tempest-ServerExternalEventsTest-1581584313-project-member] [instance: d0822954-42c2-4003-baf4-97bc2ce65768] Binding failed for port 9a01ee46-d236-4b9b-8c66-098e5658a725, please check neutron logs for more information. {{(pid=61839) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 582.128131] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0477abbc-1d43-4051-b701-9bb77cd00e21 tempest-ServerRescueTestJSON-1450719199 tempest-ServerRescueTestJSON-1450719199-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.186s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 582.130538] env[61839]: DEBUG nova.compute.manager [None req-26fc30cc-4ad1-4009-975c-8d3de6d95ff0 tempest-ServerExternalEventsTest-1581584313 tempest-ServerExternalEventsTest-1581584313-project-member] [instance: d0822954-42c2-4003-baf4-97bc2ce65768] Build of instance d0822954-42c2-4003-baf4-97bc2ce65768 was re-scheduled: Binding failed for port 9a01ee46-d236-4b9b-8c66-098e5658a725, please check neutron logs for more information. 
{{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 582.131160] env[61839]: DEBUG nova.compute.manager [None req-26fc30cc-4ad1-4009-975c-8d3de6d95ff0 tempest-ServerExternalEventsTest-1581584313 tempest-ServerExternalEventsTest-1581584313-project-member] [instance: d0822954-42c2-4003-baf4-97bc2ce65768] Unplugging VIFs for instance {{(pid=61839) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 582.131424] env[61839]: DEBUG oslo_concurrency.lockutils [None req-26fc30cc-4ad1-4009-975c-8d3de6d95ff0 tempest-ServerExternalEventsTest-1581584313 tempest-ServerExternalEventsTest-1581584313-project-member] Acquiring lock "refresh_cache-d0822954-42c2-4003-baf4-97bc2ce65768" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 582.131670] env[61839]: DEBUG oslo_concurrency.lockutils [None req-26fc30cc-4ad1-4009-975c-8d3de6d95ff0 tempest-ServerExternalEventsTest-1581584313 tempest-ServerExternalEventsTest-1581584313-project-member] Acquired lock "refresh_cache-d0822954-42c2-4003-baf4-97bc2ce65768" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 582.131919] env[61839]: DEBUG nova.network.neutron [None req-26fc30cc-4ad1-4009-975c-8d3de6d95ff0 tempest-ServerExternalEventsTest-1581584313 tempest-ServerExternalEventsTest-1581584313-project-member] [instance: d0822954-42c2-4003-baf4-97bc2ce65768] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 582.697504] env[61839]: DEBUG nova.network.neutron [None req-26fc30cc-4ad1-4009-975c-8d3de6d95ff0 tempest-ServerExternalEventsTest-1581584313 tempest-ServerExternalEventsTest-1581584313-project-member] [instance: d0822954-42c2-4003-baf4-97bc2ce65768] Instance cache missing network info. 
{{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 582.933459] env[61839]: DEBUG oslo_concurrency.lockutils [None req-176fdd3e-e9a1-456d-89bc-e1597ce200e4 tempest-ImagesOneServerTestJSON-1580027351 tempest-ImagesOneServerTestJSON-1580027351-project-member] Acquiring lock "ad32bc49-5e52-468a-9d93-390c8649dcae" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 582.933664] env[61839]: DEBUG oslo_concurrency.lockutils [None req-176fdd3e-e9a1-456d-89bc-e1597ce200e4 tempest-ImagesOneServerTestJSON-1580027351 tempest-ImagesOneServerTestJSON-1580027351-project-member] Lock "ad32bc49-5e52-468a-9d93-390c8649dcae" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 582.959149] env[61839]: DEBUG nova.network.neutron [None req-26fc30cc-4ad1-4009-975c-8d3de6d95ff0 tempest-ServerExternalEventsTest-1581584313 tempest-ServerExternalEventsTest-1581584313-project-member] [instance: d0822954-42c2-4003-baf4-97bc2ce65768] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 583.183327] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ec60dc5-79d0-4118-b4b2-572fb5fd654a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.194680] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81920318-4cfd-45de-9cb6-82d9acd94954 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.231734] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a833a02f-3be3-4b20-97dc-853f2dd8dc69 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.240548] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cecefb99-06ac-49c4-861c-72c77c373a26 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.258083] env[61839]: DEBUG nova.compute.provider_tree [None req-0477abbc-1d43-4051-b701-9bb77cd00e21 tempest-ServerRescueTestJSON-1450719199 tempest-ServerRescueTestJSON-1450719199-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 583.462273] env[61839]: DEBUG oslo_concurrency.lockutils [None req-26fc30cc-4ad1-4009-975c-8d3de6d95ff0 tempest-ServerExternalEventsTest-1581584313 tempest-ServerExternalEventsTest-1581584313-project-member] Releasing lock "refresh_cache-d0822954-42c2-4003-baf4-97bc2ce65768" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 583.462543] env[61839]: DEBUG nova.compute.manager [None req-26fc30cc-4ad1-4009-975c-8d3de6d95ff0 tempest-ServerExternalEventsTest-1581584313 tempest-ServerExternalEventsTest-1581584313-project-member] Virt driver does not provide unplug_vifs method, so it is not 
possible determine if VIFs should be unplugged. {{(pid=61839) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 583.462719] env[61839]: DEBUG nova.compute.manager [None req-26fc30cc-4ad1-4009-975c-8d3de6d95ff0 tempest-ServerExternalEventsTest-1581584313 tempest-ServerExternalEventsTest-1581584313-project-member] [instance: d0822954-42c2-4003-baf4-97bc2ce65768] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 583.462888] env[61839]: DEBUG nova.network.neutron [None req-26fc30cc-4ad1-4009-975c-8d3de6d95ff0 tempest-ServerExternalEventsTest-1581584313 tempest-ServerExternalEventsTest-1581584313-project-member] [instance: d0822954-42c2-4003-baf4-97bc2ce65768] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 583.490349] env[61839]: DEBUG nova.network.neutron [None req-26fc30cc-4ad1-4009-975c-8d3de6d95ff0 tempest-ServerExternalEventsTest-1581584313 tempest-ServerExternalEventsTest-1581584313-project-member] [instance: d0822954-42c2-4003-baf4-97bc2ce65768] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 583.770132] env[61839]: DEBUG nova.scheduler.client.report [None req-0477abbc-1d43-4051-b701-9bb77cd00e21 tempest-ServerRescueTestJSON-1450719199 tempest-ServerRescueTestJSON-1450719199-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 583.926873] env[61839]: DEBUG oslo_concurrency.lockutils [None req-92400e08-1071-41df-9108-be5fb5ffb9e6 tempest-MigrationsAdminTest-1877210458 tempest-MigrationsAdminTest-1877210458-project-member] Acquiring lock "83270007-2cbd-49a5-b3a1-1ad58ea2a66c" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 583.927888] env[61839]: DEBUG oslo_concurrency.lockutils [None req-92400e08-1071-41df-9108-be5fb5ffb9e6 tempest-MigrationsAdminTest-1877210458 tempest-MigrationsAdminTest-1877210458-project-member] Lock "83270007-2cbd-49a5-b3a1-1ad58ea2a66c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 583.993985] env[61839]: DEBUG nova.network.neutron [None req-26fc30cc-4ad1-4009-975c-8d3de6d95ff0 tempest-ServerExternalEventsTest-1581584313 tempest-ServerExternalEventsTest-1581584313-project-member] [instance: d0822954-42c2-4003-baf4-97bc2ce65768] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 584.275121] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0477abbc-1d43-4051-b701-9bb77cd00e21 tempest-ServerRescueTestJSON-1450719199 tempest-ServerRescueTestJSON-1450719199-project-member] Lock 
"compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.147s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 584.275121] env[61839]: ERROR nova.compute.manager [None req-0477abbc-1d43-4051-b701-9bb77cd00e21 tempest-ServerRescueTestJSON-1450719199 tempest-ServerRescueTestJSON-1450719199-project-member] [instance: 2032b746-2161-4487-ae4c-6159313241f4] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 02380c98-1108-4354-a18d-f9cabaed08c3, please check neutron logs for more information. [ 584.275121] env[61839]: ERROR nova.compute.manager [instance: 2032b746-2161-4487-ae4c-6159313241f4] Traceback (most recent call last): [ 584.275121] env[61839]: ERROR nova.compute.manager [instance: 2032b746-2161-4487-ae4c-6159313241f4] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 584.275121] env[61839]: ERROR nova.compute.manager [instance: 2032b746-2161-4487-ae4c-6159313241f4] self.driver.spawn(context, instance, image_meta, [ 584.275121] env[61839]: ERROR nova.compute.manager [instance: 2032b746-2161-4487-ae4c-6159313241f4] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 584.275121] env[61839]: ERROR nova.compute.manager [instance: 2032b746-2161-4487-ae4c-6159313241f4] self._vmops.spawn(context, instance, image_meta, injected_files, [ 584.275121] env[61839]: ERROR nova.compute.manager [instance: 2032b746-2161-4487-ae4c-6159313241f4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 584.275121] env[61839]: ERROR nova.compute.manager [instance: 2032b746-2161-4487-ae4c-6159313241f4] vm_ref = self.build_virtual_machine(instance, [ 584.275614] env[61839]: ERROR nova.compute.manager [instance: 2032b746-2161-4487-ae4c-6159313241f4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 584.275614] env[61839]: ERROR nova.compute.manager [instance: 2032b746-2161-4487-ae4c-6159313241f4] vif_infos = vmwarevif.get_vif_info(self._session, [ 584.275614] env[61839]: ERROR nova.compute.manager [instance: 2032b746-2161-4487-ae4c-6159313241f4] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 584.275614] env[61839]: ERROR nova.compute.manager [instance: 2032b746-2161-4487-ae4c-6159313241f4] for vif in network_info: [ 584.275614] env[61839]: ERROR nova.compute.manager [instance: 2032b746-2161-4487-ae4c-6159313241f4] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 584.275614] env[61839]: ERROR nova.compute.manager [instance: 2032b746-2161-4487-ae4c-6159313241f4] return self._sync_wrapper(fn, *args, **kwargs) [ 584.275614] env[61839]: ERROR nova.compute.manager [instance: 2032b746-2161-4487-ae4c-6159313241f4] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 584.275614] env[61839]: ERROR nova.compute.manager [instance: 2032b746-2161-4487-ae4c-6159313241f4] self.wait() [ 584.275614] env[61839]: ERROR nova.compute.manager [instance: 2032b746-2161-4487-ae4c-6159313241f4] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 584.275614] env[61839]: ERROR nova.compute.manager [instance: 2032b746-2161-4487-ae4c-6159313241f4] self[:] = self._gt.wait() [ 584.275614] env[61839]: ERROR nova.compute.manager [instance: 2032b746-2161-4487-ae4c-6159313241f4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 584.275614] 
env[61839]: ERROR nova.compute.manager [instance: 2032b746-2161-4487-ae4c-6159313241f4] return self._exit_event.wait() [ 584.275614] env[61839]: ERROR nova.compute.manager [instance: 2032b746-2161-4487-ae4c-6159313241f4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 584.276081] env[61839]: ERROR nova.compute.manager [instance: 2032b746-2161-4487-ae4c-6159313241f4] result = hub.switch() [ 584.276081] env[61839]: ERROR nova.compute.manager [instance: 2032b746-2161-4487-ae4c-6159313241f4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 584.276081] env[61839]: ERROR nova.compute.manager [instance: 2032b746-2161-4487-ae4c-6159313241f4] return self.greenlet.switch() [ 584.276081] env[61839]: ERROR nova.compute.manager [instance: 2032b746-2161-4487-ae4c-6159313241f4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 584.276081] env[61839]: ERROR nova.compute.manager [instance: 2032b746-2161-4487-ae4c-6159313241f4] result = function(*args, **kwargs) [ 584.276081] env[61839]: ERROR nova.compute.manager [instance: 2032b746-2161-4487-ae4c-6159313241f4] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 584.276081] env[61839]: ERROR nova.compute.manager [instance: 2032b746-2161-4487-ae4c-6159313241f4] return func(*args, **kwargs) [ 584.276081] env[61839]: ERROR nova.compute.manager [instance: 2032b746-2161-4487-ae4c-6159313241f4] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 584.276081] env[61839]: ERROR nova.compute.manager [instance: 2032b746-2161-4487-ae4c-6159313241f4] raise e [ 584.276081] env[61839]: ERROR nova.compute.manager [instance: 2032b746-2161-4487-ae4c-6159313241f4] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 584.276081] env[61839]: ERROR nova.compute.manager [instance: 2032b746-2161-4487-ae4c-6159313241f4] nwinfo = self.network_api.allocate_for_instance( [ 584.276081] env[61839]: ERROR nova.compute.manager [instance: 2032b746-2161-4487-ae4c-6159313241f4] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 584.276081] env[61839]: ERROR nova.compute.manager [instance: 2032b746-2161-4487-ae4c-6159313241f4] created_port_ids = self._update_ports_for_instance( [ 584.278366] env[61839]: ERROR nova.compute.manager [instance: 2032b746-2161-4487-ae4c-6159313241f4] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 584.278366] env[61839]: ERROR nova.compute.manager [instance: 2032b746-2161-4487-ae4c-6159313241f4] with excutils.save_and_reraise_exception(): [ 584.278366] env[61839]: ERROR nova.compute.manager [instance: 2032b746-2161-4487-ae4c-6159313241f4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 584.278366] env[61839]: ERROR nova.compute.manager [instance: 2032b746-2161-4487-ae4c-6159313241f4] self.force_reraise() [ 584.278366] env[61839]: ERROR nova.compute.manager [instance: 2032b746-2161-4487-ae4c-6159313241f4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 584.278366] env[61839]: ERROR nova.compute.manager [instance: 2032b746-2161-4487-ae4c-6159313241f4] raise self.value [ 584.278366] env[61839]: ERROR nova.compute.manager [instance: 2032b746-2161-4487-ae4c-6159313241f4] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in 
_update_ports_for_instance [ 584.278366] env[61839]: ERROR nova.compute.manager [instance: 2032b746-2161-4487-ae4c-6159313241f4] updated_port = self._update_port( [ 584.278366] env[61839]: ERROR nova.compute.manager [instance: 2032b746-2161-4487-ae4c-6159313241f4] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 584.278366] env[61839]: ERROR nova.compute.manager [instance: 2032b746-2161-4487-ae4c-6159313241f4] _ensure_no_port_binding_failure(port) [ 584.278366] env[61839]: ERROR nova.compute.manager [instance: 2032b746-2161-4487-ae4c-6159313241f4] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 584.278366] env[61839]: ERROR nova.compute.manager [instance: 2032b746-2161-4487-ae4c-6159313241f4] raise exception.PortBindingFailed(port_id=port['id']) [ 584.278811] env[61839]: ERROR nova.compute.manager [instance: 2032b746-2161-4487-ae4c-6159313241f4] nova.exception.PortBindingFailed: Binding failed for port 02380c98-1108-4354-a18d-f9cabaed08c3, please check neutron logs for more information. [ 584.278811] env[61839]: ERROR nova.compute.manager [instance: 2032b746-2161-4487-ae4c-6159313241f4] [ 584.278811] env[61839]: DEBUG nova.compute.utils [None req-0477abbc-1d43-4051-b701-9bb77cd00e21 tempest-ServerRescueTestJSON-1450719199 tempest-ServerRescueTestJSON-1450719199-project-member] [instance: 2032b746-2161-4487-ae4c-6159313241f4] Binding failed for port 02380c98-1108-4354-a18d-f9cabaed08c3, please check neutron logs for more information. {{(pid=61839) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 584.279701] env[61839]: DEBUG oslo_concurrency.lockutils [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.405s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 584.283479] env[61839]: DEBUG nova.compute.manager [None req-0477abbc-1d43-4051-b701-9bb77cd00e21 tempest-ServerRescueTestJSON-1450719199 tempest-ServerRescueTestJSON-1450719199-project-member] [instance: 2032b746-2161-4487-ae4c-6159313241f4] Build of instance 2032b746-2161-4487-ae4c-6159313241f4 was re-scheduled: Binding failed for port 02380c98-1108-4354-a18d-f9cabaed08c3, please check neutron logs for more information. 
{{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 584.283971] env[61839]: DEBUG nova.compute.manager [None req-0477abbc-1d43-4051-b701-9bb77cd00e21 tempest-ServerRescueTestJSON-1450719199 tempest-ServerRescueTestJSON-1450719199-project-member] [instance: 2032b746-2161-4487-ae4c-6159313241f4] Unplugging VIFs for instance {{(pid=61839) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 584.284212] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0477abbc-1d43-4051-b701-9bb77cd00e21 tempest-ServerRescueTestJSON-1450719199 tempest-ServerRescueTestJSON-1450719199-project-member] Acquiring lock "refresh_cache-2032b746-2161-4487-ae4c-6159313241f4" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 584.284364] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0477abbc-1d43-4051-b701-9bb77cd00e21 tempest-ServerRescueTestJSON-1450719199 tempest-ServerRescueTestJSON-1450719199-project-member] Acquired lock "refresh_cache-2032b746-2161-4487-ae4c-6159313241f4" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 584.284682] env[61839]: DEBUG nova.network.neutron [None req-0477abbc-1d43-4051-b701-9bb77cd00e21 tempest-ServerRescueTestJSON-1450719199 tempest-ServerRescueTestJSON-1450719199-project-member] [instance: 2032b746-2161-4487-ae4c-6159313241f4] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 584.498456] env[61839]: INFO nova.compute.manager [None req-26fc30cc-4ad1-4009-975c-8d3de6d95ff0 tempest-ServerExternalEventsTest-1581584313 tempest-ServerExternalEventsTest-1581584313-project-member] [instance: d0822954-42c2-4003-baf4-97bc2ce65768] Took 1.03 seconds to deallocate network for instance. [ 584.820168] env[61839]: DEBUG nova.network.neutron [None req-0477abbc-1d43-4051-b701-9bb77cd00e21 tempest-ServerRescueTestJSON-1450719199 tempest-ServerRescueTestJSON-1450719199-project-member] [instance: 2032b746-2161-4487-ae4c-6159313241f4] Instance cache missing network info. 
{{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 585.064363] env[61839]: DEBUG nova.network.neutron [None req-0477abbc-1d43-4051-b701-9bb77cd00e21 tempest-ServerRescueTestJSON-1450719199 tempest-ServerRescueTestJSON-1450719199-project-member] [instance: 2032b746-2161-4487-ae4c-6159313241f4] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 585.255380] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61870234-e3f2-463c-b15a-a59a4377be22 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.268070] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7c6e794-b48a-4663-8de2-5e13410c09c2 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.303428] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0e9cb6a-f64f-4048-8fe7-afd47c07d0a0 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.307584] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a61be0be-1acd-4bdd-b910-e27219e7a065 tempest-ServersAdminNegativeTestJSON-965769092 tempest-ServersAdminNegativeTestJSON-965769092-project-member] Acquiring lock "5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 585.307776] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a61be0be-1acd-4bdd-b910-e27219e7a065 tempest-ServersAdminNegativeTestJSON-965769092 tempest-ServersAdminNegativeTestJSON-965769092-project-member] Lock "5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 585.314407] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebe3056d-91a6-424d-8310-7b56976abbda {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.330691] env[61839]: DEBUG nova.compute.provider_tree [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 585.532460] env[61839]: INFO nova.scheduler.client.report [None req-26fc30cc-4ad1-4009-975c-8d3de6d95ff0 tempest-ServerExternalEventsTest-1581584313 tempest-ServerExternalEventsTest-1581584313-project-member] Deleted allocations for instance d0822954-42c2-4003-baf4-97bc2ce65768 [ 585.567538] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0477abbc-1d43-4051-b701-9bb77cd00e21 tempest-ServerRescueTestJSON-1450719199 tempest-ServerRescueTestJSON-1450719199-project-member] Releasing lock "refresh_cache-2032b746-2161-4487-ae4c-6159313241f4" {{(pid=61839) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 585.567644] env[61839]: DEBUG nova.compute.manager [None req-0477abbc-1d43-4051-b701-9bb77cd00e21 tempest-ServerRescueTestJSON-1450719199 tempest-ServerRescueTestJSON-1450719199-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61839) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 585.567822] env[61839]: DEBUG nova.compute.manager [None req-0477abbc-1d43-4051-b701-9bb77cd00e21 tempest-ServerRescueTestJSON-1450719199 tempest-ServerRescueTestJSON-1450719199-project-member] [instance: 2032b746-2161-4487-ae4c-6159313241f4] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 585.567987] env[61839]: DEBUG nova.network.neutron [None req-0477abbc-1d43-4051-b701-9bb77cd00e21 tempest-ServerRescueTestJSON-1450719199 tempest-ServerRescueTestJSON-1450719199-project-member] [instance: 2032b746-2161-4487-ae4c-6159313241f4] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 585.610659] env[61839]: DEBUG nova.network.neutron [None req-0477abbc-1d43-4051-b701-9bb77cd00e21 tempest-ServerRescueTestJSON-1450719199 tempest-ServerRescueTestJSON-1450719199-project-member] [instance: 2032b746-2161-4487-ae4c-6159313241f4] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 585.834519] env[61839]: DEBUG nova.scheduler.client.report [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 586.040413] env[61839]: DEBUG oslo_concurrency.lockutils [None req-26fc30cc-4ad1-4009-975c-8d3de6d95ff0 tempest-ServerExternalEventsTest-1581584313 tempest-ServerExternalEventsTest-1581584313-project-member] Lock "d0822954-42c2-4003-baf4-97bc2ce65768" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 50.694s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 586.114899] env[61839]: DEBUG nova.network.neutron [None req-0477abbc-1d43-4051-b701-9bb77cd00e21 tempest-ServerRescueTestJSON-1450719199 tempest-ServerRescueTestJSON-1450719199-project-member] [instance: 2032b746-2161-4487-ae4c-6159313241f4] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 586.343116] env[61839]: DEBUG oslo_concurrency.lockutils [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.063s 
{{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 586.343892] env[61839]: ERROR nova.compute.manager [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] [instance: 29dcaaa2-04fe-4835-acc9-41c433e6165f] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 73783eb6-0ed5-45d6-b54a-aad8df2a83fd, please check neutron logs for more information. [ 586.343892] env[61839]: ERROR nova.compute.manager [instance: 29dcaaa2-04fe-4835-acc9-41c433e6165f] Traceback (most recent call last): [ 586.343892] env[61839]: ERROR nova.compute.manager [instance: 29dcaaa2-04fe-4835-acc9-41c433e6165f] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 586.343892] env[61839]: ERROR nova.compute.manager [instance: 29dcaaa2-04fe-4835-acc9-41c433e6165f] self.driver.spawn(context, instance, image_meta, [ 586.343892] env[61839]: ERROR nova.compute.manager [instance: 29dcaaa2-04fe-4835-acc9-41c433e6165f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 586.343892] env[61839]: ERROR nova.compute.manager [instance: 29dcaaa2-04fe-4835-acc9-41c433e6165f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 586.343892] env[61839]: ERROR nova.compute.manager [instance: 29dcaaa2-04fe-4835-acc9-41c433e6165f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 586.343892] env[61839]: ERROR nova.compute.manager [instance: 29dcaaa2-04fe-4835-acc9-41c433e6165f] vm_ref = self.build_virtual_machine(instance, [ 586.343892] env[61839]: ERROR nova.compute.manager [instance: 29dcaaa2-04fe-4835-acc9-41c433e6165f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 586.343892] env[61839]: ERROR nova.compute.manager [instance: 29dcaaa2-04fe-4835-acc9-41c433e6165f] vif_infos = vmwarevif.get_vif_info(self._session, [ 586.343892] env[61839]: ERROR nova.compute.manager [instance: 29dcaaa2-04fe-4835-acc9-41c433e6165f] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 586.344367] env[61839]: ERROR nova.compute.manager [instance: 29dcaaa2-04fe-4835-acc9-41c433e6165f] for vif in network_info: [ 586.344367] env[61839]: ERROR nova.compute.manager [instance: 29dcaaa2-04fe-4835-acc9-41c433e6165f] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 586.344367] env[61839]: ERROR nova.compute.manager [instance: 29dcaaa2-04fe-4835-acc9-41c433e6165f] return self._sync_wrapper(fn, *args, **kwargs) [ 586.344367] env[61839]: ERROR nova.compute.manager [instance: 29dcaaa2-04fe-4835-acc9-41c433e6165f] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 586.344367] env[61839]: ERROR nova.compute.manager [instance: 29dcaaa2-04fe-4835-acc9-41c433e6165f] self.wait() [ 586.344367] env[61839]: ERROR nova.compute.manager [instance: 29dcaaa2-04fe-4835-acc9-41c433e6165f] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 586.344367] env[61839]: ERROR nova.compute.manager [instance: 29dcaaa2-04fe-4835-acc9-41c433e6165f] self[:] = self._gt.wait() [ 586.344367] env[61839]: ERROR nova.compute.manager [instance: 29dcaaa2-04fe-4835-acc9-41c433e6165f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 586.344367] env[61839]: ERROR nova.compute.manager [instance: 29dcaaa2-04fe-4835-acc9-41c433e6165f] return 
self._exit_event.wait() [ 586.344367] env[61839]: ERROR nova.compute.manager [instance: 29dcaaa2-04fe-4835-acc9-41c433e6165f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 586.344367] env[61839]: ERROR nova.compute.manager [instance: 29dcaaa2-04fe-4835-acc9-41c433e6165f] result = hub.switch() [ 586.344367] env[61839]: ERROR nova.compute.manager [instance: 29dcaaa2-04fe-4835-acc9-41c433e6165f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 586.344367] env[61839]: ERROR nova.compute.manager [instance: 29dcaaa2-04fe-4835-acc9-41c433e6165f] return self.greenlet.switch() [ 586.344776] env[61839]: ERROR nova.compute.manager [instance: 29dcaaa2-04fe-4835-acc9-41c433e6165f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 586.344776] env[61839]: ERROR nova.compute.manager [instance: 29dcaaa2-04fe-4835-acc9-41c433e6165f] result = function(*args, **kwargs) [ 586.344776] env[61839]: ERROR nova.compute.manager [instance: 29dcaaa2-04fe-4835-acc9-41c433e6165f] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 586.344776] env[61839]: ERROR nova.compute.manager [instance: 29dcaaa2-04fe-4835-acc9-41c433e6165f] return func(*args, **kwargs) [ 586.344776] env[61839]: ERROR nova.compute.manager [instance: 29dcaaa2-04fe-4835-acc9-41c433e6165f] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 586.344776] env[61839]: ERROR nova.compute.manager [instance: 29dcaaa2-04fe-4835-acc9-41c433e6165f] raise e [ 586.344776] env[61839]: ERROR nova.compute.manager [instance: 29dcaaa2-04fe-4835-acc9-41c433e6165f] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 586.344776] env[61839]: ERROR nova.compute.manager [instance: 29dcaaa2-04fe-4835-acc9-41c433e6165f] nwinfo = self.network_api.allocate_for_instance( [ 586.344776] env[61839]: ERROR nova.compute.manager [instance: 29dcaaa2-04fe-4835-acc9-41c433e6165f] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 586.344776] env[61839]: ERROR nova.compute.manager [instance: 29dcaaa2-04fe-4835-acc9-41c433e6165f] created_port_ids = self._update_ports_for_instance( [ 586.344776] env[61839]: ERROR nova.compute.manager [instance: 29dcaaa2-04fe-4835-acc9-41c433e6165f] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 586.344776] env[61839]: ERROR nova.compute.manager [instance: 29dcaaa2-04fe-4835-acc9-41c433e6165f] with excutils.save_and_reraise_exception(): [ 586.344776] env[61839]: ERROR nova.compute.manager [instance: 29dcaaa2-04fe-4835-acc9-41c433e6165f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 586.345168] env[61839]: ERROR nova.compute.manager [instance: 29dcaaa2-04fe-4835-acc9-41c433e6165f] self.force_reraise() [ 586.345168] env[61839]: ERROR nova.compute.manager [instance: 29dcaaa2-04fe-4835-acc9-41c433e6165f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 586.345168] env[61839]: ERROR nova.compute.manager [instance: 29dcaaa2-04fe-4835-acc9-41c433e6165f] raise self.value [ 586.345168] env[61839]: ERROR nova.compute.manager [instance: 29dcaaa2-04fe-4835-acc9-41c433e6165f] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 586.345168] env[61839]: ERROR nova.compute.manager [instance: 
29dcaaa2-04fe-4835-acc9-41c433e6165f] updated_port = self._update_port( [ 586.345168] env[61839]: ERROR nova.compute.manager [instance: 29dcaaa2-04fe-4835-acc9-41c433e6165f] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 586.345168] env[61839]: ERROR nova.compute.manager [instance: 29dcaaa2-04fe-4835-acc9-41c433e6165f] _ensure_no_port_binding_failure(port) [ 586.345168] env[61839]: ERROR nova.compute.manager [instance: 29dcaaa2-04fe-4835-acc9-41c433e6165f] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 586.345168] env[61839]: ERROR nova.compute.manager [instance: 29dcaaa2-04fe-4835-acc9-41c433e6165f] raise exception.PortBindingFailed(port_id=port['id']) [ 586.345168] env[61839]: ERROR nova.compute.manager [instance: 29dcaaa2-04fe-4835-acc9-41c433e6165f] nova.exception.PortBindingFailed: Binding failed for port 73783eb6-0ed5-45d6-b54a-aad8df2a83fd, please check neutron logs for more information. [ 586.345168] env[61839]: ERROR nova.compute.manager [instance: 29dcaaa2-04fe-4835-acc9-41c433e6165f] [ 586.345487] env[61839]: DEBUG nova.compute.utils [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] [instance: 29dcaaa2-04fe-4835-acc9-41c433e6165f] Binding failed for port 73783eb6-0ed5-45d6-b54a-aad8df2a83fd, please check neutron logs for more information. {{(pid=61839) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 586.345838] env[61839]: DEBUG oslo_concurrency.lockutils [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 16.120s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 586.349939] env[61839]: DEBUG nova.compute.manager [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] [instance: 29dcaaa2-04fe-4835-acc9-41c433e6165f] Build of instance 29dcaaa2-04fe-4835-acc9-41c433e6165f was re-scheduled: Binding failed for port 73783eb6-0ed5-45d6-b54a-aad8df2a83fd, please check neutron logs for more information. 
{{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 586.350368] env[61839]: DEBUG nova.compute.manager [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] [instance: 29dcaaa2-04fe-4835-acc9-41c433e6165f] Unplugging VIFs for instance {{(pid=61839) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 586.350591] env[61839]: DEBUG oslo_concurrency.lockutils [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] Acquiring lock "refresh_cache-29dcaaa2-04fe-4835-acc9-41c433e6165f" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 586.350753] env[61839]: DEBUG oslo_concurrency.lockutils [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] Acquired lock "refresh_cache-29dcaaa2-04fe-4835-acc9-41c433e6165f" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 586.350889] env[61839]: DEBUG nova.network.neutron [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] [instance: 29dcaaa2-04fe-4835-acc9-41c433e6165f] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 586.432926] env[61839]: DEBUG oslo_concurrency.lockutils [None req-d67a25ce-8bd3-48bd-87b7-725457f40be1 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] Acquiring lock "97560b6e-0f50-4cc0-b620-305c82938390" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 586.433378] env[61839]: DEBUG oslo_concurrency.lockutils [None req-d67a25ce-8bd3-48bd-87b7-725457f40be1 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] Lock "97560b6e-0f50-4cc0-b620-305c82938390" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 586.489294] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 586.489691] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 586.543320] env[61839]: DEBUG nova.compute.manager [None req-3f01d48c-4eb1-4941-a48e-1320be5c5916 tempest-FloatingIPsAssociationTestJSON-751321575 tempest-FloatingIPsAssociationTestJSON-751321575-project-member] [instance: 916834d8-4819-4167-8774-b0a665021ef8] Starting instance... 
{{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 586.621772] env[61839]: INFO nova.compute.manager [None req-0477abbc-1d43-4051-b701-9bb77cd00e21 tempest-ServerRescueTestJSON-1450719199 tempest-ServerRescueTestJSON-1450719199-project-member] [instance: 2032b746-2161-4487-ae4c-6159313241f4] Took 1.05 seconds to deallocate network for instance. [ 586.932587] env[61839]: DEBUG nova.network.neutron [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] [instance: 29dcaaa2-04fe-4835-acc9-41c433e6165f] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 586.999383] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 587.000573] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 587.000573] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 587.001258] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 587.074492] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3f01d48c-4eb1-4941-a48e-1320be5c5916 tempest-FloatingIPsAssociationTestJSON-751321575 tempest-FloatingIPsAssociationTestJSON-751321575-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 587.286127] env[61839]: DEBUG nova.network.neutron [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] [instance: 29dcaaa2-04fe-4835-acc9-41c433e6165f] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 587.426916] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34570aa6-ff82-4b95-8609-32e10b9e3e22 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.439279] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23f89ac7-d470-44e3-99e5-e93b9430c4a9 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.476838] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48923239-a6ee-4bd0-8e72-977704427bb5 {{(pid=61839) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.485969] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f19cd81e-76fa-4cd3-becb-06e5f4fb6c0a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.501725] env[61839]: DEBUG nova.compute.provider_tree [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 587.507912] env[61839]: DEBUG oslo_concurrency.lockutils [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 587.670532] env[61839]: INFO nova.scheduler.client.report [None req-0477abbc-1d43-4051-b701-9bb77cd00e21 tempest-ServerRescueTestJSON-1450719199 tempest-ServerRescueTestJSON-1450719199-project-member] Deleted allocations for instance 2032b746-2161-4487-ae4c-6159313241f4 [ 587.789334] env[61839]: DEBUG oslo_concurrency.lockutils [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] Releasing lock "refresh_cache-29dcaaa2-04fe-4835-acc9-41c433e6165f" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 587.791929] env[61839]: DEBUG nova.compute.manager [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61839) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 587.791929] env[61839]: DEBUG nova.compute.manager [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] [instance: 29dcaaa2-04fe-4835-acc9-41c433e6165f] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 587.791929] env[61839]: DEBUG nova.network.neutron [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] [instance: 29dcaaa2-04fe-4835-acc9-41c433e6165f] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 587.847729] env[61839]: DEBUG nova.network.neutron [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] [instance: 29dcaaa2-04fe-4835-acc9-41c433e6165f] Instance cache missing network info. 
{{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 588.009745] env[61839]: DEBUG nova.scheduler.client.report [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 588.185284] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0477abbc-1d43-4051-b701-9bb77cd00e21 tempest-ServerRescueTestJSON-1450719199 tempest-ServerRescueTestJSON-1450719199-project-member] Lock "2032b746-2161-4487-ae4c-6159313241f4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 52.734s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 588.354523] env[61839]: DEBUG nova.network.neutron [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] [instance: 29dcaaa2-04fe-4835-acc9-41c433e6165f] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 588.520209] env[61839]: DEBUG oslo_concurrency.lockutils [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.172s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 588.520209] env[61839]: ERROR nova.compute.manager [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] [instance: ec27ab37-2351-4ad1-b41f-8de9bfab8b9d] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 0a7d0ef0-1a14-4cc0-8b69-de52e22b919e, please check neutron logs for more information. 
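The traceback that follows is the standard path for this failure: the VMware driver's spawn() iterates the instance's network_info, which joins the asynchronous network-allocation greenthread, and that allocation died when Neutron returned the port with a failed binding. The last frames name _ensure_no_port_binding_failure in nova/network/neutron.py; below is a minimal, self-contained sketch of that check. The 'binding_failed' value is inferred from Neutron's binding:vif_type convention, and the exception class is a local stand-in for nova.exception.PortBindingFailed.

    # Sketch of the check in the final frames of the traceback below
    # (assumptions noted above; illustrative, not the Nova source).
    VIF_TYPE_BINDING_FAILED = 'binding_failed'

    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(f'Binding failed for port {port_id}, please check '
                             'neutron logs for more information.')

    def _ensure_no_port_binding_failure(port):
        # Neutron marks a port no mechanism driver could bind by setting
        # binding:vif_type to 'binding_failed'; Nova raises on that marker.
        if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
            raise PortBindingFailed(port_id=port['id'])

    # The port from this very traceback, as Neutron would have returned it:
    try:
        _ensure_no_port_binding_failure({
            'id': '0a7d0ef0-1a14-4cc0-8b69-de52e22b919e',
            'binding:vif_type': 'binding_failed',
        })
    except PortBindingFailed as exc:
        print(exc)
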
[ 588.520209] env[61839]: ERROR nova.compute.manager [instance: ec27ab37-2351-4ad1-b41f-8de9bfab8b9d] Traceback (most recent call last): [ 588.520209] env[61839]: ERROR nova.compute.manager [instance: ec27ab37-2351-4ad1-b41f-8de9bfab8b9d] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 588.520209] env[61839]: ERROR nova.compute.manager [instance: ec27ab37-2351-4ad1-b41f-8de9bfab8b9d] self.driver.spawn(context, instance, image_meta, [ 588.520209] env[61839]: ERROR nova.compute.manager [instance: ec27ab37-2351-4ad1-b41f-8de9bfab8b9d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 588.520209] env[61839]: ERROR nova.compute.manager [instance: ec27ab37-2351-4ad1-b41f-8de9bfab8b9d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 588.520209] env[61839]: ERROR nova.compute.manager [instance: ec27ab37-2351-4ad1-b41f-8de9bfab8b9d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 588.520209] env[61839]: ERROR nova.compute.manager [instance: ec27ab37-2351-4ad1-b41f-8de9bfab8b9d] vm_ref = self.build_virtual_machine(instance, [ 588.520629] env[61839]: ERROR nova.compute.manager [instance: ec27ab37-2351-4ad1-b41f-8de9bfab8b9d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 588.520629] env[61839]: ERROR nova.compute.manager [instance: ec27ab37-2351-4ad1-b41f-8de9bfab8b9d] vif_infos = vmwarevif.get_vif_info(self._session, [ 588.520629] env[61839]: ERROR nova.compute.manager [instance: ec27ab37-2351-4ad1-b41f-8de9bfab8b9d] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 588.520629] env[61839]: ERROR nova.compute.manager [instance: ec27ab37-2351-4ad1-b41f-8de9bfab8b9d] for vif in network_info: [ 588.520629] env[61839]: ERROR nova.compute.manager [instance: ec27ab37-2351-4ad1-b41f-8de9bfab8b9d] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 588.520629] env[61839]: ERROR nova.compute.manager [instance: ec27ab37-2351-4ad1-b41f-8de9bfab8b9d] return self._sync_wrapper(fn, *args, **kwargs) [ 588.520629] env[61839]: ERROR nova.compute.manager [instance: ec27ab37-2351-4ad1-b41f-8de9bfab8b9d] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 588.520629] env[61839]: ERROR nova.compute.manager [instance: ec27ab37-2351-4ad1-b41f-8de9bfab8b9d] self.wait() [ 588.520629] env[61839]: ERROR nova.compute.manager [instance: ec27ab37-2351-4ad1-b41f-8de9bfab8b9d] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 588.520629] env[61839]: ERROR nova.compute.manager [instance: ec27ab37-2351-4ad1-b41f-8de9bfab8b9d] self[:] = self._gt.wait() [ 588.520629] env[61839]: ERROR nova.compute.manager [instance: ec27ab37-2351-4ad1-b41f-8de9bfab8b9d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 588.520629] env[61839]: ERROR nova.compute.manager [instance: ec27ab37-2351-4ad1-b41f-8de9bfab8b9d] return self._exit_event.wait() [ 588.520629] env[61839]: ERROR nova.compute.manager [instance: ec27ab37-2351-4ad1-b41f-8de9bfab8b9d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 588.521047] env[61839]: ERROR nova.compute.manager [instance: ec27ab37-2351-4ad1-b41f-8de9bfab8b9d] result = hub.switch() [ 588.521047] env[61839]: ERROR nova.compute.manager [instance: ec27ab37-2351-4ad1-b41f-8de9bfab8b9d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
588.521047] env[61839]: ERROR nova.compute.manager [instance: ec27ab37-2351-4ad1-b41f-8de9bfab8b9d] return self.greenlet.switch() [ 588.521047] env[61839]: ERROR nova.compute.manager [instance: ec27ab37-2351-4ad1-b41f-8de9bfab8b9d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 588.521047] env[61839]: ERROR nova.compute.manager [instance: ec27ab37-2351-4ad1-b41f-8de9bfab8b9d] result = function(*args, **kwargs) [ 588.521047] env[61839]: ERROR nova.compute.manager [instance: ec27ab37-2351-4ad1-b41f-8de9bfab8b9d] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 588.521047] env[61839]: ERROR nova.compute.manager [instance: ec27ab37-2351-4ad1-b41f-8de9bfab8b9d] return func(*args, **kwargs) [ 588.521047] env[61839]: ERROR nova.compute.manager [instance: ec27ab37-2351-4ad1-b41f-8de9bfab8b9d] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 588.521047] env[61839]: ERROR nova.compute.manager [instance: ec27ab37-2351-4ad1-b41f-8de9bfab8b9d] raise e [ 588.521047] env[61839]: ERROR nova.compute.manager [instance: ec27ab37-2351-4ad1-b41f-8de9bfab8b9d] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 588.521047] env[61839]: ERROR nova.compute.manager [instance: ec27ab37-2351-4ad1-b41f-8de9bfab8b9d] nwinfo = self.network_api.allocate_for_instance( [ 588.521047] env[61839]: ERROR nova.compute.manager [instance: ec27ab37-2351-4ad1-b41f-8de9bfab8b9d] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 588.521047] env[61839]: ERROR nova.compute.manager [instance: ec27ab37-2351-4ad1-b41f-8de9bfab8b9d] created_port_ids = self._update_ports_for_instance( [ 588.521483] env[61839]: ERROR nova.compute.manager [instance: ec27ab37-2351-4ad1-b41f-8de9bfab8b9d] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 588.521483] env[61839]: ERROR nova.compute.manager [instance: ec27ab37-2351-4ad1-b41f-8de9bfab8b9d] with excutils.save_and_reraise_exception(): [ 588.521483] env[61839]: ERROR nova.compute.manager [instance: ec27ab37-2351-4ad1-b41f-8de9bfab8b9d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 588.521483] env[61839]: ERROR nova.compute.manager [instance: ec27ab37-2351-4ad1-b41f-8de9bfab8b9d] self.force_reraise() [ 588.521483] env[61839]: ERROR nova.compute.manager [instance: ec27ab37-2351-4ad1-b41f-8de9bfab8b9d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 588.521483] env[61839]: ERROR nova.compute.manager [instance: ec27ab37-2351-4ad1-b41f-8de9bfab8b9d] raise self.value [ 588.521483] env[61839]: ERROR nova.compute.manager [instance: ec27ab37-2351-4ad1-b41f-8de9bfab8b9d] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 588.521483] env[61839]: ERROR nova.compute.manager [instance: ec27ab37-2351-4ad1-b41f-8de9bfab8b9d] updated_port = self._update_port( [ 588.521483] env[61839]: ERROR nova.compute.manager [instance: ec27ab37-2351-4ad1-b41f-8de9bfab8b9d] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 588.521483] env[61839]: ERROR nova.compute.manager [instance: ec27ab37-2351-4ad1-b41f-8de9bfab8b9d] _ensure_no_port_binding_failure(port) [ 588.521483] env[61839]: ERROR nova.compute.manager [instance: ec27ab37-2351-4ad1-b41f-8de9bfab8b9d] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 588.521483] env[61839]: ERROR nova.compute.manager [instance: ec27ab37-2351-4ad1-b41f-8de9bfab8b9d] raise exception.PortBindingFailed(port_id=port['id']) [ 588.521929] env[61839]: ERROR nova.compute.manager [instance: ec27ab37-2351-4ad1-b41f-8de9bfab8b9d] nova.exception.PortBindingFailed: Binding failed for port 0a7d0ef0-1a14-4cc0-8b69-de52e22b919e, please check neutron logs for more information. [ 588.521929] env[61839]: ERROR nova.compute.manager [instance: ec27ab37-2351-4ad1-b41f-8de9bfab8b9d] [ 588.521929] env[61839]: DEBUG nova.compute.utils [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] [instance: ec27ab37-2351-4ad1-b41f-8de9bfab8b9d] Binding failed for port 0a7d0ef0-1a14-4cc0-8b69-de52e22b919e, please check neutron logs for more information. {{(pid=61839) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 588.531452] env[61839]: DEBUG oslo_concurrency.lockutils [None req-6d60a164-1a17-4d70-8bc1-56b11131db97 tempest-InstanceActionsV221TestJSON-240629897 tempest-InstanceActionsV221TestJSON-240629897-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.596s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 588.531452] env[61839]: INFO nova.compute.claims [None req-6d60a164-1a17-4d70-8bc1-56b11131db97 tempest-InstanceActionsV221TestJSON-240629897 tempest-InstanceActionsV221TestJSON-240629897-project-member] [instance: 1edc2966-2edc-453e-a80d-c4139d910a6d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 588.536677] env[61839]: DEBUG nova.compute.manager [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] [instance: ec27ab37-2351-4ad1-b41f-8de9bfab8b9d] Build of instance ec27ab37-2351-4ad1-b41f-8de9bfab8b9d was re-scheduled: Binding failed for port 0a7d0ef0-1a14-4cc0-8b69-de52e22b919e, please check neutron logs for more information. 
{{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 588.537611] env[61839]: DEBUG nova.compute.manager [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] [instance: ec27ab37-2351-4ad1-b41f-8de9bfab8b9d] Unplugging VIFs for instance {{(pid=61839) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 588.537876] env[61839]: DEBUG oslo_concurrency.lockutils [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] Acquiring lock "refresh_cache-ec27ab37-2351-4ad1-b41f-8de9bfab8b9d" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 588.538077] env[61839]: DEBUG oslo_concurrency.lockutils [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] Acquired lock "refresh_cache-ec27ab37-2351-4ad1-b41f-8de9bfab8b9d" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 588.538274] env[61839]: DEBUG nova.network.neutron [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] [instance: ec27ab37-2351-4ad1-b41f-8de9bfab8b9d] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 588.688835] env[61839]: DEBUG nova.compute.manager [None req-eb9c6f05-d841-4e49-8f1a-c00c24c2a91c tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] [instance: 7fdd773e-3a96-4728-b162-0227a415bc96] Starting instance... {{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 588.861795] env[61839]: INFO nova.compute.manager [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] [instance: 29dcaaa2-04fe-4835-acc9-41c433e6165f] Took 1.07 seconds to deallocate network for instance. [ 589.082593] env[61839]: DEBUG nova.network.neutron [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] [instance: ec27ab37-2351-4ad1-b41f-8de9bfab8b9d] Instance cache missing network info. 
{{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 589.232411] env[61839]: DEBUG oslo_concurrency.lockutils [None req-eb9c6f05-d841-4e49-8f1a-c00c24c2a91c tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 589.290930] env[61839]: DEBUG nova.network.neutron [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] [instance: ec27ab37-2351-4ad1-b41f-8de9bfab8b9d] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 589.801693] env[61839]: DEBUG oslo_concurrency.lockutils [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] Releasing lock "refresh_cache-ec27ab37-2351-4ad1-b41f-8de9bfab8b9d" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 589.802239] env[61839]: DEBUG nova.compute.manager [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61839) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 589.802239] env[61839]: DEBUG nova.compute.manager [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] [instance: ec27ab37-2351-4ad1-b41f-8de9bfab8b9d] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 589.802531] env[61839]: DEBUG nova.network.neutron [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] [instance: ec27ab37-2351-4ad1-b41f-8de9bfab8b9d] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 589.839433] env[61839]: DEBUG nova.network.neutron [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] [instance: ec27ab37-2351-4ad1-b41f-8de9bfab8b9d] Instance cache missing network info. 
{{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 589.923048] env[61839]: INFO nova.scheduler.client.report [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] Deleted allocations for instance 29dcaaa2-04fe-4835-acc9-41c433e6165f [ 590.078652] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4107b599-f3ba-4581-a077-15e060debb93 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.091139] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b2d2d17-d23f-4fef-af62-565c80fc2a2b {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.126708] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82f81e72-b69e-4958-901e-22d871b5a762 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.134209] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2101b97c-a1f7-43fd-93f2-be691e6a40a2 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.152328] env[61839]: DEBUG nova.compute.provider_tree [None req-6d60a164-1a17-4d70-8bc1-56b11131db97 tempest-InstanceActionsV221TestJSON-240629897 tempest-InstanceActionsV221TestJSON-240629897-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 590.159267] env[61839]: DEBUG oslo_concurrency.lockutils [None req-f3a9d5df-26df-4d8c-b25e-3cb525f9f793 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] Acquiring lock "d608405b-20d9-42ab-97e3-e129f9c1448b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 590.159398] env[61839]: DEBUG oslo_concurrency.lockutils [None req-f3a9d5df-26df-4d8c-b25e-3cb525f9f793 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] Lock "d608405b-20d9-42ab-97e3-e129f9c1448b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 590.342090] env[61839]: DEBUG nova.network.neutron [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] [instance: ec27ab37-2351-4ad1-b41f-8de9bfab8b9d] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 590.437691] env[61839]: DEBUG oslo_concurrency.lockutils [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] Lock "29dcaaa2-04fe-4835-acc9-41c433e6165f" "released" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 52.572s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 590.661764] env[61839]: DEBUG nova.scheduler.client.report [None req-6d60a164-1a17-4d70-8bc1-56b11131db97 tempest-InstanceActionsV221TestJSON-240629897 tempest-InstanceActionsV221TestJSON-240629897-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 590.847780] env[61839]: INFO nova.compute.manager [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] [instance: ec27ab37-2351-4ad1-b41f-8de9bfab8b9d] Took 1.05 seconds to deallocate network for instance. [ 590.940108] env[61839]: DEBUG nova.compute.manager [None req-3986eaa2-d0de-4161-82e0-803ae69da9c7 tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] [instance: f8b36479-70a1-4f4e-84f4-e3baf9a56c45] Starting instance... {{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 591.172829] env[61839]: DEBUG oslo_concurrency.lockutils [None req-6d60a164-1a17-4d70-8bc1-56b11131db97 tempest-InstanceActionsV221TestJSON-240629897 tempest-InstanceActionsV221TestJSON-240629897-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.645s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 591.173388] env[61839]: DEBUG nova.compute.manager [None req-6d60a164-1a17-4d70-8bc1-56b11131db97 tempest-InstanceActionsV221TestJSON-240629897 tempest-InstanceActionsV221TestJSON-240629897-project-member] [instance: 1edc2966-2edc-453e-a80d-c4139d910a6d] Start building networks asynchronously for instance. 
{{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 591.183290] env[61839]: DEBUG oslo_concurrency.lockutils [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 17.869s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 591.341090] env[61839]: DEBUG oslo_concurrency.lockutils [None req-91f7d267-6fad-4c7d-b66a-41e5344a2deb tempest-ServersTestBootFromVolume-874886378 tempest-ServersTestBootFromVolume-874886378-project-member] Acquiring lock "eca07795-319e-401d-8f05-41a29bab2689" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 591.341321] env[61839]: DEBUG oslo_concurrency.lockutils [None req-91f7d267-6fad-4c7d-b66a-41e5344a2deb tempest-ServersTestBootFromVolume-874886378 tempest-ServersTestBootFromVolume-874886378-project-member] Lock "eca07795-319e-401d-8f05-41a29bab2689" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 591.468396] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3986eaa2-d0de-4161-82e0-803ae69da9c7 tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 591.680185] env[61839]: DEBUG nova.compute.utils [None req-6d60a164-1a17-4d70-8bc1-56b11131db97 tempest-InstanceActionsV221TestJSON-240629897 tempest-InstanceActionsV221TestJSON-240629897-project-member] Using /dev/sd instead of None {{(pid=61839) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 591.683007] env[61839]: DEBUG nova.compute.manager [None req-6d60a164-1a17-4d70-8bc1-56b11131db97 tempest-InstanceActionsV221TestJSON-240629897 tempest-InstanceActionsV221TestJSON-240629897-project-member] [instance: 1edc2966-2edc-453e-a80d-c4139d910a6d] Allocating IP information in the background. 
{{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 591.683717] env[61839]: DEBUG nova.network.neutron [None req-6d60a164-1a17-4d70-8bc1-56b11131db97 tempest-InstanceActionsV221TestJSON-240629897 tempest-InstanceActionsV221TestJSON-240629897-project-member] [instance: 1edc2966-2edc-453e-a80d-c4139d910a6d] allocate_for_instance() {{(pid=61839) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 591.786229] env[61839]: DEBUG nova.policy [None req-6d60a164-1a17-4d70-8bc1-56b11131db97 tempest-InstanceActionsV221TestJSON-240629897 tempest-InstanceActionsV221TestJSON-240629897-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '43e70ba616514dcfa4f78a44563fb3d2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '315d70fd2c0742bd9d6cb00883bb6179', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61839) authorize /opt/stack/nova/nova/policy.py:201}} [ 591.911830] env[61839]: INFO nova.scheduler.client.report [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] Deleted allocations for instance ec27ab37-2351-4ad1-b41f-8de9bfab8b9d [ 591.987084] env[61839]: DEBUG oslo_concurrency.lockutils [None req-f0f367ff-b05e-4ad3-a843-4493b18ee6be tempest-ListImageFiltersTestJSON-1646419560 tempest-ListImageFiltersTestJSON-1646419560-project-member] Acquiring lock "fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 591.987342] env[61839]: DEBUG oslo_concurrency.lockutils [None req-f0f367ff-b05e-4ad3-a843-4493b18ee6be tempest-ListImageFiltersTestJSON-1646419560 tempest-ListImageFiltersTestJSON-1646419560-project-member] Lock "fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 592.109119] env[61839]: DEBUG oslo_concurrency.lockutils [None req-1fbb0b67-167b-4268-b2f4-5c0b4d6a4a9c tempest-ServersNegativeTestMultiTenantJSON-1778640072 tempest-ServersNegativeTestMultiTenantJSON-1778640072-project-member] Acquiring lock "4b7ef74e-4018-4c6e-b540-d65c986d1ff2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 592.109354] env[61839]: DEBUG oslo_concurrency.lockutils [None req-1fbb0b67-167b-4268-b2f4-5c0b4d6a4a9c tempest-ServersNegativeTestMultiTenantJSON-1778640072 tempest-ServersNegativeTestMultiTenantJSON-1778640072-project-member] Lock "4b7ef74e-4018-4c6e-b540-d65c986d1ff2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 592.187511] env[61839]: DEBUG nova.compute.manager [None req-6d60a164-1a17-4d70-8bc1-56b11131db97 
tempest-InstanceActionsV221TestJSON-240629897 tempest-InstanceActionsV221TestJSON-240629897-project-member] [instance: 1edc2966-2edc-453e-a80d-c4139d910a6d] Start building block device mappings for instance. {{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 592.270358] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2abebac-46fe-42f4-83dd-4ccee94e169c {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.278162] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f74bcc9-8514-4db5-9406-6410576a13d9 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.318463] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6828aa09-977d-4156-8725-8080d442101c {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.327961] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4afad0c9-2aca-41bc-9364-427a87c01540 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.335288] env[61839]: DEBUG nova.network.neutron [None req-6d60a164-1a17-4d70-8bc1-56b11131db97 tempest-InstanceActionsV221TestJSON-240629897 tempest-InstanceActionsV221TestJSON-240629897-project-member] [instance: 1edc2966-2edc-453e-a80d-c4139d910a6d] Successfully created port: efae4232-1d88-49a1-ae17-8b83c8c13197 {{(pid=61839) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 592.348530] env[61839]: DEBUG nova.compute.provider_tree [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 592.426887] env[61839]: DEBUG oslo_concurrency.lockutils [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] Lock "ec27ab37-2351-4ad1-b41f-8de9bfab8b9d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 54.591s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 592.851534] env[61839]: DEBUG nova.scheduler.client.report [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 592.891011] env[61839]: DEBUG oslo_concurrency.lockutils [None req-bfae6393-6992-4f8d-b92d-1fa0650840c0 
tempest-ListImageFiltersTestJSON-1646419560 tempest-ListImageFiltersTestJSON-1646419560-project-member] Acquiring lock "56993a6d-de55-4648-9fd9-31d06a57f300" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 592.891650] env[61839]: DEBUG oslo_concurrency.lockutils [None req-bfae6393-6992-4f8d-b92d-1fa0650840c0 tempest-ListImageFiltersTestJSON-1646419560 tempest-ListImageFiltersTestJSON-1646419560-project-member] Lock "56993a6d-de55-4648-9fd9-31d06a57f300" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 592.929536] env[61839]: DEBUG nova.compute.manager [None req-f03d1837-14e7-4fc0-adb1-ca8100ce1425 tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] [instance: 86a1fc77-26d9-44c7-8f1f-771315769619] Starting instance... {{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 593.205053] env[61839]: DEBUG nova.compute.manager [None req-6d60a164-1a17-4d70-8bc1-56b11131db97 tempest-InstanceActionsV221TestJSON-240629897 tempest-InstanceActionsV221TestJSON-240629897-project-member] [instance: 1edc2966-2edc-453e-a80d-c4139d910a6d] Start spawning the instance on the hypervisor. {{(pid=61839) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 593.248400] env[61839]: DEBUG nova.virt.hardware [None req-6d60a164-1a17-4d70-8bc1-56b11131db97 tempest-InstanceActionsV221TestJSON-240629897 tempest-InstanceActionsV221TestJSON-240629897-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-18T16:51:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-18T16:50:57Z,direct_url=,disk_format='vmdk',id=e497cc62-282a-4a70-9770-22d80d8a1013,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a98e034276e44534a9feace637762da7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-18T16:50:58Z,virtual_size=,visibility=), allow threads: False {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 593.248400] env[61839]: DEBUG nova.virt.hardware [None req-6d60a164-1a17-4d70-8bc1-56b11131db97 tempest-InstanceActionsV221TestJSON-240629897 tempest-InstanceActionsV221TestJSON-240629897-project-member] Flavor limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 593.248400] env[61839]: DEBUG nova.virt.hardware [None req-6d60a164-1a17-4d70-8bc1-56b11131db97 tempest-InstanceActionsV221TestJSON-240629897 tempest-InstanceActionsV221TestJSON-240629897-project-member] Image limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 593.248525] env[61839]: DEBUG nova.virt.hardware [None req-6d60a164-1a17-4d70-8bc1-56b11131db97 tempest-InstanceActionsV221TestJSON-240629897 tempest-InstanceActionsV221TestJSON-240629897-project-member] Flavor pref 0:0:0 
{{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 593.249210] env[61839]: DEBUG nova.virt.hardware [None req-6d60a164-1a17-4d70-8bc1-56b11131db97 tempest-InstanceActionsV221TestJSON-240629897 tempest-InstanceActionsV221TestJSON-240629897-project-member] Image pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 593.249539] env[61839]: DEBUG nova.virt.hardware [None req-6d60a164-1a17-4d70-8bc1-56b11131db97 tempest-InstanceActionsV221TestJSON-240629897 tempest-InstanceActionsV221TestJSON-240629897-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 593.249887] env[61839]: DEBUG nova.virt.hardware [None req-6d60a164-1a17-4d70-8bc1-56b11131db97 tempest-InstanceActionsV221TestJSON-240629897 tempest-InstanceActionsV221TestJSON-240629897-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 593.250838] env[61839]: DEBUG nova.virt.hardware [None req-6d60a164-1a17-4d70-8bc1-56b11131db97 tempest-InstanceActionsV221TestJSON-240629897 tempest-InstanceActionsV221TestJSON-240629897-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 593.250838] env[61839]: DEBUG nova.virt.hardware [None req-6d60a164-1a17-4d70-8bc1-56b11131db97 tempest-InstanceActionsV221TestJSON-240629897 tempest-InstanceActionsV221TestJSON-240629897-project-member] Got 1 possible topologies {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 593.250838] env[61839]: DEBUG nova.virt.hardware [None req-6d60a164-1a17-4d70-8bc1-56b11131db97 tempest-InstanceActionsV221TestJSON-240629897 tempest-InstanceActionsV221TestJSON-240629897-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 593.250838] env[61839]: DEBUG nova.virt.hardware [None req-6d60a164-1a17-4d70-8bc1-56b11131db97 tempest-InstanceActionsV221TestJSON-240629897 tempest-InstanceActionsV221TestJSON-240629897-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 593.251725] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a72e9d2-d00c-4acc-ab33-f498254c9230 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.261840] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d63c476-6e51-45d4-b128-92b6e49b1af5 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.358472] env[61839]: DEBUG oslo_concurrency.lockutils [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.180s {{(pid=61839) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 593.358669] env[61839]: ERROR nova.compute.manager [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] [instance: 89843511-d201-431b-918d-e789e38e4f68] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 6e1dec40-0217-4ce2-92ae-fb5211d70403, please check neutron logs for more information. [ 593.358669] env[61839]: ERROR nova.compute.manager [instance: 89843511-d201-431b-918d-e789e38e4f68] Traceback (most recent call last): [ 593.358669] env[61839]: ERROR nova.compute.manager [instance: 89843511-d201-431b-918d-e789e38e4f68] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 593.358669] env[61839]: ERROR nova.compute.manager [instance: 89843511-d201-431b-918d-e789e38e4f68] self.driver.spawn(context, instance, image_meta, [ 593.358669] env[61839]: ERROR nova.compute.manager [instance: 89843511-d201-431b-918d-e789e38e4f68] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 593.358669] env[61839]: ERROR nova.compute.manager [instance: 89843511-d201-431b-918d-e789e38e4f68] self._vmops.spawn(context, instance, image_meta, injected_files, [ 593.358669] env[61839]: ERROR nova.compute.manager [instance: 89843511-d201-431b-918d-e789e38e4f68] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 593.358669] env[61839]: ERROR nova.compute.manager [instance: 89843511-d201-431b-918d-e789e38e4f68] vm_ref = self.build_virtual_machine(instance, [ 593.358669] env[61839]: ERROR nova.compute.manager [instance: 89843511-d201-431b-918d-e789e38e4f68] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 593.358669] env[61839]: ERROR nova.compute.manager [instance: 89843511-d201-431b-918d-e789e38e4f68] vif_infos = vmwarevif.get_vif_info(self._session, [ 593.358669] env[61839]: ERROR nova.compute.manager [instance: 89843511-d201-431b-918d-e789e38e4f68] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 593.358919] env[61839]: ERROR nova.compute.manager [instance: 89843511-d201-431b-918d-e789e38e4f68] for vif in network_info: [ 593.358919] env[61839]: ERROR nova.compute.manager [instance: 89843511-d201-431b-918d-e789e38e4f68] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 593.358919] env[61839]: ERROR nova.compute.manager [instance: 89843511-d201-431b-918d-e789e38e4f68] return self._sync_wrapper(fn, *args, **kwargs) [ 593.358919] env[61839]: ERROR nova.compute.manager [instance: 89843511-d201-431b-918d-e789e38e4f68] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 593.358919] env[61839]: ERROR nova.compute.manager [instance: 89843511-d201-431b-918d-e789e38e4f68] self.wait() [ 593.358919] env[61839]: ERROR nova.compute.manager [instance: 89843511-d201-431b-918d-e789e38e4f68] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 593.358919] env[61839]: ERROR nova.compute.manager [instance: 89843511-d201-431b-918d-e789e38e4f68] self[:] = self._gt.wait() [ 593.358919] env[61839]: ERROR nova.compute.manager [instance: 89843511-d201-431b-918d-e789e38e4f68] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 593.358919] env[61839]: ERROR nova.compute.manager [instance: 89843511-d201-431b-918d-e789e38e4f68] return self._exit_event.wait() [ 
593.358919] env[61839]: ERROR nova.compute.manager [instance: 89843511-d201-431b-918d-e789e38e4f68] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 593.358919] env[61839]: ERROR nova.compute.manager [instance: 89843511-d201-431b-918d-e789e38e4f68] result = hub.switch() [ 593.358919] env[61839]: ERROR nova.compute.manager [instance: 89843511-d201-431b-918d-e789e38e4f68] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 593.358919] env[61839]: ERROR nova.compute.manager [instance: 89843511-d201-431b-918d-e789e38e4f68] return self.greenlet.switch() [ 593.359226] env[61839]: ERROR nova.compute.manager [instance: 89843511-d201-431b-918d-e789e38e4f68] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 593.359226] env[61839]: ERROR nova.compute.manager [instance: 89843511-d201-431b-918d-e789e38e4f68] result = function(*args, **kwargs) [ 593.359226] env[61839]: ERROR nova.compute.manager [instance: 89843511-d201-431b-918d-e789e38e4f68] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 593.359226] env[61839]: ERROR nova.compute.manager [instance: 89843511-d201-431b-918d-e789e38e4f68] return func(*args, **kwargs) [ 593.359226] env[61839]: ERROR nova.compute.manager [instance: 89843511-d201-431b-918d-e789e38e4f68] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 593.359226] env[61839]: ERROR nova.compute.manager [instance: 89843511-d201-431b-918d-e789e38e4f68] raise e [ 593.359226] env[61839]: ERROR nova.compute.manager [instance: 89843511-d201-431b-918d-e789e38e4f68] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 593.359226] env[61839]: ERROR nova.compute.manager [instance: 89843511-d201-431b-918d-e789e38e4f68] nwinfo = self.network_api.allocate_for_instance( [ 593.359226] env[61839]: ERROR nova.compute.manager [instance: 89843511-d201-431b-918d-e789e38e4f68] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 593.359226] env[61839]: ERROR nova.compute.manager [instance: 89843511-d201-431b-918d-e789e38e4f68] created_port_ids = self._update_ports_for_instance( [ 593.359226] env[61839]: ERROR nova.compute.manager [instance: 89843511-d201-431b-918d-e789e38e4f68] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 593.359226] env[61839]: ERROR nova.compute.manager [instance: 89843511-d201-431b-918d-e789e38e4f68] with excutils.save_and_reraise_exception(): [ 593.359226] env[61839]: ERROR nova.compute.manager [instance: 89843511-d201-431b-918d-e789e38e4f68] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 593.359485] env[61839]: ERROR nova.compute.manager [instance: 89843511-d201-431b-918d-e789e38e4f68] self.force_reraise() [ 593.359485] env[61839]: ERROR nova.compute.manager [instance: 89843511-d201-431b-918d-e789e38e4f68] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 593.359485] env[61839]: ERROR nova.compute.manager [instance: 89843511-d201-431b-918d-e789e38e4f68] raise self.value [ 593.359485] env[61839]: ERROR nova.compute.manager [instance: 89843511-d201-431b-918d-e789e38e4f68] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 593.359485] env[61839]: ERROR nova.compute.manager [instance: 89843511-d201-431b-918d-e789e38e4f68] 
updated_port = self._update_port( [ 593.359485] env[61839]: ERROR nova.compute.manager [instance: 89843511-d201-431b-918d-e789e38e4f68] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 593.359485] env[61839]: ERROR nova.compute.manager [instance: 89843511-d201-431b-918d-e789e38e4f68] _ensure_no_port_binding_failure(port) [ 593.359485] env[61839]: ERROR nova.compute.manager [instance: 89843511-d201-431b-918d-e789e38e4f68] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 593.359485] env[61839]: ERROR nova.compute.manager [instance: 89843511-d201-431b-918d-e789e38e4f68] raise exception.PortBindingFailed(port_id=port['id']) [ 593.359485] env[61839]: ERROR nova.compute.manager [instance: 89843511-d201-431b-918d-e789e38e4f68] nova.exception.PortBindingFailed: Binding failed for port 6e1dec40-0217-4ce2-92ae-fb5211d70403, please check neutron logs for more information. [ 593.359485] env[61839]: ERROR nova.compute.manager [instance: 89843511-d201-431b-918d-e789e38e4f68] [ 593.359726] env[61839]: DEBUG nova.compute.utils [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] [instance: 89843511-d201-431b-918d-e789e38e4f68] Binding failed for port 6e1dec40-0217-4ce2-92ae-fb5211d70403, please check neutron logs for more information. {{(pid=61839) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 593.360648] env[61839]: DEBUG oslo_concurrency.lockutils [None req-baec407e-7ec5-474d-acf6-1dc7891e7887 tempest-ImagesOneServerNegativeTestJSON-734658384 tempest-ImagesOneServerNegativeTestJSON-734658384-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 19.287s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 593.364388] env[61839]: DEBUG nova.compute.manager [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] [instance: 89843511-d201-431b-918d-e789e38e4f68] Build of instance 89843511-d201-431b-918d-e789e38e4f68 was re-scheduled: Binding failed for port 6e1dec40-0217-4ce2-92ae-fb5211d70403, please check neutron logs for more information. 
{{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 593.365165] env[61839]: DEBUG nova.compute.manager [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] [instance: 89843511-d201-431b-918d-e789e38e4f68] Unplugging VIFs for instance {{(pid=61839) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 593.365448] env[61839]: DEBUG oslo_concurrency.lockutils [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] Acquiring lock "refresh_cache-89843511-d201-431b-918d-e789e38e4f68" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 593.365643] env[61839]: DEBUG oslo_concurrency.lockutils [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] Acquired lock "refresh_cache-89843511-d201-431b-918d-e789e38e4f68" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 593.365846] env[61839]: DEBUG nova.network.neutron [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] [instance: 89843511-d201-431b-918d-e789e38e4f68] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 593.459046] env[61839]: DEBUG oslo_concurrency.lockutils [None req-f03d1837-14e7-4fc0-adb1-ca8100ce1425 tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 593.873456] env[61839]: DEBUG nova.compute.utils [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] [instance: 89843511-d201-431b-918d-e789e38e4f68] Can not refresh info_cache because instance was not found {{(pid=61839) refresh_info_cache_for_instance /opt/stack/nova/nova/compute/utils.py:1024}} [ 593.913870] env[61839]: DEBUG nova.network.neutron [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] [instance: 89843511-d201-431b-918d-e789e38e4f68] Instance cache missing network info. 
{{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 594.179085] env[61839]: DEBUG nova.network.neutron [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] [instance: 89843511-d201-431b-918d-e789e38e4f68] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 594.338269] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e1936db-178f-4a3f-8f26-87fb861fe5c9 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.350233] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e9a2e3d-4f92-4043-8fdc-88ba8709aee0 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.388691] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d8ddad9-b503-41d8-b0f5-6aadd40dc650 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.397342] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c28f2b1-11a2-4100-92a0-8a4a1cb66ca3 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.412466] env[61839]: DEBUG nova.compute.provider_tree [None req-baec407e-7ec5-474d-acf6-1dc7891e7887 tempest-ImagesOneServerNegativeTestJSON-734658384 tempest-ImagesOneServerNegativeTestJSON-734658384-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 594.675468] env[61839]: DEBUG nova.compute.manager [req-a29564e5-d54c-4913-8879-84eefe3aafe9 req-8c08ec79-2b72-41e1-b488-d703a063d2f4 service nova] [instance: 1edc2966-2edc-453e-a80d-c4139d910a6d] Received event network-changed-efae4232-1d88-49a1-ae17-8b83c8c13197 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 594.675468] env[61839]: DEBUG nova.compute.manager [req-a29564e5-d54c-4913-8879-84eefe3aafe9 req-8c08ec79-2b72-41e1-b488-d703a063d2f4 service nova] [instance: 1edc2966-2edc-453e-a80d-c4139d910a6d] Refreshing instance network info cache due to event network-changed-efae4232-1d88-49a1-ae17-8b83c8c13197. 
{{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 594.675468] env[61839]: DEBUG oslo_concurrency.lockutils [req-a29564e5-d54c-4913-8879-84eefe3aafe9 req-8c08ec79-2b72-41e1-b488-d703a063d2f4 service nova] Acquiring lock "refresh_cache-1edc2966-2edc-453e-a80d-c4139d910a6d" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 594.675468] env[61839]: DEBUG oslo_concurrency.lockutils [req-a29564e5-d54c-4913-8879-84eefe3aafe9 req-8c08ec79-2b72-41e1-b488-d703a063d2f4 service nova] Acquired lock "refresh_cache-1edc2966-2edc-453e-a80d-c4139d910a6d" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 594.675468] env[61839]: DEBUG nova.network.neutron [req-a29564e5-d54c-4913-8879-84eefe3aafe9 req-8c08ec79-2b72-41e1-b488-d703a063d2f4 service nova] [instance: 1edc2966-2edc-453e-a80d-c4139d910a6d] Refreshing network info cache for port efae4232-1d88-49a1-ae17-8b83c8c13197 {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 594.683664] env[61839]: DEBUG oslo_concurrency.lockutils [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] Releasing lock "refresh_cache-89843511-d201-431b-918d-e789e38e4f68" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 594.683664] env[61839]: DEBUG nova.compute.manager [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61839) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 594.683664] env[61839]: DEBUG nova.compute.manager [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] [instance: 89843511-d201-431b-918d-e789e38e4f68] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 594.683664] env[61839]: DEBUG nova.network.neutron [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] [instance: 89843511-d201-431b-918d-e789e38e4f68] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 594.731980] env[61839]: DEBUG nova.network.neutron [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] [instance: 89843511-d201-431b-918d-e789e38e4f68] Instance cache missing network info. 
{{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 594.915760] env[61839]: DEBUG nova.scheduler.client.report [None req-baec407e-7ec5-474d-acf6-1dc7891e7887 tempest-ImagesOneServerNegativeTestJSON-734658384 tempest-ImagesOneServerNegativeTestJSON-734658384-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 594.957353] env[61839]: ERROR nova.compute.manager [None req-6d60a164-1a17-4d70-8bc1-56b11131db97 tempest-InstanceActionsV221TestJSON-240629897 tempest-InstanceActionsV221TestJSON-240629897-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port efae4232-1d88-49a1-ae17-8b83c8c13197, please check neutron logs for more information. [ 594.957353] env[61839]: ERROR nova.compute.manager Traceback (most recent call last): [ 594.957353] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 594.957353] env[61839]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 594.957353] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 594.957353] env[61839]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 594.957353] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 594.957353] env[61839]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 594.957353] env[61839]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 594.957353] env[61839]: ERROR nova.compute.manager self.force_reraise() [ 594.957353] env[61839]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 594.957353] env[61839]: ERROR nova.compute.manager raise self.value [ 594.957353] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 594.957353] env[61839]: ERROR nova.compute.manager updated_port = self._update_port( [ 594.957353] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 594.957353] env[61839]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 594.957741] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 594.957741] env[61839]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 594.957741] env[61839]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port efae4232-1d88-49a1-ae17-8b83c8c13197, please check neutron logs for more information. 
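The traceback above bottoms out in _ensure_no_port_binding_failure (nova/network/neutron.py, line 294 in the frames shown), which converts a failed Neutron port binding into the PortBindingFailed that aborts the build. A minimal sketch of what that check does, assuming the port dict shape Neutron returns; the constant names here are illustrative and may differ from Nova's:

    # Hedged sketch approximating nova/network/neutron.py's
    # _ensure_no_port_binding_failure; not the verbatim implementation.

    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(
                "Binding failed for port %s, please check neutron logs "
                "for more information." % port_id)

    VIF_TYPE_BINDING_FAILED = 'binding_failed'  # sentinel Neutron reports

    def _ensure_no_port_binding_failure(port):
        # Neutron sets binding:vif_type to 'binding_failed' when no
        # mechanism driver could bind the port on the target host.
        if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
            raise PortBindingFailed(port_id=port['id'])

When this fires inside _allocate_network_async, the compute manager re-raises, aborts the resource claim, and reschedules the build, which is exactly the sequence the surrounding records trace.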
[ 594.957741] env[61839]: ERROR nova.compute.manager [ 594.960940] env[61839]: Traceback (most recent call last): [ 594.960940] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 594.960940] env[61839]: listener.cb(fileno) [ 594.960940] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 594.960940] env[61839]: result = function(*args, **kwargs) [ 594.960940] env[61839]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 594.960940] env[61839]: return func(*args, **kwargs) [ 594.960940] env[61839]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 594.960940] env[61839]: raise e [ 594.960940] env[61839]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 594.960940] env[61839]: nwinfo = self.network_api.allocate_for_instance( [ 594.960940] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 594.960940] env[61839]: created_port_ids = self._update_ports_for_instance( [ 594.960940] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 594.960940] env[61839]: with excutils.save_and_reraise_exception(): [ 594.960940] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 594.960940] env[61839]: self.force_reraise() [ 594.960940] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 594.960940] env[61839]: raise self.value [ 594.960940] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 594.960940] env[61839]: updated_port = self._update_port( [ 594.960940] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 594.960940] env[61839]: _ensure_no_port_binding_failure(port) [ 594.960940] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 594.960940] env[61839]: raise exception.PortBindingFailed(port_id=port['id']) [ 594.960940] env[61839]: nova.exception.PortBindingFailed: Binding failed for port efae4232-1d88-49a1-ae17-8b83c8c13197, please check neutron logs for more information. [ 594.960940] env[61839]: Removing descriptor: 17 [ 594.961832] env[61839]: ERROR nova.compute.manager [None req-6d60a164-1a17-4d70-8bc1-56b11131db97 tempest-InstanceActionsV221TestJSON-240629897 tempest-InstanceActionsV221TestJSON-240629897-project-member] [instance: 1edc2966-2edc-453e-a80d-c4139d910a6d] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port efae4232-1d88-49a1-ae17-8b83c8c13197, please check neutron logs for more information. 
[ 594.961832] env[61839]: ERROR nova.compute.manager [instance: 1edc2966-2edc-453e-a80d-c4139d910a6d] Traceback (most recent call last): [ 594.961832] env[61839]: ERROR nova.compute.manager [instance: 1edc2966-2edc-453e-a80d-c4139d910a6d] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 594.961832] env[61839]: ERROR nova.compute.manager [instance: 1edc2966-2edc-453e-a80d-c4139d910a6d] yield resources [ 594.961832] env[61839]: ERROR nova.compute.manager [instance: 1edc2966-2edc-453e-a80d-c4139d910a6d] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 594.961832] env[61839]: ERROR nova.compute.manager [instance: 1edc2966-2edc-453e-a80d-c4139d910a6d] self.driver.spawn(context, instance, image_meta, [ 594.961832] env[61839]: ERROR nova.compute.manager [instance: 1edc2966-2edc-453e-a80d-c4139d910a6d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 594.961832] env[61839]: ERROR nova.compute.manager [instance: 1edc2966-2edc-453e-a80d-c4139d910a6d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 594.961832] env[61839]: ERROR nova.compute.manager [instance: 1edc2966-2edc-453e-a80d-c4139d910a6d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 594.961832] env[61839]: ERROR nova.compute.manager [instance: 1edc2966-2edc-453e-a80d-c4139d910a6d] vm_ref = self.build_virtual_machine(instance, [ 594.961832] env[61839]: ERROR nova.compute.manager [instance: 1edc2966-2edc-453e-a80d-c4139d910a6d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 594.962109] env[61839]: ERROR nova.compute.manager [instance: 1edc2966-2edc-453e-a80d-c4139d910a6d] vif_infos = vmwarevif.get_vif_info(self._session, [ 594.962109] env[61839]: ERROR nova.compute.manager [instance: 1edc2966-2edc-453e-a80d-c4139d910a6d] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 594.962109] env[61839]: ERROR nova.compute.manager [instance: 1edc2966-2edc-453e-a80d-c4139d910a6d] for vif in network_info: [ 594.962109] env[61839]: ERROR nova.compute.manager [instance: 1edc2966-2edc-453e-a80d-c4139d910a6d] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 594.962109] env[61839]: ERROR nova.compute.manager [instance: 1edc2966-2edc-453e-a80d-c4139d910a6d] return self._sync_wrapper(fn, *args, **kwargs) [ 594.962109] env[61839]: ERROR nova.compute.manager [instance: 1edc2966-2edc-453e-a80d-c4139d910a6d] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 594.962109] env[61839]: ERROR nova.compute.manager [instance: 1edc2966-2edc-453e-a80d-c4139d910a6d] self.wait() [ 594.962109] env[61839]: ERROR nova.compute.manager [instance: 1edc2966-2edc-453e-a80d-c4139d910a6d] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 594.962109] env[61839]: ERROR nova.compute.manager [instance: 1edc2966-2edc-453e-a80d-c4139d910a6d] self[:] = self._gt.wait() [ 594.962109] env[61839]: ERROR nova.compute.manager [instance: 1edc2966-2edc-453e-a80d-c4139d910a6d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 594.962109] env[61839]: ERROR nova.compute.manager [instance: 1edc2966-2edc-453e-a80d-c4139d910a6d] return self._exit_event.wait() [ 594.962109] env[61839]: ERROR nova.compute.manager [instance: 1edc2966-2edc-453e-a80d-c4139d910a6d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 594.962109] env[61839]: ERROR 
nova.compute.manager [instance: 1edc2966-2edc-453e-a80d-c4139d910a6d] result = hub.switch() [ 594.962458] env[61839]: ERROR nova.compute.manager [instance: 1edc2966-2edc-453e-a80d-c4139d910a6d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 594.962458] env[61839]: ERROR nova.compute.manager [instance: 1edc2966-2edc-453e-a80d-c4139d910a6d] return self.greenlet.switch() [ 594.962458] env[61839]: ERROR nova.compute.manager [instance: 1edc2966-2edc-453e-a80d-c4139d910a6d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 594.962458] env[61839]: ERROR nova.compute.manager [instance: 1edc2966-2edc-453e-a80d-c4139d910a6d] result = function(*args, **kwargs) [ 594.962458] env[61839]: ERROR nova.compute.manager [instance: 1edc2966-2edc-453e-a80d-c4139d910a6d] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 594.962458] env[61839]: ERROR nova.compute.manager [instance: 1edc2966-2edc-453e-a80d-c4139d910a6d] return func(*args, **kwargs) [ 594.962458] env[61839]: ERROR nova.compute.manager [instance: 1edc2966-2edc-453e-a80d-c4139d910a6d] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 594.962458] env[61839]: ERROR nova.compute.manager [instance: 1edc2966-2edc-453e-a80d-c4139d910a6d] raise e [ 594.962458] env[61839]: ERROR nova.compute.manager [instance: 1edc2966-2edc-453e-a80d-c4139d910a6d] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 594.962458] env[61839]: ERROR nova.compute.manager [instance: 1edc2966-2edc-453e-a80d-c4139d910a6d] nwinfo = self.network_api.allocate_for_instance( [ 594.962458] env[61839]: ERROR nova.compute.manager [instance: 1edc2966-2edc-453e-a80d-c4139d910a6d] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 594.962458] env[61839]: ERROR nova.compute.manager [instance: 1edc2966-2edc-453e-a80d-c4139d910a6d] created_port_ids = self._update_ports_for_instance( [ 594.962458] env[61839]: ERROR nova.compute.manager [instance: 1edc2966-2edc-453e-a80d-c4139d910a6d] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 594.962737] env[61839]: ERROR nova.compute.manager [instance: 1edc2966-2edc-453e-a80d-c4139d910a6d] with excutils.save_and_reraise_exception(): [ 594.962737] env[61839]: ERROR nova.compute.manager [instance: 1edc2966-2edc-453e-a80d-c4139d910a6d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 594.962737] env[61839]: ERROR nova.compute.manager [instance: 1edc2966-2edc-453e-a80d-c4139d910a6d] self.force_reraise() [ 594.962737] env[61839]: ERROR nova.compute.manager [instance: 1edc2966-2edc-453e-a80d-c4139d910a6d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 594.962737] env[61839]: ERROR nova.compute.manager [instance: 1edc2966-2edc-453e-a80d-c4139d910a6d] raise self.value [ 594.962737] env[61839]: ERROR nova.compute.manager [instance: 1edc2966-2edc-453e-a80d-c4139d910a6d] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 594.962737] env[61839]: ERROR nova.compute.manager [instance: 1edc2966-2edc-453e-a80d-c4139d910a6d] updated_port = self._update_port( [ 594.962737] env[61839]: ERROR nova.compute.manager [instance: 1edc2966-2edc-453e-a80d-c4139d910a6d] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 594.962737] 
env[61839]: ERROR nova.compute.manager [instance: 1edc2966-2edc-453e-a80d-c4139d910a6d] _ensure_no_port_binding_failure(port) [ 594.962737] env[61839]: ERROR nova.compute.manager [instance: 1edc2966-2edc-453e-a80d-c4139d910a6d] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 594.962737] env[61839]: ERROR nova.compute.manager [instance: 1edc2966-2edc-453e-a80d-c4139d910a6d] raise exception.PortBindingFailed(port_id=port['id']) [ 594.962737] env[61839]: ERROR nova.compute.manager [instance: 1edc2966-2edc-453e-a80d-c4139d910a6d] nova.exception.PortBindingFailed: Binding failed for port efae4232-1d88-49a1-ae17-8b83c8c13197, please check neutron logs for more information. [ 594.962737] env[61839]: ERROR nova.compute.manager [instance: 1edc2966-2edc-453e-a80d-c4139d910a6d] [ 594.963016] env[61839]: INFO nova.compute.manager [None req-6d60a164-1a17-4d70-8bc1-56b11131db97 tempest-InstanceActionsV221TestJSON-240629897 tempest-InstanceActionsV221TestJSON-240629897-project-member] [instance: 1edc2966-2edc-453e-a80d-c4139d910a6d] Terminating instance [ 594.965566] env[61839]: DEBUG oslo_concurrency.lockutils [None req-6d60a164-1a17-4d70-8bc1-56b11131db97 tempest-InstanceActionsV221TestJSON-240629897 tempest-InstanceActionsV221TestJSON-240629897-project-member] Acquiring lock "refresh_cache-1edc2966-2edc-453e-a80d-c4139d910a6d" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 595.211176] env[61839]: DEBUG nova.network.neutron [req-a29564e5-d54c-4913-8879-84eefe3aafe9 req-8c08ec79-2b72-41e1-b488-d703a063d2f4 service nova] [instance: 1edc2966-2edc-453e-a80d-c4139d910a6d] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 595.236048] env[61839]: DEBUG nova.network.neutron [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] [instance: 89843511-d201-431b-918d-e789e38e4f68] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 595.421972] env[61839]: DEBUG oslo_concurrency.lockutils [None req-baec407e-7ec5-474d-acf6-1dc7891e7887 tempest-ImagesOneServerNegativeTestJSON-734658384 tempest-ImagesOneServerNegativeTestJSON-734658384-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.061s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 595.425893] env[61839]: ERROR nova.compute.manager [None req-baec407e-7ec5-474d-acf6-1dc7891e7887 tempest-ImagesOneServerNegativeTestJSON-734658384 tempest-ImagesOneServerNegativeTestJSON-734658384-project-member] [instance: 0b78efda-51c7-4d51-be31-005ff0d44ede] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port e2a37b48-e4f8-4dc0-ac05-2fc0ce8bc49f, please check neutron logs for more information. 
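Every traceback above passes through oslo_utils.excutils.save_and_reraise_exception (the __exit__, force_reraise and "raise self.value" frames). Its purpose is to let cleanup code run inside an except block and then re-raise the original exception. A simplified sketch of the idea; the real oslo.utils class additionally supports reraise=False and logs the saved exception:

    import sys

    class save_and_reraise_exception:
        # Simplified sketch of the oslo_utils.excutils context manager
        # whose frames appear in the tracebacks above.
        def __enter__(self):
            self.value = sys.exc_info()[1]  # exception being handled
            return self

        def __exit__(self, exc_type, exc_val, exc_tb):
            if exc_type is None and self.value is not None:
                # Cleanup in the with-body succeeded: re-raise the
                # original error ("raise self.value" in the log frames).
                raise self.value
            return False  # let a new exception from the body propagate

    # Typical call site, mirroring _update_ports_for_instance:
    #     except Exception:
    #         with save_and_reraise_exception():
    #             ...roll back any ports already created...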
[ 595.425893] env[61839]: ERROR nova.compute.manager [instance: 0b78efda-51c7-4d51-be31-005ff0d44ede] Traceback (most recent call last): [ 595.425893] env[61839]: ERROR nova.compute.manager [instance: 0b78efda-51c7-4d51-be31-005ff0d44ede] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 595.425893] env[61839]: ERROR nova.compute.manager [instance: 0b78efda-51c7-4d51-be31-005ff0d44ede] self.driver.spawn(context, instance, image_meta, [ 595.425893] env[61839]: ERROR nova.compute.manager [instance: 0b78efda-51c7-4d51-be31-005ff0d44ede] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 595.425893] env[61839]: ERROR nova.compute.manager [instance: 0b78efda-51c7-4d51-be31-005ff0d44ede] self._vmops.spawn(context, instance, image_meta, injected_files, [ 595.425893] env[61839]: ERROR nova.compute.manager [instance: 0b78efda-51c7-4d51-be31-005ff0d44ede] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 595.425893] env[61839]: ERROR nova.compute.manager [instance: 0b78efda-51c7-4d51-be31-005ff0d44ede] vm_ref = self.build_virtual_machine(instance, [ 595.425893] env[61839]: ERROR nova.compute.manager [instance: 0b78efda-51c7-4d51-be31-005ff0d44ede] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 595.425893] env[61839]: ERROR nova.compute.manager [instance: 0b78efda-51c7-4d51-be31-005ff0d44ede] vif_infos = vmwarevif.get_vif_info(self._session, [ 595.425893] env[61839]: ERROR nova.compute.manager [instance: 0b78efda-51c7-4d51-be31-005ff0d44ede] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 595.426301] env[61839]: ERROR nova.compute.manager [instance: 0b78efda-51c7-4d51-be31-005ff0d44ede] for vif in network_info: [ 595.426301] env[61839]: ERROR nova.compute.manager [instance: 0b78efda-51c7-4d51-be31-005ff0d44ede] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 595.426301] env[61839]: ERROR nova.compute.manager [instance: 0b78efda-51c7-4d51-be31-005ff0d44ede] return self._sync_wrapper(fn, *args, **kwargs) [ 595.426301] env[61839]: ERROR nova.compute.manager [instance: 0b78efda-51c7-4d51-be31-005ff0d44ede] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 595.426301] env[61839]: ERROR nova.compute.manager [instance: 0b78efda-51c7-4d51-be31-005ff0d44ede] self.wait() [ 595.426301] env[61839]: ERROR nova.compute.manager [instance: 0b78efda-51c7-4d51-be31-005ff0d44ede] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 595.426301] env[61839]: ERROR nova.compute.manager [instance: 0b78efda-51c7-4d51-be31-005ff0d44ede] self[:] = self._gt.wait() [ 595.426301] env[61839]: ERROR nova.compute.manager [instance: 0b78efda-51c7-4d51-be31-005ff0d44ede] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 595.426301] env[61839]: ERROR nova.compute.manager [instance: 0b78efda-51c7-4d51-be31-005ff0d44ede] return self._exit_event.wait() [ 595.426301] env[61839]: ERROR nova.compute.manager [instance: 0b78efda-51c7-4d51-be31-005ff0d44ede] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 595.426301] env[61839]: ERROR nova.compute.manager [instance: 0b78efda-51c7-4d51-be31-005ff0d44ede] result = hub.switch() [ 595.426301] env[61839]: ERROR nova.compute.manager [instance: 0b78efda-51c7-4d51-be31-005ff0d44ede] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
595.426301] env[61839]: ERROR nova.compute.manager [instance: 0b78efda-51c7-4d51-be31-005ff0d44ede] return self.greenlet.switch() [ 595.426634] env[61839]: ERROR nova.compute.manager [instance: 0b78efda-51c7-4d51-be31-005ff0d44ede] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 595.426634] env[61839]: ERROR nova.compute.manager [instance: 0b78efda-51c7-4d51-be31-005ff0d44ede] result = function(*args, **kwargs) [ 595.426634] env[61839]: ERROR nova.compute.manager [instance: 0b78efda-51c7-4d51-be31-005ff0d44ede] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 595.426634] env[61839]: ERROR nova.compute.manager [instance: 0b78efda-51c7-4d51-be31-005ff0d44ede] return func(*args, **kwargs) [ 595.426634] env[61839]: ERROR nova.compute.manager [instance: 0b78efda-51c7-4d51-be31-005ff0d44ede] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 595.426634] env[61839]: ERROR nova.compute.manager [instance: 0b78efda-51c7-4d51-be31-005ff0d44ede] raise e [ 595.426634] env[61839]: ERROR nova.compute.manager [instance: 0b78efda-51c7-4d51-be31-005ff0d44ede] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 595.426634] env[61839]: ERROR nova.compute.manager [instance: 0b78efda-51c7-4d51-be31-005ff0d44ede] nwinfo = self.network_api.allocate_for_instance( [ 595.426634] env[61839]: ERROR nova.compute.manager [instance: 0b78efda-51c7-4d51-be31-005ff0d44ede] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 595.426634] env[61839]: ERROR nova.compute.manager [instance: 0b78efda-51c7-4d51-be31-005ff0d44ede] created_port_ids = self._update_ports_for_instance( [ 595.426634] env[61839]: ERROR nova.compute.manager [instance: 0b78efda-51c7-4d51-be31-005ff0d44ede] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 595.426634] env[61839]: ERROR nova.compute.manager [instance: 0b78efda-51c7-4d51-be31-005ff0d44ede] with excutils.save_and_reraise_exception(): [ 595.426634] env[61839]: ERROR nova.compute.manager [instance: 0b78efda-51c7-4d51-be31-005ff0d44ede] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 595.426901] env[61839]: ERROR nova.compute.manager [instance: 0b78efda-51c7-4d51-be31-005ff0d44ede] self.force_reraise() [ 595.426901] env[61839]: ERROR nova.compute.manager [instance: 0b78efda-51c7-4d51-be31-005ff0d44ede] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 595.426901] env[61839]: ERROR nova.compute.manager [instance: 0b78efda-51c7-4d51-be31-005ff0d44ede] raise self.value [ 595.426901] env[61839]: ERROR nova.compute.manager [instance: 0b78efda-51c7-4d51-be31-005ff0d44ede] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 595.426901] env[61839]: ERROR nova.compute.manager [instance: 0b78efda-51c7-4d51-be31-005ff0d44ede] updated_port = self._update_port( [ 595.426901] env[61839]: ERROR nova.compute.manager [instance: 0b78efda-51c7-4d51-be31-005ff0d44ede] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 595.426901] env[61839]: ERROR nova.compute.manager [instance: 0b78efda-51c7-4d51-be31-005ff0d44ede] _ensure_no_port_binding_failure(port) [ 595.426901] env[61839]: ERROR nova.compute.manager [instance: 0b78efda-51c7-4d51-be31-005ff0d44ede] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 595.426901] env[61839]: ERROR nova.compute.manager [instance: 0b78efda-51c7-4d51-be31-005ff0d44ede] raise exception.PortBindingFailed(port_id=port['id']) [ 595.426901] env[61839]: ERROR nova.compute.manager [instance: 0b78efda-51c7-4d51-be31-005ff0d44ede] nova.exception.PortBindingFailed: Binding failed for port e2a37b48-e4f8-4dc0-ac05-2fc0ce8bc49f, please check neutron logs for more information. [ 595.426901] env[61839]: ERROR nova.compute.manager [instance: 0b78efda-51c7-4d51-be31-005ff0d44ede] [ 595.427347] env[61839]: DEBUG nova.compute.utils [None req-baec407e-7ec5-474d-acf6-1dc7891e7887 tempest-ImagesOneServerNegativeTestJSON-734658384 tempest-ImagesOneServerNegativeTestJSON-734658384-project-member] [instance: 0b78efda-51c7-4d51-be31-005ff0d44ede] Binding failed for port e2a37b48-e4f8-4dc0-ac05-2fc0ce8bc49f, please check neutron logs for more information. {{(pid=61839) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 595.427347] env[61839]: DEBUG oslo_concurrency.lockutils [None req-35b58d39-5733-4319-be91-b1dc8063c680 tempest-FloatingIPsAssociationNegativeTestJSON-254000745 tempest-FloatingIPsAssociationNegativeTestJSON-254000745-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.675s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 595.427347] env[61839]: INFO nova.compute.claims [None req-35b58d39-5733-4319-be91-b1dc8063c680 tempest-FloatingIPsAssociationNegativeTestJSON-254000745 tempest-FloatingIPsAssociationNegativeTestJSON-254000745-project-member] [instance: 9e14bd1a-d6c2-4f4a-8919-27647ae5742b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 595.429158] env[61839]: DEBUG nova.compute.manager [None req-baec407e-7ec5-474d-acf6-1dc7891e7887 tempest-ImagesOneServerNegativeTestJSON-734658384 tempest-ImagesOneServerNegativeTestJSON-734658384-project-member] [instance: 0b78efda-51c7-4d51-be31-005ff0d44ede] Build of instance 0b78efda-51c7-4d51-be31-005ff0d44ede was re-scheduled: Binding failed for port e2a37b48-e4f8-4dc0-ac05-2fc0ce8bc49f, please check neutron logs for more information. 
{{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 595.429687] env[61839]: DEBUG nova.compute.manager [None req-baec407e-7ec5-474d-acf6-1dc7891e7887 tempest-ImagesOneServerNegativeTestJSON-734658384 tempest-ImagesOneServerNegativeTestJSON-734658384-project-member] [instance: 0b78efda-51c7-4d51-be31-005ff0d44ede] Unplugging VIFs for instance {{(pid=61839) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 595.429857] env[61839]: DEBUG oslo_concurrency.lockutils [None req-baec407e-7ec5-474d-acf6-1dc7891e7887 tempest-ImagesOneServerNegativeTestJSON-734658384 tempest-ImagesOneServerNegativeTestJSON-734658384-project-member] Acquiring lock "refresh_cache-0b78efda-51c7-4d51-be31-005ff0d44ede" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 595.430010] env[61839]: DEBUG oslo_concurrency.lockutils [None req-baec407e-7ec5-474d-acf6-1dc7891e7887 tempest-ImagesOneServerNegativeTestJSON-734658384 tempest-ImagesOneServerNegativeTestJSON-734658384-project-member] Acquired lock "refresh_cache-0b78efda-51c7-4d51-be31-005ff0d44ede" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 595.430173] env[61839]: DEBUG nova.network.neutron [None req-baec407e-7ec5-474d-acf6-1dc7891e7887 tempest-ImagesOneServerNegativeTestJSON-734658384 tempest-ImagesOneServerNegativeTestJSON-734658384-project-member] [instance: 0b78efda-51c7-4d51-be31-005ff0d44ede] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 595.642094] env[61839]: DEBUG nova.network.neutron [req-a29564e5-d54c-4913-8879-84eefe3aafe9 req-8c08ec79-2b72-41e1-b488-d703a063d2f4 service nova] [instance: 1edc2966-2edc-453e-a80d-c4139d910a6d] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 595.741258] env[61839]: INFO nova.compute.manager [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] [instance: 89843511-d201-431b-918d-e789e38e4f68] Took 1.06 seconds to deallocate network for instance. [ 595.986811] env[61839]: DEBUG nova.network.neutron [None req-baec407e-7ec5-474d-acf6-1dc7891e7887 tempest-ImagesOneServerNegativeTestJSON-734658384 tempest-ImagesOneServerNegativeTestJSON-734658384-project-member] [instance: 0b78efda-51c7-4d51-be31-005ff0d44ede] Instance cache missing network info. 
{{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 596.146043] env[61839]: DEBUG oslo_concurrency.lockutils [req-a29564e5-d54c-4913-8879-84eefe3aafe9 req-8c08ec79-2b72-41e1-b488-d703a063d2f4 service nova] Releasing lock "refresh_cache-1edc2966-2edc-453e-a80d-c4139d910a6d" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 596.146043] env[61839]: DEBUG oslo_concurrency.lockutils [None req-6d60a164-1a17-4d70-8bc1-56b11131db97 tempest-InstanceActionsV221TestJSON-240629897 tempest-InstanceActionsV221TestJSON-240629897-project-member] Acquired lock "refresh_cache-1edc2966-2edc-453e-a80d-c4139d910a6d" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 596.146043] env[61839]: DEBUG nova.network.neutron [None req-6d60a164-1a17-4d70-8bc1-56b11131db97 tempest-InstanceActionsV221TestJSON-240629897 tempest-InstanceActionsV221TestJSON-240629897-project-member] [instance: 1edc2966-2edc-453e-a80d-c4139d910a6d] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 596.228215] env[61839]: DEBUG nova.network.neutron [None req-baec407e-7ec5-474d-acf6-1dc7891e7887 tempest-ImagesOneServerNegativeTestJSON-734658384 tempest-ImagesOneServerNegativeTestJSON-734658384-project-member] [instance: 0b78efda-51c7-4d51-be31-005ff0d44ede] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 596.700116] env[61839]: DEBUG nova.network.neutron [None req-6d60a164-1a17-4d70-8bc1-56b11131db97 tempest-InstanceActionsV221TestJSON-240629897 tempest-InstanceActionsV221TestJSON-240629897-project-member] [instance: 1edc2966-2edc-453e-a80d-c4139d910a6d] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 596.733099] env[61839]: DEBUG oslo_concurrency.lockutils [None req-baec407e-7ec5-474d-acf6-1dc7891e7887 tempest-ImagesOneServerNegativeTestJSON-734658384 tempest-ImagesOneServerNegativeTestJSON-734658384-project-member] Releasing lock "refresh_cache-0b78efda-51c7-4d51-be31-005ff0d44ede" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 596.733624] env[61839]: DEBUG nova.compute.manager [None req-baec407e-7ec5-474d-acf6-1dc7891e7887 tempest-ImagesOneServerNegativeTestJSON-734658384 tempest-ImagesOneServerNegativeTestJSON-734658384-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61839) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 596.733624] env[61839]: DEBUG nova.compute.manager [None req-baec407e-7ec5-474d-acf6-1dc7891e7887 tempest-ImagesOneServerNegativeTestJSON-734658384 tempest-ImagesOneServerNegativeTestJSON-734658384-project-member] [instance: 0b78efda-51c7-4d51-be31-005ff0d44ede] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 596.734370] env[61839]: DEBUG nova.network.neutron [None req-baec407e-7ec5-474d-acf6-1dc7891e7887 tempest-ImagesOneServerNegativeTestJSON-734658384 tempest-ImagesOneServerNegativeTestJSON-734658384-project-member] [instance: 0b78efda-51c7-4d51-be31-005ff0d44ede] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 596.779665] env[61839]: DEBUG nova.network.neutron [None req-baec407e-7ec5-474d-acf6-1dc7891e7887 tempest-ImagesOneServerNegativeTestJSON-734658384 tempest-ImagesOneServerNegativeTestJSON-734658384-project-member] [instance: 0b78efda-51c7-4d51-be31-005ff0d44ede] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 596.797391] env[61839]: DEBUG oslo_concurrency.lockutils [None req-21cfb57d-6c01-48f6-81e9-529dd1bafa3c tempest-ServersAdminTestJSON-399292093 tempest-ServersAdminTestJSON-399292093-project-member] Acquiring lock "50140f35-6282-41dc-a66c-f041f33769d7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 596.797680] env[61839]: DEBUG oslo_concurrency.lockutils [None req-21cfb57d-6c01-48f6-81e9-529dd1bafa3c tempest-ServersAdminTestJSON-399292093 tempest-ServersAdminTestJSON-399292093-project-member] Lock "50140f35-6282-41dc-a66c-f041f33769d7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 596.874320] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdc1c336-e29a-48d0-bf68-97aca988739c {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.882960] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72eb9444-5854-4a2e-8d2d-e3611fcca736 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.914390] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b27b8eb0-1451-4563-910c-2847756fd726 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.923388] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdcf0fe4-7460-4bf2-9ba5-aad89d3ffb82 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.940022] env[61839]: DEBUG nova.compute.provider_tree [None req-35b58d39-5733-4319-be91-b1dc8063c680 tempest-FloatingIPsAssociationNegativeTestJSON-254000745 tempest-FloatingIPsAssociationNegativeTestJSON-254000745-project-member] Inventory has not changed in ProviderTree for provider: 
cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 597.130153] env[61839]: DEBUG nova.network.neutron [None req-6d60a164-1a17-4d70-8bc1-56b11131db97 tempest-InstanceActionsV221TestJSON-240629897 tempest-InstanceActionsV221TestJSON-240629897-project-member] [instance: 1edc2966-2edc-453e-a80d-c4139d910a6d] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 597.265497] env[61839]: DEBUG nova.compute.manager [req-55f72fc7-4e0a-4776-89ff-a01a2f930259 req-4785e920-fb11-42a9-9de9-bacdfbc7fb95 service nova] [instance: 1edc2966-2edc-453e-a80d-c4139d910a6d] Received event network-vif-deleted-efae4232-1d88-49a1-ae17-8b83c8c13197 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 597.283483] env[61839]: DEBUG nova.network.neutron [None req-baec407e-7ec5-474d-acf6-1dc7891e7887 tempest-ImagesOneServerNegativeTestJSON-734658384 tempest-ImagesOneServerNegativeTestJSON-734658384-project-member] [instance: 0b78efda-51c7-4d51-be31-005ff0d44ede] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 597.444160] env[61839]: DEBUG nova.scheduler.client.report [None req-35b58d39-5733-4319-be91-b1dc8063c680 tempest-FloatingIPsAssociationNegativeTestJSON-254000745 tempest-FloatingIPsAssociationNegativeTestJSON-254000745-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 597.637918] env[61839]: DEBUG oslo_concurrency.lockutils [None req-6d60a164-1a17-4d70-8bc1-56b11131db97 tempest-InstanceActionsV221TestJSON-240629897 tempest-InstanceActionsV221TestJSON-240629897-project-member] Releasing lock "refresh_cache-1edc2966-2edc-453e-a80d-c4139d910a6d" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 597.637918] env[61839]: DEBUG nova.compute.manager [None req-6d60a164-1a17-4d70-8bc1-56b11131db97 tempest-InstanceActionsV221TestJSON-240629897 tempest-InstanceActionsV221TestJSON-240629897-project-member] [instance: 1edc2966-2edc-453e-a80d-c4139d910a6d] Start destroying the instance on the hypervisor. 
{{(pid=61839) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 597.637918] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-6d60a164-1a17-4d70-8bc1-56b11131db97 tempest-InstanceActionsV221TestJSON-240629897 tempest-InstanceActionsV221TestJSON-240629897-project-member] [instance: 1edc2966-2edc-453e-a80d-c4139d910a6d] Destroying instance {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 597.637918] env[61839]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e77234d0-cb1a-4e83-a7ef-b900506b54bd {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.649950] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fd0d82f-7796-4a89-ae6e-cd630f395c12 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.681200] env[61839]: WARNING nova.virt.vmwareapi.vmops [None req-6d60a164-1a17-4d70-8bc1-56b11131db97 tempest-InstanceActionsV221TestJSON-240629897 tempest-InstanceActionsV221TestJSON-240629897-project-member] [instance: 1edc2966-2edc-453e-a80d-c4139d910a6d] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 1edc2966-2edc-453e-a80d-c4139d910a6d could not be found. [ 597.681200] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-6d60a164-1a17-4d70-8bc1-56b11131db97 tempest-InstanceActionsV221TestJSON-240629897 tempest-InstanceActionsV221TestJSON-240629897-project-member] [instance: 1edc2966-2edc-453e-a80d-c4139d910a6d] Instance destroyed {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 597.681200] env[61839]: INFO nova.compute.manager [None req-6d60a164-1a17-4d70-8bc1-56b11131db97 tempest-InstanceActionsV221TestJSON-240629897 tempest-InstanceActionsV221TestJSON-240629897-project-member] [instance: 1edc2966-2edc-453e-a80d-c4139d910a6d] Took 0.04 seconds to destroy the instance on the hypervisor. [ 597.681200] env[61839]: DEBUG oslo.service.loopingcall [None req-6d60a164-1a17-4d70-8bc1-56b11131db97 tempest-InstanceActionsV221TestJSON-240629897 tempest-InstanceActionsV221TestJSON-240629897-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 597.681584] env[61839]: DEBUG nova.compute.manager [-] [instance: 1edc2966-2edc-453e-a80d-c4139d910a6d] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 597.681800] env[61839]: DEBUG nova.network.neutron [-] [instance: 1edc2966-2edc-453e-a80d-c4139d910a6d] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 597.712376] env[61839]: DEBUG nova.network.neutron [-] [instance: 1edc2966-2edc-453e-a80d-c4139d910a6d] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 597.787518] env[61839]: INFO nova.compute.manager [None req-baec407e-7ec5-474d-acf6-1dc7891e7887 tempest-ImagesOneServerNegativeTestJSON-734658384 tempest-ImagesOneServerNegativeTestJSON-734658384-project-member] [instance: 0b78efda-51c7-4d51-be31-005ff0d44ede] Took 1.05 seconds to deallocate network for instance. 
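The "Waiting for function ... _deallocate_network_with_retries to return" record above comes from oslo.service's loopingcall machinery, which retries the Neutron teardown until it succeeds or runs out of attempts. A sketch of the general pattern using FixedIntervalLoopingCall (Nova's actual code may use a backoff variant); deallocate_fn, attempts and interval are hypothetical stand-ins:

    from oslo_service import loopingcall

    def deallocate_with_retries(deallocate_fn, attempts=3, interval=2):
        # deallocate_fn is a hypothetical stand-in for the real Neutron
        # teardown call; raising LoopingCallDone stops the loop cleanly.
        state = {'tries': 0}

        def _poll():
            state['tries'] += 1
            try:
                deallocate_fn()
            except Exception:
                if state['tries'] >= attempts:
                    raise  # out of retries; wait() re-raises this
                return     # run again after `interval` seconds
            raise loopingcall.LoopingCallDone()  # success: stop looping

        timer = loopingcall.FixedIntervalLoopingCall(_poll)
        timer.start(interval=interval).wait()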
[ 597.792454] env[61839]: DEBUG oslo_concurrency.lockutils [None req-c0a7e529-915f-4987-b1c5-b6434ed996b4 tempest-ListServersNegativeTestJSON-1055848737 tempest-ListServersNegativeTestJSON-1055848737-project-member] Lock "89843511-d201-431b-918d-e789e38e4f68" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 59.894s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 597.793525] env[61839]: Traceback (most recent call last): [ 597.793586] env[61839]: File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 597.793586] env[61839]: self.driver.spawn(context, instance, image_meta, [ 597.793586] env[61839]: File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 597.793586] env[61839]: self._vmops.spawn(context, instance, image_meta, injected_files, [ 597.793586] env[61839]: File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 597.793586] env[61839]: vm_ref = self.build_virtual_machine(instance, [ 597.793586] env[61839]: File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 597.793586] env[61839]: vif_infos = vmwarevif.get_vif_info(self._session, [ 597.793586] env[61839]: File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 597.793586] env[61839]: for vif in network_info: [ 597.793586] env[61839]: File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 597.793586] env[61839]: return self._sync_wrapper(fn, *args, **kwargs) [ 597.793586] env[61839]: File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 597.793586] env[61839]: self.wait() [ 597.793586] env[61839]: File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 597.793586] env[61839]: self[:] = self._gt.wait() [ 597.793586] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 597.793586] env[61839]: return self._exit_event.wait() [ 597.793586] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 597.793586] env[61839]: result = hub.switch() [ 597.793586] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 597.793586] env[61839]: return self.greenlet.switch() [ 597.793586] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 597.793586] env[61839]: result = function(*args, **kwargs) [ 597.793586] env[61839]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 597.793586] env[61839]: return func(*args, **kwargs) [ 597.793586] env[61839]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 597.793586] env[61839]: raise e [ 597.794224] env[61839]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 597.794224] env[61839]: nwinfo = self.network_api.allocate_for_instance( [ 597.794224] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 597.794224] env[61839]: created_port_ids = self._update_ports_for_instance( [ 597.794224] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 597.794224] env[61839]: with excutils.save_and_reraise_exception(): [ 597.794224] env[61839]: File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 597.794224] env[61839]: self.force_reraise() [ 597.794224] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 597.794224] env[61839]: raise self.value [ 597.794224] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 597.794224] env[61839]: updated_port = self._update_port( [ 597.794224] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 597.794224] env[61839]: _ensure_no_port_binding_failure(port) [ 597.794224] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 597.794224] env[61839]: raise exception.PortBindingFailed(port_id=port['id']) [ 597.794224] env[61839]: nova.exception.PortBindingFailed: Binding failed for port 6e1dec40-0217-4ce2-92ae-fb5211d70403, please check neutron logs for more information. [ 597.794224] env[61839]: During handling of the above exception, another exception occurred: [ 597.794224] env[61839]: Traceback (most recent call last): [ 597.794224] env[61839]: File "/opt/stack/nova/nova/compute/manager.py", line 2456, in _do_build_and_run_instance [ 597.794224] env[61839]: self._build_and_run_instance(context, instance, image, [ 597.794224] env[61839]: File "/opt/stack/nova/nova/compute/manager.py", line 2748, in _build_and_run_instance [ 597.794224] env[61839]: raise exception.RescheduledException( [ 597.794224] env[61839]: nova.exception.RescheduledException: Build of instance 89843511-d201-431b-918d-e789e38e4f68 was re-scheduled: Binding failed for port 6e1dec40-0217-4ce2-92ae-fb5211d70403, please check neutron logs for more information. 
[ 597.794224] env[61839]: During handling of the above exception, another exception occurred: [ 597.794224] env[61839]: Traceback (most recent call last): [ 597.794224] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenpool.py", line 87, in _spawn_n_impl [ 597.795113] env[61839]: func(*args, **kwargs) [ 597.795113] env[61839]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 597.795113] env[61839]: return func(*args, **kwargs) [ 597.795113] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 412, in inner [ 597.795113] env[61839]: return f(*args, **kwargs) [ 597.795113] env[61839]: File "/opt/stack/nova/nova/compute/manager.py", line 2347, in _locked_do_build_and_run_instance [ 597.795113] env[61839]: result = self._do_build_and_run_instance(*args, **kwargs) [ 597.795113] env[61839]: File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 597.795113] env[61839]: with excutils.save_and_reraise_exception(): [ 597.795113] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 597.795113] env[61839]: self.force_reraise() [ 597.795113] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 597.795113] env[61839]: raise self.value [ 597.795113] env[61839]: File "/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 597.795113] env[61839]: return f(self, context, *args, **kw) [ 597.795113] env[61839]: File "/opt/stack/nova/nova/compute/manager.py", line 166, in decorated_function [ 597.795113] env[61839]: with excutils.save_and_reraise_exception(): [ 597.795113] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 597.795113] env[61839]: self.force_reraise() [ 597.795113] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 597.795113] env[61839]: raise self.value [ 597.795113] env[61839]: File "/opt/stack/nova/nova/compute/manager.py", line 157, in decorated_function [ 597.795113] env[61839]: return function(self, context, *args, **kwargs) [ 597.795113] env[61839]: File "/opt/stack/nova/nova/compute/utils.py", line 1453, in decorated_function [ 597.795113] env[61839]: return function(self, context, *args, **kwargs) [ 597.795113] env[61839]: File "/opt/stack/nova/nova/compute/manager.py", line 203, in decorated_function [ 597.795113] env[61839]: return function(self, context, *args, **kwargs) [ 597.795113] env[61839]: File "/opt/stack/nova/nova/compute/manager.py", line 2491, in _do_build_and_run_instance [ 597.795113] env[61839]: instance.save() [ 597.795113] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_versionedobjects/base.py", line 209, in wrapper [ 597.795113] env[61839]: updates, result = self.indirection_api.object_action( [ 597.795758] env[61839]: File "/opt/stack/nova/nova/conductor/rpcapi.py", line 247, in object_action [ 597.795758] env[61839]: return cctxt.call(context, 'object_action', objinst=objinst, [ 597.795758] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/client.py", line 190, in call [ 597.795758] env[61839]: result = self.transport._send( [ 597.795758] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/transport.py", line 123, in _send [ 597.795758] env[61839]: return 
self._driver.send(target, ctxt, message, [ 597.795758] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 788, in send [ 597.795758] env[61839]: return self._send(target, ctxt, message, wait_for_reply, timeout, [ 597.795758] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 780, in _send [ 597.795758] env[61839]: raise result [ 597.795758] env[61839]: nova.exception_Remote.InstanceNotFound_Remote: Instance 89843511-d201-431b-918d-e789e38e4f68 could not be found. [ 597.795758] env[61839]: Traceback (most recent call last): [ 597.795758] env[61839]: File "/opt/stack/nova/nova/conductor/manager.py", line 142, in _object_dispatch [ 597.795758] env[61839]: return getattr(target, method)(*args, **kwargs) [ 597.795758] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_versionedobjects/base.py", line 226, in wrapper [ 597.795758] env[61839]: return fn(self, *args, **kwargs) [ 597.795758] env[61839]: File "/opt/stack/nova/nova/objects/instance.py", line 878, in save [ 597.795758] env[61839]: old_ref, inst_ref = db.instance_update_and_get_original( [ 597.795758] env[61839]: File "/opt/stack/nova/nova/db/utils.py", line 35, in wrapper [ 597.795758] env[61839]: return f(*args, **kwargs) [ 597.795758] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/api.py", line 144, in wrapper [ 597.795758] env[61839]: with excutils.save_and_reraise_exception() as ectxt: [ 597.795758] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 597.795758] env[61839]: self.force_reraise() [ 597.795758] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 597.795758] env[61839]: raise self.value [ 597.795758] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/api.py", line 142, in wrapper [ 597.795758] env[61839]: return f(*args, **kwargs) [ 597.795758] env[61839]: File "/opt/stack/nova/nova/db/main/api.py", line 207, in wrapper [ 597.796425] env[61839]: return f(context, *args, **kwargs) [ 597.796425] env[61839]: File "/opt/stack/nova/nova/db/main/api.py", line 2283, in instance_update_and_get_original [ 597.796425] env[61839]: instance_ref = _instance_get_by_uuid(context, instance_uuid, [ 597.796425] env[61839]: File "/opt/stack/nova/nova/db/main/api.py", line 1405, in _instance_get_by_uuid [ 597.796425] env[61839]: raise exception.InstanceNotFound(instance_id=uuid) [ 597.796425] env[61839]: nova.exception.InstanceNotFound: Instance 89843511-d201-431b-918d-e789e38e4f68 could not be found. [ 597.951351] env[61839]: DEBUG oslo_concurrency.lockutils [None req-35b58d39-5733-4319-be91-b1dc8063c680 tempest-FloatingIPsAssociationNegativeTestJSON-254000745 tempest-FloatingIPsAssociationNegativeTestJSON-254000745-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.526s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 597.951911] env[61839]: DEBUG nova.compute.manager [None req-35b58d39-5733-4319-be91-b1dc8063c680 tempest-FloatingIPsAssociationNegativeTestJSON-254000745 tempest-FloatingIPsAssociationNegativeTestJSON-254000745-project-member] [instance: 9e14bd1a-d6c2-4f4a-8919-27647ae5742b] Start building networks asynchronously for instance. 
{{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 597.956524] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a1353c68-52e0-45ee-a66a-5ba6b6d870a4 tempest-InstanceActionsTestJSON-1284580064 tempest-InstanceActionsTestJSON-1284580064-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 19.517s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 598.028335] env[61839]: DEBUG oslo_concurrency.lockutils [None req-871442e2-b578-4d92-b7ce-9775c2859117 tempest-ServersAdminTestJSON-399292093 tempest-ServersAdminTestJSON-399292093-project-member] Acquiring lock "a89e30e6-b727-440f-a1e8-9c86d19c796d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 598.028606] env[61839]: DEBUG oslo_concurrency.lockutils [None req-871442e2-b578-4d92-b7ce-9775c2859117 tempest-ServersAdminTestJSON-399292093 tempest-ServersAdminTestJSON-399292093-project-member] Lock "a89e30e6-b727-440f-a1e8-9c86d19c796d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 598.214326] env[61839]: DEBUG nova.network.neutron [-] [instance: 1edc2966-2edc-453e-a80d-c4139d910a6d] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 598.299028] env[61839]: DEBUG nova.compute.manager [None req-d5b699c0-0044-418d-93de-9c5330e52324 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] [instance: d8b0b608-d8ca-45ce-a113-ee96b5ef1a9d] Starting instance... {{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 598.466623] env[61839]: DEBUG nova.compute.utils [None req-35b58d39-5733-4319-be91-b1dc8063c680 tempest-FloatingIPsAssociationNegativeTestJSON-254000745 tempest-FloatingIPsAssociationNegativeTestJSON-254000745-project-member] Using /dev/sd instead of None {{(pid=61839) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 598.468976] env[61839]: DEBUG nova.compute.manager [None req-35b58d39-5733-4319-be91-b1dc8063c680 tempest-FloatingIPsAssociationNegativeTestJSON-254000745 tempest-FloatingIPsAssociationNegativeTestJSON-254000745-project-member] [instance: 9e14bd1a-d6c2-4f4a-8919-27647ae5742b] Allocating IP information in the background. 
{{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 598.469855] env[61839]: DEBUG nova.network.neutron [None req-35b58d39-5733-4319-be91-b1dc8063c680 tempest-FloatingIPsAssociationNegativeTestJSON-254000745 tempest-FloatingIPsAssociationNegativeTestJSON-254000745-project-member] [instance: 9e14bd1a-d6c2-4f4a-8919-27647ae5742b] allocate_for_instance() {{(pid=61839) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 598.593249] env[61839]: DEBUG nova.policy [None req-35b58d39-5733-4319-be91-b1dc8063c680 tempest-FloatingIPsAssociationNegativeTestJSON-254000745 tempest-FloatingIPsAssociationNegativeTestJSON-254000745-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '64c8d996baf94708962cc0d4dbb8b67a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '506d3d618a79424c86fecd52929d251d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61839) authorize /opt/stack/nova/nova/policy.py:201}} [ 598.716786] env[61839]: INFO nova.compute.manager [-] [instance: 1edc2966-2edc-453e-a80d-c4139d910a6d] Took 1.03 seconds to deallocate network for instance. [ 598.721913] env[61839]: DEBUG nova.compute.claims [None req-6d60a164-1a17-4d70-8bc1-56b11131db97 tempest-InstanceActionsV221TestJSON-240629897 tempest-InstanceActionsV221TestJSON-240629897-project-member] [instance: 1edc2966-2edc-453e-a80d-c4139d910a6d] Aborting claim: {{(pid=61839) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 598.722580] env[61839]: DEBUG oslo_concurrency.lockutils [None req-6d60a164-1a17-4d70-8bc1-56b11131db97 tempest-InstanceActionsV221TestJSON-240629897 tempest-InstanceActionsV221TestJSON-240629897-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 598.831757] env[61839]: DEBUG oslo_concurrency.lockutils [None req-d5b699c0-0044-418d-93de-9c5330e52324 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 598.844781] env[61839]: INFO nova.scheduler.client.report [None req-baec407e-7ec5-474d-acf6-1dc7891e7887 tempest-ImagesOneServerNegativeTestJSON-734658384 tempest-ImagesOneServerNegativeTestJSON-734658384-project-member] Deleted allocations for instance 0b78efda-51c7-4d51-be31-005ff0d44ede [ 598.971427] env[61839]: DEBUG nova.compute.manager [None req-35b58d39-5733-4319-be91-b1dc8063c680 tempest-FloatingIPsAssociationNegativeTestJSON-254000745 tempest-FloatingIPsAssociationNegativeTestJSON-254000745-project-member] [instance: 9e14bd1a-d6c2-4f4a-8919-27647ae5742b] Start building block device mappings for instance. 
{{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 599.006950] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1dc34608-84ed-4709-a97c-e511e555ee7d {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.016196] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1156f3e-bb25-4461-bdcf-e1afb9729e25 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.053311] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-064a7993-a120-41ca-ae84-d14d153662b8 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.062252] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9ecb28a-a559-4c08-a5ae-4956e347617e {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.080889] env[61839]: DEBUG nova.compute.provider_tree [None req-a1353c68-52e0-45ee-a66a-5ba6b6d870a4 tempest-InstanceActionsTestJSON-1284580064 tempest-InstanceActionsTestJSON-1284580064-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 599.341296] env[61839]: DEBUG nova.network.neutron [None req-35b58d39-5733-4319-be91-b1dc8063c680 tempest-FloatingIPsAssociationNegativeTestJSON-254000745 tempest-FloatingIPsAssociationNegativeTestJSON-254000745-project-member] [instance: 9e14bd1a-d6c2-4f4a-8919-27647ae5742b] Successfully created port: bad8ef9f-10bb-4dde-9386-c21e605b7b42 {{(pid=61839) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 599.361265] env[61839]: DEBUG oslo_concurrency.lockutils [None req-baec407e-7ec5-474d-acf6-1dc7891e7887 tempest-ImagesOneServerNegativeTestJSON-734658384 tempest-ImagesOneServerNegativeTestJSON-734658384-project-member] Lock "0b78efda-51c7-4d51-be31-005ff0d44ede" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 60.341s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 599.585852] env[61839]: DEBUG nova.scheduler.client.report [None req-a1353c68-52e0-45ee-a66a-5ba6b6d870a4 tempest-InstanceActionsTestJSON-1284580064 tempest-InstanceActionsTestJSON-1284580064-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 599.864116] env[61839]: DEBUG nova.compute.manager [None req-db043505-be83-48ec-924b-c8d945a5cedf tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: 7d9a24e0-c265-4255-964f-54c971c02ded] Starting instance... 
{{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 599.981547] env[61839]: DEBUG nova.compute.manager [None req-35b58d39-5733-4319-be91-b1dc8063c680 tempest-FloatingIPsAssociationNegativeTestJSON-254000745 tempest-FloatingIPsAssociationNegativeTestJSON-254000745-project-member] [instance: 9e14bd1a-d6c2-4f4a-8919-27647ae5742b] Start spawning the instance on the hypervisor. {{(pid=61839) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 600.009564] env[61839]: DEBUG nova.virt.hardware [None req-35b58d39-5733-4319-be91-b1dc8063c680 tempest-FloatingIPsAssociationNegativeTestJSON-254000745 tempest-FloatingIPsAssociationNegativeTestJSON-254000745-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-18T16:51:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-18T16:50:57Z,direct_url=,disk_format='vmdk',id=e497cc62-282a-4a70-9770-22d80d8a1013,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a98e034276e44534a9feace637762da7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-18T16:50:58Z,virtual_size=,visibility=), allow threads: False {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 600.009849] env[61839]: DEBUG nova.virt.hardware [None req-35b58d39-5733-4319-be91-b1dc8063c680 tempest-FloatingIPsAssociationNegativeTestJSON-254000745 tempest-FloatingIPsAssociationNegativeTestJSON-254000745-project-member] Flavor limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 600.011198] env[61839]: DEBUG nova.virt.hardware [None req-35b58d39-5733-4319-be91-b1dc8063c680 tempest-FloatingIPsAssociationNegativeTestJSON-254000745 tempest-FloatingIPsAssociationNegativeTestJSON-254000745-project-member] Image limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 600.011428] env[61839]: DEBUG nova.virt.hardware [None req-35b58d39-5733-4319-be91-b1dc8063c680 tempest-FloatingIPsAssociationNegativeTestJSON-254000745 tempest-FloatingIPsAssociationNegativeTestJSON-254000745-project-member] Flavor pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 600.011590] env[61839]: DEBUG nova.virt.hardware [None req-35b58d39-5733-4319-be91-b1dc8063c680 tempest-FloatingIPsAssociationNegativeTestJSON-254000745 tempest-FloatingIPsAssociationNegativeTestJSON-254000745-project-member] Image pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 600.011765] env[61839]: DEBUG nova.virt.hardware [None req-35b58d39-5733-4319-be91-b1dc8063c680 tempest-FloatingIPsAssociationNegativeTestJSON-254000745 tempest-FloatingIPsAssociationNegativeTestJSON-254000745-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 600.011989] env[61839]: DEBUG nova.virt.hardware [None req-35b58d39-5733-4319-be91-b1dc8063c680 tempest-FloatingIPsAssociationNegativeTestJSON-254000745 
tempest-FloatingIPsAssociationNegativeTestJSON-254000745-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 600.012166] env[61839]: DEBUG nova.virt.hardware [None req-35b58d39-5733-4319-be91-b1dc8063c680 tempest-FloatingIPsAssociationNegativeTestJSON-254000745 tempest-FloatingIPsAssociationNegativeTestJSON-254000745-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 600.012332] env[61839]: DEBUG nova.virt.hardware [None req-35b58d39-5733-4319-be91-b1dc8063c680 tempest-FloatingIPsAssociationNegativeTestJSON-254000745 tempest-FloatingIPsAssociationNegativeTestJSON-254000745-project-member] Got 1 possible topologies {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 600.012493] env[61839]: DEBUG nova.virt.hardware [None req-35b58d39-5733-4319-be91-b1dc8063c680 tempest-FloatingIPsAssociationNegativeTestJSON-254000745 tempest-FloatingIPsAssociationNegativeTestJSON-254000745-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 600.012660] env[61839]: DEBUG nova.virt.hardware [None req-35b58d39-5733-4319-be91-b1dc8063c680 tempest-FloatingIPsAssociationNegativeTestJSON-254000745 tempest-FloatingIPsAssociationNegativeTestJSON-254000745-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 600.013643] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-733925cb-2bda-4c6f-a17b-60075b2d8650 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.023080] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34e18515-8082-4ffc-9050-21f739199637 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.091014] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a1353c68-52e0-45ee-a66a-5ba6b6d870a4 tempest-InstanceActionsTestJSON-1284580064 tempest-InstanceActionsTestJSON-1284580064-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.134s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 600.091678] env[61839]: ERROR nova.compute.manager [None req-a1353c68-52e0-45ee-a66a-5ba6b6d870a4 tempest-InstanceActionsTestJSON-1284580064 tempest-InstanceActionsTestJSON-1284580064-project-member] [instance: ae1917f8-29af-43cc-8397-3b9072acee6c] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 788a594f-0cb1-4669-b641-d1f34ed13cc8, please check neutron logs for more information. 
[ 600.091678] env[61839]: ERROR nova.compute.manager [instance: ae1917f8-29af-43cc-8397-3b9072acee6c] Traceback (most recent call last): [ 600.091678] env[61839]: ERROR nova.compute.manager [instance: ae1917f8-29af-43cc-8397-3b9072acee6c] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 600.091678] env[61839]: ERROR nova.compute.manager [instance: ae1917f8-29af-43cc-8397-3b9072acee6c] self.driver.spawn(context, instance, image_meta, [ 600.091678] env[61839]: ERROR nova.compute.manager [instance: ae1917f8-29af-43cc-8397-3b9072acee6c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 600.091678] env[61839]: ERROR nova.compute.manager [instance: ae1917f8-29af-43cc-8397-3b9072acee6c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 600.091678] env[61839]: ERROR nova.compute.manager [instance: ae1917f8-29af-43cc-8397-3b9072acee6c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 600.091678] env[61839]: ERROR nova.compute.manager [instance: ae1917f8-29af-43cc-8397-3b9072acee6c] vm_ref = self.build_virtual_machine(instance, [ 600.091678] env[61839]: ERROR nova.compute.manager [instance: ae1917f8-29af-43cc-8397-3b9072acee6c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 600.091678] env[61839]: ERROR nova.compute.manager [instance: ae1917f8-29af-43cc-8397-3b9072acee6c] vif_infos = vmwarevif.get_vif_info(self._session, [ 600.091678] env[61839]: ERROR nova.compute.manager [instance: ae1917f8-29af-43cc-8397-3b9072acee6c] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 600.092290] env[61839]: ERROR nova.compute.manager [instance: ae1917f8-29af-43cc-8397-3b9072acee6c] for vif in network_info: [ 600.092290] env[61839]: ERROR nova.compute.manager [instance: ae1917f8-29af-43cc-8397-3b9072acee6c] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 600.092290] env[61839]: ERROR nova.compute.manager [instance: ae1917f8-29af-43cc-8397-3b9072acee6c] return self._sync_wrapper(fn, *args, **kwargs) [ 600.092290] env[61839]: ERROR nova.compute.manager [instance: ae1917f8-29af-43cc-8397-3b9072acee6c] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 600.092290] env[61839]: ERROR nova.compute.manager [instance: ae1917f8-29af-43cc-8397-3b9072acee6c] self.wait() [ 600.092290] env[61839]: ERROR nova.compute.manager [instance: ae1917f8-29af-43cc-8397-3b9072acee6c] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 600.092290] env[61839]: ERROR nova.compute.manager [instance: ae1917f8-29af-43cc-8397-3b9072acee6c] self[:] = self._gt.wait() [ 600.092290] env[61839]: ERROR nova.compute.manager [instance: ae1917f8-29af-43cc-8397-3b9072acee6c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 600.092290] env[61839]: ERROR nova.compute.manager [instance: ae1917f8-29af-43cc-8397-3b9072acee6c] return self._exit_event.wait() [ 600.092290] env[61839]: ERROR nova.compute.manager [instance: ae1917f8-29af-43cc-8397-3b9072acee6c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 600.092290] env[61839]: ERROR nova.compute.manager [instance: ae1917f8-29af-43cc-8397-3b9072acee6c] result = hub.switch() [ 600.092290] env[61839]: ERROR nova.compute.manager [instance: ae1917f8-29af-43cc-8397-3b9072acee6c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
600.092290] env[61839]: ERROR nova.compute.manager [instance: ae1917f8-29af-43cc-8397-3b9072acee6c] return self.greenlet.switch() [ 600.092864] env[61839]: ERROR nova.compute.manager [instance: ae1917f8-29af-43cc-8397-3b9072acee6c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 600.092864] env[61839]: ERROR nova.compute.manager [instance: ae1917f8-29af-43cc-8397-3b9072acee6c] result = function(*args, **kwargs) [ 600.092864] env[61839]: ERROR nova.compute.manager [instance: ae1917f8-29af-43cc-8397-3b9072acee6c] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 600.092864] env[61839]: ERROR nova.compute.manager [instance: ae1917f8-29af-43cc-8397-3b9072acee6c] return func(*args, **kwargs) [ 600.092864] env[61839]: ERROR nova.compute.manager [instance: ae1917f8-29af-43cc-8397-3b9072acee6c] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 600.092864] env[61839]: ERROR nova.compute.manager [instance: ae1917f8-29af-43cc-8397-3b9072acee6c] raise e [ 600.092864] env[61839]: ERROR nova.compute.manager [instance: ae1917f8-29af-43cc-8397-3b9072acee6c] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 600.092864] env[61839]: ERROR nova.compute.manager [instance: ae1917f8-29af-43cc-8397-3b9072acee6c] nwinfo = self.network_api.allocate_for_instance( [ 600.092864] env[61839]: ERROR nova.compute.manager [instance: ae1917f8-29af-43cc-8397-3b9072acee6c] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 600.092864] env[61839]: ERROR nova.compute.manager [instance: ae1917f8-29af-43cc-8397-3b9072acee6c] created_port_ids = self._update_ports_for_instance( [ 600.092864] env[61839]: ERROR nova.compute.manager [instance: ae1917f8-29af-43cc-8397-3b9072acee6c] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 600.092864] env[61839]: ERROR nova.compute.manager [instance: ae1917f8-29af-43cc-8397-3b9072acee6c] with excutils.save_and_reraise_exception(): [ 600.092864] env[61839]: ERROR nova.compute.manager [instance: ae1917f8-29af-43cc-8397-3b9072acee6c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 600.093382] env[61839]: ERROR nova.compute.manager [instance: ae1917f8-29af-43cc-8397-3b9072acee6c] self.force_reraise() [ 600.093382] env[61839]: ERROR nova.compute.manager [instance: ae1917f8-29af-43cc-8397-3b9072acee6c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 600.093382] env[61839]: ERROR nova.compute.manager [instance: ae1917f8-29af-43cc-8397-3b9072acee6c] raise self.value [ 600.093382] env[61839]: ERROR nova.compute.manager [instance: ae1917f8-29af-43cc-8397-3b9072acee6c] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 600.093382] env[61839]: ERROR nova.compute.manager [instance: ae1917f8-29af-43cc-8397-3b9072acee6c] updated_port = self._update_port( [ 600.093382] env[61839]: ERROR nova.compute.manager [instance: ae1917f8-29af-43cc-8397-3b9072acee6c] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 600.093382] env[61839]: ERROR nova.compute.manager [instance: ae1917f8-29af-43cc-8397-3b9072acee6c] _ensure_no_port_binding_failure(port) [ 600.093382] env[61839]: ERROR nova.compute.manager [instance: ae1917f8-29af-43cc-8397-3b9072acee6c] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 600.093382] env[61839]: ERROR nova.compute.manager [instance: ae1917f8-29af-43cc-8397-3b9072acee6c] raise exception.PortBindingFailed(port_id=port['id']) [ 600.093382] env[61839]: ERROR nova.compute.manager [instance: ae1917f8-29af-43cc-8397-3b9072acee6c] nova.exception.PortBindingFailed: Binding failed for port 788a594f-0cb1-4669-b641-d1f34ed13cc8, please check neutron logs for more information. [ 600.093382] env[61839]: ERROR nova.compute.manager [instance: ae1917f8-29af-43cc-8397-3b9072acee6c] [ 600.093833] env[61839]: DEBUG nova.compute.utils [None req-a1353c68-52e0-45ee-a66a-5ba6b6d870a4 tempest-InstanceActionsTestJSON-1284580064 tempest-InstanceActionsTestJSON-1284580064-project-member] [instance: ae1917f8-29af-43cc-8397-3b9072acee6c] Binding failed for port 788a594f-0cb1-4669-b641-d1f34ed13cc8, please check neutron logs for more information. {{(pid=61839) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 600.093883] env[61839]: DEBUG oslo_concurrency.lockutils [None req-96726cf8-093a-469e-ae4c-d851d34fa48d tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.910s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 600.095698] env[61839]: INFO nova.compute.claims [None req-96726cf8-093a-469e-ae4c-d851d34fa48d tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] [instance: d95b34bb-bf0f-4a43-a5ad-6ae7770b606c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 600.102028] env[61839]: DEBUG nova.compute.manager [None req-a1353c68-52e0-45ee-a66a-5ba6b6d870a4 tempest-InstanceActionsTestJSON-1284580064 tempest-InstanceActionsTestJSON-1284580064-project-member] [instance: ae1917f8-29af-43cc-8397-3b9072acee6c] Build of instance ae1917f8-29af-43cc-8397-3b9072acee6c was re-scheduled: Binding failed for port 788a594f-0cb1-4669-b641-d1f34ed13cc8, please check neutron logs for more information. 
{{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 600.102028] env[61839]: DEBUG nova.compute.manager [None req-a1353c68-52e0-45ee-a66a-5ba6b6d870a4 tempest-InstanceActionsTestJSON-1284580064 tempest-InstanceActionsTestJSON-1284580064-project-member] [instance: ae1917f8-29af-43cc-8397-3b9072acee6c] Unplugging VIFs for instance {{(pid=61839) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 600.102028] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a1353c68-52e0-45ee-a66a-5ba6b6d870a4 tempest-InstanceActionsTestJSON-1284580064 tempest-InstanceActionsTestJSON-1284580064-project-member] Acquiring lock "refresh_cache-ae1917f8-29af-43cc-8397-3b9072acee6c" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 600.102028] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a1353c68-52e0-45ee-a66a-5ba6b6d870a4 tempest-InstanceActionsTestJSON-1284580064 tempest-InstanceActionsTestJSON-1284580064-project-member] Acquired lock "refresh_cache-ae1917f8-29af-43cc-8397-3b9072acee6c" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 600.102273] env[61839]: DEBUG nova.network.neutron [None req-a1353c68-52e0-45ee-a66a-5ba6b6d870a4 tempest-InstanceActionsTestJSON-1284580064 tempest-InstanceActionsTestJSON-1284580064-project-member] [instance: ae1917f8-29af-43cc-8397-3b9072acee6c] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 600.390420] env[61839]: DEBUG oslo_concurrency.lockutils [None req-db043505-be83-48ec-924b-c8d945a5cedf tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 600.458524] env[61839]: DEBUG oslo_concurrency.lockutils [None req-1e1fbba5-4b37-438c-a0c7-7d09c658ba03 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Acquiring lock "337b31e7-a6c9-4f35-9936-62cff06fe2a1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 600.458861] env[61839]: DEBUG oslo_concurrency.lockutils [None req-1e1fbba5-4b37-438c-a0c7-7d09c658ba03 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Lock "337b31e7-a6c9-4f35-9936-62cff06fe2a1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 600.641143] env[61839]: DEBUG nova.network.neutron [None req-a1353c68-52e0-45ee-a66a-5ba6b6d870a4 tempest-InstanceActionsTestJSON-1284580064 tempest-InstanceActionsTestJSON-1284580064-project-member] [instance: ae1917f8-29af-43cc-8397-3b9072acee6c] Instance cache missing network info. 
{{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 600.919216] env[61839]: DEBUG nova.network.neutron [None req-a1353c68-52e0-45ee-a66a-5ba6b6d870a4 tempest-InstanceActionsTestJSON-1284580064 tempest-InstanceActionsTestJSON-1284580064-project-member] [instance: ae1917f8-29af-43cc-8397-3b9072acee6c] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 601.065501] env[61839]: DEBUG nova.compute.manager [req-3bfaa6ed-a332-4bc4-b416-5f6f55fff6c8 req-2ace7ffa-96f7-42ef-9d1d-91cea4da483c service nova] [instance: 9e14bd1a-d6c2-4f4a-8919-27647ae5742b] Received event network-changed-bad8ef9f-10bb-4dde-9386-c21e605b7b42 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 601.065698] env[61839]: DEBUG nova.compute.manager [req-3bfaa6ed-a332-4bc4-b416-5f6f55fff6c8 req-2ace7ffa-96f7-42ef-9d1d-91cea4da483c service nova] [instance: 9e14bd1a-d6c2-4f4a-8919-27647ae5742b] Refreshing instance network info cache due to event network-changed-bad8ef9f-10bb-4dde-9386-c21e605b7b42. {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 601.067076] env[61839]: DEBUG oslo_concurrency.lockutils [req-3bfaa6ed-a332-4bc4-b416-5f6f55fff6c8 req-2ace7ffa-96f7-42ef-9d1d-91cea4da483c service nova] Acquiring lock "refresh_cache-9e14bd1a-d6c2-4f4a-8919-27647ae5742b" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 601.067076] env[61839]: DEBUG oslo_concurrency.lockutils [req-3bfaa6ed-a332-4bc4-b416-5f6f55fff6c8 req-2ace7ffa-96f7-42ef-9d1d-91cea4da483c service nova] Acquired lock "refresh_cache-9e14bd1a-d6c2-4f4a-8919-27647ae5742b" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 601.067076] env[61839]: DEBUG nova.network.neutron [req-3bfaa6ed-a332-4bc4-b416-5f6f55fff6c8 req-2ace7ffa-96f7-42ef-9d1d-91cea4da483c service nova] [instance: 9e14bd1a-d6c2-4f4a-8919-27647ae5742b] Refreshing network info cache for port bad8ef9f-10bb-4dde-9386-c21e605b7b42 {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 601.320241] env[61839]: ERROR nova.compute.manager [None req-35b58d39-5733-4319-be91-b1dc8063c680 tempest-FloatingIPsAssociationNegativeTestJSON-254000745 tempest-FloatingIPsAssociationNegativeTestJSON-254000745-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port bad8ef9f-10bb-4dde-9386-c21e605b7b42, please check neutron logs for more information. 
[ 601.320241] env[61839]: ERROR nova.compute.manager Traceback (most recent call last): [ 601.320241] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 601.320241] env[61839]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 601.320241] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 601.320241] env[61839]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 601.320241] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 601.320241] env[61839]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 601.320241] env[61839]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 601.320241] env[61839]: ERROR nova.compute.manager self.force_reraise() [ 601.320241] env[61839]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 601.320241] env[61839]: ERROR nova.compute.manager raise self.value [ 601.320241] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 601.320241] env[61839]: ERROR nova.compute.manager updated_port = self._update_port( [ 601.320241] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 601.320241] env[61839]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 601.320747] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 601.320747] env[61839]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 601.320747] env[61839]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port bad8ef9f-10bb-4dde-9386-c21e605b7b42, please check neutron logs for more information. 
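The PortBindingFailed at the end of this traceback is not an HTTP error from Neutron: the port update itself succeeds, but Neutron reports the failed binding inside the port body as binding:vif_type = 'binding_failed', and the _ensure_no_port_binding_failure helper (nova/network/neutron.py line 294 in the frames above) turns that into an exception. The force_reraise() and "raise self.value" frames come from oslo's save_and_reraise_exception, which lets cleanup run and then re-raises the original error. A condensed, runnable sketch of those two idioms, assuming oslo.utils is installed; the exception class and function signatures below are simplified stand-ins rather than Nova's actual code:

    from oslo_utils import excutils

    class PortBindingFailed(Exception):
        # Stand-in for nova.exception.PortBindingFailed, formatted to match
        # the message seen in this log.
        def __init__(self, port_id):
            super().__init__(
                'Binding failed for port %s, please check neutron logs '
                'for more information.' % port_id)

    def _ensure_no_port_binding_failure(port):
        # Neutron signals a failed binding in the port body rather than as
        # an HTTP error, so the caller has to inspect binding:vif_type.
        if port.get('binding:vif_type') == 'binding_failed':
            raise PortBindingFailed(port_id=port['id'])

    def _update_ports_for_instance(ports, update_port, cleanup):
        updated = []
        try:
            for port in ports:
                updated_port = update_port(port)
                _ensure_no_port_binding_failure(updated_port)
                updated.append(updated_port)
        except Exception:
            with excutils.save_and_reraise_exception():
                # Cleanup runs first; the context manager then re-raises
                # the original exception, producing the force_reraise()
                # and "raise self.value" frames seen above.
                cleanup(updated)
        return updated

The greenthread traceback that follows is the same exception surfacing a second time: _allocate_network_async runs in a spawned greenthread, so the error is stored and re-raised when the spawn path iterates network_info.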
[ 601.320747] env[61839]: ERROR nova.compute.manager [ 601.320747] env[61839]: Traceback (most recent call last): [ 601.320747] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 601.320747] env[61839]: listener.cb(fileno) [ 601.320747] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 601.320747] env[61839]: result = function(*args, **kwargs) [ 601.320747] env[61839]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 601.320747] env[61839]: return func(*args, **kwargs) [ 601.320747] env[61839]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 601.320747] env[61839]: raise e [ 601.320747] env[61839]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 601.320747] env[61839]: nwinfo = self.network_api.allocate_for_instance( [ 601.320747] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 601.320747] env[61839]: created_port_ids = self._update_ports_for_instance( [ 601.320747] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 601.320747] env[61839]: with excutils.save_and_reraise_exception(): [ 601.320747] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 601.320747] env[61839]: self.force_reraise() [ 601.320747] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 601.320747] env[61839]: raise self.value [ 601.320747] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 601.320747] env[61839]: updated_port = self._update_port( [ 601.320747] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 601.320747] env[61839]: _ensure_no_port_binding_failure(port) [ 601.320747] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 601.320747] env[61839]: raise exception.PortBindingFailed(port_id=port['id']) [ 601.321493] env[61839]: nova.exception.PortBindingFailed: Binding failed for port bad8ef9f-10bb-4dde-9386-c21e605b7b42, please check neutron logs for more information. [ 601.321493] env[61839]: Removing descriptor: 17 [ 601.321493] env[61839]: ERROR nova.compute.manager [None req-35b58d39-5733-4319-be91-b1dc8063c680 tempest-FloatingIPsAssociationNegativeTestJSON-254000745 tempest-FloatingIPsAssociationNegativeTestJSON-254000745-project-member] [instance: 9e14bd1a-d6c2-4f4a-8919-27647ae5742b] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port bad8ef9f-10bb-4dde-9386-c21e605b7b42, please check neutron logs for more information. 
[ 601.321493] env[61839]: ERROR nova.compute.manager [instance: 9e14bd1a-d6c2-4f4a-8919-27647ae5742b] Traceback (most recent call last): [ 601.321493] env[61839]: ERROR nova.compute.manager [instance: 9e14bd1a-d6c2-4f4a-8919-27647ae5742b] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 601.321493] env[61839]: ERROR nova.compute.manager [instance: 9e14bd1a-d6c2-4f4a-8919-27647ae5742b] yield resources [ 601.321493] env[61839]: ERROR nova.compute.manager [instance: 9e14bd1a-d6c2-4f4a-8919-27647ae5742b] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 601.321493] env[61839]: ERROR nova.compute.manager [instance: 9e14bd1a-d6c2-4f4a-8919-27647ae5742b] self.driver.spawn(context, instance, image_meta, [ 601.321493] env[61839]: ERROR nova.compute.manager [instance: 9e14bd1a-d6c2-4f4a-8919-27647ae5742b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 601.321493] env[61839]: ERROR nova.compute.manager [instance: 9e14bd1a-d6c2-4f4a-8919-27647ae5742b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 601.321493] env[61839]: ERROR nova.compute.manager [instance: 9e14bd1a-d6c2-4f4a-8919-27647ae5742b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 601.321493] env[61839]: ERROR nova.compute.manager [instance: 9e14bd1a-d6c2-4f4a-8919-27647ae5742b] vm_ref = self.build_virtual_machine(instance, [ 601.321903] env[61839]: ERROR nova.compute.manager [instance: 9e14bd1a-d6c2-4f4a-8919-27647ae5742b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 601.321903] env[61839]: ERROR nova.compute.manager [instance: 9e14bd1a-d6c2-4f4a-8919-27647ae5742b] vif_infos = vmwarevif.get_vif_info(self._session, [ 601.321903] env[61839]: ERROR nova.compute.manager [instance: 9e14bd1a-d6c2-4f4a-8919-27647ae5742b] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 601.321903] env[61839]: ERROR nova.compute.manager [instance: 9e14bd1a-d6c2-4f4a-8919-27647ae5742b] for vif in network_info: [ 601.321903] env[61839]: ERROR nova.compute.manager [instance: 9e14bd1a-d6c2-4f4a-8919-27647ae5742b] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 601.321903] env[61839]: ERROR nova.compute.manager [instance: 9e14bd1a-d6c2-4f4a-8919-27647ae5742b] return self._sync_wrapper(fn, *args, **kwargs) [ 601.321903] env[61839]: ERROR nova.compute.manager [instance: 9e14bd1a-d6c2-4f4a-8919-27647ae5742b] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 601.321903] env[61839]: ERROR nova.compute.manager [instance: 9e14bd1a-d6c2-4f4a-8919-27647ae5742b] self.wait() [ 601.321903] env[61839]: ERROR nova.compute.manager [instance: 9e14bd1a-d6c2-4f4a-8919-27647ae5742b] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 601.321903] env[61839]: ERROR nova.compute.manager [instance: 9e14bd1a-d6c2-4f4a-8919-27647ae5742b] self[:] = self._gt.wait() [ 601.321903] env[61839]: ERROR nova.compute.manager [instance: 9e14bd1a-d6c2-4f4a-8919-27647ae5742b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 601.321903] env[61839]: ERROR nova.compute.manager [instance: 9e14bd1a-d6c2-4f4a-8919-27647ae5742b] return self._exit_event.wait() [ 601.321903] env[61839]: ERROR nova.compute.manager [instance: 9e14bd1a-d6c2-4f4a-8919-27647ae5742b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 601.322243] env[61839]: ERROR 
nova.compute.manager [instance: 9e14bd1a-d6c2-4f4a-8919-27647ae5742b] result = hub.switch() [ 601.322243] env[61839]: ERROR nova.compute.manager [instance: 9e14bd1a-d6c2-4f4a-8919-27647ae5742b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 601.322243] env[61839]: ERROR nova.compute.manager [instance: 9e14bd1a-d6c2-4f4a-8919-27647ae5742b] return self.greenlet.switch() [ 601.322243] env[61839]: ERROR nova.compute.manager [instance: 9e14bd1a-d6c2-4f4a-8919-27647ae5742b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 601.322243] env[61839]: ERROR nova.compute.manager [instance: 9e14bd1a-d6c2-4f4a-8919-27647ae5742b] result = function(*args, **kwargs) [ 601.322243] env[61839]: ERROR nova.compute.manager [instance: 9e14bd1a-d6c2-4f4a-8919-27647ae5742b] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 601.322243] env[61839]: ERROR nova.compute.manager [instance: 9e14bd1a-d6c2-4f4a-8919-27647ae5742b] return func(*args, **kwargs) [ 601.322243] env[61839]: ERROR nova.compute.manager [instance: 9e14bd1a-d6c2-4f4a-8919-27647ae5742b] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 601.322243] env[61839]: ERROR nova.compute.manager [instance: 9e14bd1a-d6c2-4f4a-8919-27647ae5742b] raise e [ 601.322243] env[61839]: ERROR nova.compute.manager [instance: 9e14bd1a-d6c2-4f4a-8919-27647ae5742b] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 601.322243] env[61839]: ERROR nova.compute.manager [instance: 9e14bd1a-d6c2-4f4a-8919-27647ae5742b] nwinfo = self.network_api.allocate_for_instance( [ 601.322243] env[61839]: ERROR nova.compute.manager [instance: 9e14bd1a-d6c2-4f4a-8919-27647ae5742b] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 601.322243] env[61839]: ERROR nova.compute.manager [instance: 9e14bd1a-d6c2-4f4a-8919-27647ae5742b] created_port_ids = self._update_ports_for_instance( [ 601.322549] env[61839]: ERROR nova.compute.manager [instance: 9e14bd1a-d6c2-4f4a-8919-27647ae5742b] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 601.322549] env[61839]: ERROR nova.compute.manager [instance: 9e14bd1a-d6c2-4f4a-8919-27647ae5742b] with excutils.save_and_reraise_exception(): [ 601.322549] env[61839]: ERROR nova.compute.manager [instance: 9e14bd1a-d6c2-4f4a-8919-27647ae5742b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 601.322549] env[61839]: ERROR nova.compute.manager [instance: 9e14bd1a-d6c2-4f4a-8919-27647ae5742b] self.force_reraise() [ 601.322549] env[61839]: ERROR nova.compute.manager [instance: 9e14bd1a-d6c2-4f4a-8919-27647ae5742b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 601.322549] env[61839]: ERROR nova.compute.manager [instance: 9e14bd1a-d6c2-4f4a-8919-27647ae5742b] raise self.value [ 601.322549] env[61839]: ERROR nova.compute.manager [instance: 9e14bd1a-d6c2-4f4a-8919-27647ae5742b] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 601.322549] env[61839]: ERROR nova.compute.manager [instance: 9e14bd1a-d6c2-4f4a-8919-27647ae5742b] updated_port = self._update_port( [ 601.322549] env[61839]: ERROR nova.compute.manager [instance: 9e14bd1a-d6c2-4f4a-8919-27647ae5742b] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 601.322549] 
env[61839]: ERROR nova.compute.manager [instance: 9e14bd1a-d6c2-4f4a-8919-27647ae5742b] _ensure_no_port_binding_failure(port) [ 601.322549] env[61839]: ERROR nova.compute.manager [instance: 9e14bd1a-d6c2-4f4a-8919-27647ae5742b] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 601.322549] env[61839]: ERROR nova.compute.manager [instance: 9e14bd1a-d6c2-4f4a-8919-27647ae5742b] raise exception.PortBindingFailed(port_id=port['id']) [ 601.322836] env[61839]: ERROR nova.compute.manager [instance: 9e14bd1a-d6c2-4f4a-8919-27647ae5742b] nova.exception.PortBindingFailed: Binding failed for port bad8ef9f-10bb-4dde-9386-c21e605b7b42, please check neutron logs for more information. [ 601.322836] env[61839]: ERROR nova.compute.manager [instance: 9e14bd1a-d6c2-4f4a-8919-27647ae5742b] [ 601.322836] env[61839]: INFO nova.compute.manager [None req-35b58d39-5733-4319-be91-b1dc8063c680 tempest-FloatingIPsAssociationNegativeTestJSON-254000745 tempest-FloatingIPsAssociationNegativeTestJSON-254000745-project-member] [instance: 9e14bd1a-d6c2-4f4a-8919-27647ae5742b] Terminating instance [ 601.326389] env[61839]: DEBUG oslo_concurrency.lockutils [None req-35b58d39-5733-4319-be91-b1dc8063c680 tempest-FloatingIPsAssociationNegativeTestJSON-254000745 tempest-FloatingIPsAssociationNegativeTestJSON-254000745-project-member] Acquiring lock "refresh_cache-9e14bd1a-d6c2-4f4a-8919-27647ae5742b" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 601.423854] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a1353c68-52e0-45ee-a66a-5ba6b6d870a4 tempest-InstanceActionsTestJSON-1284580064 tempest-InstanceActionsTestJSON-1284580064-project-member] Releasing lock "refresh_cache-ae1917f8-29af-43cc-8397-3b9072acee6c" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 601.424568] env[61839]: DEBUG nova.compute.manager [None req-a1353c68-52e0-45ee-a66a-5ba6b6d870a4 tempest-InstanceActionsTestJSON-1284580064 tempest-InstanceActionsTestJSON-1284580064-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61839) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 601.424843] env[61839]: DEBUG nova.compute.manager [None req-a1353c68-52e0-45ee-a66a-5ba6b6d870a4 tempest-InstanceActionsTestJSON-1284580064 tempest-InstanceActionsTestJSON-1284580064-project-member] [instance: ae1917f8-29af-43cc-8397-3b9072acee6c] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 601.425087] env[61839]: DEBUG nova.network.neutron [None req-a1353c68-52e0-45ee-a66a-5ba6b6d870a4 tempest-InstanceActionsTestJSON-1284580064 tempest-InstanceActionsTestJSON-1284580064-project-member] [instance: ae1917f8-29af-43cc-8397-3b9072acee6c] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 601.468218] env[61839]: DEBUG nova.network.neutron [None req-a1353c68-52e0-45ee-a66a-5ba6b6d870a4 tempest-InstanceActionsTestJSON-1284580064 tempest-InstanceActionsTestJSON-1284580064-project-member] [instance: ae1917f8-29af-43cc-8397-3b9072acee6c] Instance cache missing network info. 
{{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 601.594641] env[61839]: DEBUG nova.network.neutron [req-3bfaa6ed-a332-4bc4-b416-5f6f55fff6c8 req-2ace7ffa-96f7-42ef-9d1d-91cea4da483c service nova] [instance: 9e14bd1a-d6c2-4f4a-8919-27647ae5742b] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 601.619160] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-363d3a09-83c6-427c-8509-1dea34d6a99a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.633186] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00bd1cf2-6db2-4425-bd61-0b1591e59a51 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.671758] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-165aed01-4de3-4d3b-99f7-e6406e58bc31 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.679089] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7dc4d452-c0d9-4edb-8c2d-4da9184342d8 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.693851] env[61839]: DEBUG nova.compute.provider_tree [None req-96726cf8-093a-469e-ae4c-d851d34fa48d tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 601.809817] env[61839]: DEBUG nova.network.neutron [req-3bfaa6ed-a332-4bc4-b416-5f6f55fff6c8 req-2ace7ffa-96f7-42ef-9d1d-91cea4da483c service nova] [instance: 9e14bd1a-d6c2-4f4a-8919-27647ae5742b] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 601.971186] env[61839]: DEBUG nova.network.neutron [None req-a1353c68-52e0-45ee-a66a-5ba6b6d870a4 tempest-InstanceActionsTestJSON-1284580064 tempest-InstanceActionsTestJSON-1284580064-project-member] [instance: ae1917f8-29af-43cc-8397-3b9072acee6c] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 602.197553] env[61839]: DEBUG nova.scheduler.client.report [None req-96726cf8-093a-469e-ae4c-d851d34fa48d tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 602.312170] env[61839]: DEBUG oslo_concurrency.lockutils [req-3bfaa6ed-a332-4bc4-b416-5f6f55fff6c8 req-2ace7ffa-96f7-42ef-9d1d-91cea4da483c service nova] Releasing 
lock "refresh_cache-9e14bd1a-d6c2-4f4a-8919-27647ae5742b" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 602.312580] env[61839]: DEBUG oslo_concurrency.lockutils [None req-35b58d39-5733-4319-be91-b1dc8063c680 tempest-FloatingIPsAssociationNegativeTestJSON-254000745 tempest-FloatingIPsAssociationNegativeTestJSON-254000745-project-member] Acquired lock "refresh_cache-9e14bd1a-d6c2-4f4a-8919-27647ae5742b" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 602.312761] env[61839]: DEBUG nova.network.neutron [None req-35b58d39-5733-4319-be91-b1dc8063c680 tempest-FloatingIPsAssociationNegativeTestJSON-254000745 tempest-FloatingIPsAssociationNegativeTestJSON-254000745-project-member] [instance: 9e14bd1a-d6c2-4f4a-8919-27647ae5742b] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 602.477822] env[61839]: INFO nova.compute.manager [None req-a1353c68-52e0-45ee-a66a-5ba6b6d870a4 tempest-InstanceActionsTestJSON-1284580064 tempest-InstanceActionsTestJSON-1284580064-project-member] [instance: ae1917f8-29af-43cc-8397-3b9072acee6c] Took 1.05 seconds to deallocate network for instance. [ 602.631132] env[61839]: DEBUG oslo_concurrency.lockutils [None req-73f4029e-d3f8-4a4c-9058-31fdb7ebb120 tempest-ServerRescueTestJSONUnderV235-933831856 tempest-ServerRescueTestJSONUnderV235-933831856-project-member] Acquiring lock "59ea60d5-7296-480c-ac03-ec0a7c021300" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 602.631132] env[61839]: DEBUG oslo_concurrency.lockutils [None req-73f4029e-d3f8-4a4c-9058-31fdb7ebb120 tempest-ServerRescueTestJSONUnderV235-933831856 tempest-ServerRescueTestJSONUnderV235-933831856-project-member] Lock "59ea60d5-7296-480c-ac03-ec0a7c021300" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 602.704011] env[61839]: DEBUG oslo_concurrency.lockutils [None req-96726cf8-093a-469e-ae4c-d851d34fa48d tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.610s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 602.704566] env[61839]: DEBUG nova.compute.manager [None req-96726cf8-093a-469e-ae4c-d851d34fa48d tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] [instance: d95b34bb-bf0f-4a43-a5ad-6ae7770b606c] Start building networks asynchronously for instance. 
{{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 602.707625] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3f01d48c-4eb1-4941-a48e-1320be5c5916 tempest-FloatingIPsAssociationTestJSON-751321575 tempest-FloatingIPsAssociationTestJSON-751321575-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.634s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 602.709767] env[61839]: INFO nova.compute.claims [None req-3f01d48c-4eb1-4941-a48e-1320be5c5916 tempest-FloatingIPsAssociationTestJSON-751321575 tempest-FloatingIPsAssociationTestJSON-751321575-project-member] [instance: 916834d8-4819-4167-8774-b0a665021ef8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 602.850182] env[61839]: DEBUG nova.network.neutron [None req-35b58d39-5733-4319-be91-b1dc8063c680 tempest-FloatingIPsAssociationNegativeTestJSON-254000745 tempest-FloatingIPsAssociationNegativeTestJSON-254000745-project-member] [instance: 9e14bd1a-d6c2-4f4a-8919-27647ae5742b] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 602.935974] env[61839]: DEBUG nova.network.neutron [None req-35b58d39-5733-4319-be91-b1dc8063c680 tempest-FloatingIPsAssociationNegativeTestJSON-254000745 tempest-FloatingIPsAssociationNegativeTestJSON-254000745-project-member] [instance: 9e14bd1a-d6c2-4f4a-8919-27647ae5742b] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 603.135599] env[61839]: DEBUG nova.compute.manager [req-58eee752-6ac4-48bb-a7a9-ba238247a88a req-433eeb5e-96b1-42dc-a7c3-af5e596e2d60 service nova] [instance: 9e14bd1a-d6c2-4f4a-8919-27647ae5742b] Received event network-vif-deleted-bad8ef9f-10bb-4dde-9386-c21e605b7b42 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 603.214831] env[61839]: DEBUG nova.compute.utils [None req-96726cf8-093a-469e-ae4c-d851d34fa48d tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] Using /dev/sd instead of None {{(pid=61839) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 603.217332] env[61839]: DEBUG nova.compute.manager [None req-96726cf8-093a-469e-ae4c-d851d34fa48d tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] [instance: d95b34bb-bf0f-4a43-a5ad-6ae7770b606c] Allocating IP information in the background. 
{{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 603.217538] env[61839]: DEBUG nova.network.neutron [None req-96726cf8-093a-469e-ae4c-d851d34fa48d tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] [instance: d95b34bb-bf0f-4a43-a5ad-6ae7770b606c] allocate_for_instance() {{(pid=61839) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 603.439605] env[61839]: DEBUG oslo_concurrency.lockutils [None req-35b58d39-5733-4319-be91-b1dc8063c680 tempest-FloatingIPsAssociationNegativeTestJSON-254000745 tempest-FloatingIPsAssociationNegativeTestJSON-254000745-project-member] Releasing lock "refresh_cache-9e14bd1a-d6c2-4f4a-8919-27647ae5742b" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 603.439605] env[61839]: DEBUG nova.compute.manager [None req-35b58d39-5733-4319-be91-b1dc8063c680 tempest-FloatingIPsAssociationNegativeTestJSON-254000745 tempest-FloatingIPsAssociationNegativeTestJSON-254000745-project-member] [instance: 9e14bd1a-d6c2-4f4a-8919-27647ae5742b] Start destroying the instance on the hypervisor. {{(pid=61839) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 603.439605] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-35b58d39-5733-4319-be91-b1dc8063c680 tempest-FloatingIPsAssociationNegativeTestJSON-254000745 tempest-FloatingIPsAssociationNegativeTestJSON-254000745-project-member] [instance: 9e14bd1a-d6c2-4f4a-8919-27647ae5742b] Destroying instance {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 603.439605] env[61839]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9def143b-78fc-47f6-b899-9e43c79e8a51 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.446075] env[61839]: DEBUG nova.policy [None req-96726cf8-093a-469e-ae4c-d851d34fa48d tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7cdecbaec4c24cf39bc814b8d5e56976', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6c42b8792e72408e928cc5b584f6a273', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61839) authorize /opt/stack/nova/nova/policy.py:201}} [ 603.454090] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a7389d1-1dcb-4b9e-a85b-2be09153b038 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.481178] env[61839]: WARNING nova.virt.vmwareapi.vmops [None req-35b58d39-5733-4319-be91-b1dc8063c680 tempest-FloatingIPsAssociationNegativeTestJSON-254000745 tempest-FloatingIPsAssociationNegativeTestJSON-254000745-project-member] [instance: 9e14bd1a-d6c2-4f4a-8919-27647ae5742b] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 9e14bd1a-d6c2-4f4a-8919-27647ae5742b could not be found. 
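The WARNING above is the destroy path tolerating a VM that was never created: the build failed during port binding before anything was registered in vCenter, so vmops downgrades InstanceNotFound to a warning and continues, which lets the network deallocation and claim cleanup below still run. A minimal sketch of that idiom; InstanceNotFound and the lookup/destroy callables are hypothetical stand-ins for nova.exception.InstanceNotFound and the vmwareapi internals:

    import logging

    LOG = logging.getLogger(__name__)

    class InstanceNotFound(Exception):
        """Stand-in for nova.exception.InstanceNotFound."""

    def destroy(lookup_vm, destroy_vm, instance_uuid):
        # Tolerate a VM that was never created on the backend: a spawn
        # that failed during network allocation never reached vCenter, so
        # cleanup must not raise when the lookup comes back empty,
        # otherwise the instance could never be deleted on the Nova side.
        try:
            vm_ref = lookup_vm(instance_uuid)
        except InstanceNotFound:
            LOG.warning('Instance does not exist on backend: %s',
                        instance_uuid)
            return
        destroy_vm(vm_ref)

Treating "already gone" as success keeps the teardown idempotent, which matters here because the instance is being terminated immediately after a failed build.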
[ 603.481412] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-35b58d39-5733-4319-be91-b1dc8063c680 tempest-FloatingIPsAssociationNegativeTestJSON-254000745 tempest-FloatingIPsAssociationNegativeTestJSON-254000745-project-member] [instance: 9e14bd1a-d6c2-4f4a-8919-27647ae5742b] Instance destroyed {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 603.481596] env[61839]: INFO nova.compute.manager [None req-35b58d39-5733-4319-be91-b1dc8063c680 tempest-FloatingIPsAssociationNegativeTestJSON-254000745 tempest-FloatingIPsAssociationNegativeTestJSON-254000745-project-member] [instance: 9e14bd1a-d6c2-4f4a-8919-27647ae5742b] Took 0.04 seconds to destroy the instance on the hypervisor. [ 603.481877] env[61839]: DEBUG oslo.service.loopingcall [None req-35b58d39-5733-4319-be91-b1dc8063c680 tempest-FloatingIPsAssociationNegativeTestJSON-254000745 tempest-FloatingIPsAssociationNegativeTestJSON-254000745-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 603.482123] env[61839]: DEBUG nova.compute.manager [-] [instance: 9e14bd1a-d6c2-4f4a-8919-27647ae5742b] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 603.482220] env[61839]: DEBUG nova.network.neutron [-] [instance: 9e14bd1a-d6c2-4f4a-8919-27647ae5742b] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 603.517459] env[61839]: DEBUG nova.network.neutron [-] [instance: 9e14bd1a-d6c2-4f4a-8919-27647ae5742b] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 603.527338] env[61839]: INFO nova.scheduler.client.report [None req-a1353c68-52e0-45ee-a66a-5ba6b6d870a4 tempest-InstanceActionsTestJSON-1284580064 tempest-InstanceActionsTestJSON-1284580064-project-member] Deleted allocations for instance ae1917f8-29af-43cc-8397-3b9072acee6c [ 603.723566] env[61839]: DEBUG nova.compute.manager [None req-96726cf8-093a-469e-ae4c-d851d34fa48d tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] [instance: d95b34bb-bf0f-4a43-a5ad-6ae7770b606c] Start building block device mappings for instance. 
{{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 603.964718] env[61839]: DEBUG nova.network.neutron [None req-96726cf8-093a-469e-ae4c-d851d34fa48d tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] [instance: d95b34bb-bf0f-4a43-a5ad-6ae7770b606c] Successfully created port: c9495401-a074-473f-9341-2ef8ba8ae699 {{(pid=61839) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 604.023428] env[61839]: DEBUG nova.network.neutron [-] [instance: 9e14bd1a-d6c2-4f4a-8919-27647ae5742b] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 604.040578] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a1353c68-52e0-45ee-a66a-5ba6b6d870a4 tempest-InstanceActionsTestJSON-1284580064 tempest-InstanceActionsTestJSON-1284580064-project-member] Lock "ae1917f8-29af-43cc-8397-3b9072acee6c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 61.044s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 604.682632] env[61839]: INFO nova.compute.manager [-] [instance: 9e14bd1a-d6c2-4f4a-8919-27647ae5742b] Took 1.20 seconds to deallocate network for instance. [ 604.682993] env[61839]: DEBUG nova.compute.manager [None req-758c0332-86e7-48d1-9a5c-4459947020cd tempest-AttachInterfacesV270Test-504327934 tempest-AttachInterfacesV270Test-504327934-project-member] [instance: 5a130776-5e5f-4eec-8574-08aa1f1ef97a] Starting instance... {{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 604.692074] env[61839]: DEBUG nova.compute.claims [None req-35b58d39-5733-4319-be91-b1dc8063c680 tempest-FloatingIPsAssociationNegativeTestJSON-254000745 tempest-FloatingIPsAssociationNegativeTestJSON-254000745-project-member] [instance: 9e14bd1a-d6c2-4f4a-8919-27647ae5742b] Aborting claim: {{(pid=61839) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 604.692074] env[61839]: DEBUG oslo_concurrency.lockutils [None req-35b58d39-5733-4319-be91-b1dc8063c680 tempest-FloatingIPsAssociationNegativeTestJSON-254000745 tempest-FloatingIPsAssociationNegativeTestJSON-254000745-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 604.818764] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-315a5ea2-6cd7-4000-9e8d-4a1c634fe6f3 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.827162] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5dddf550-dd21-4ca3-a988-37ad84ffd3e6 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.858464] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3b0f69a-edfb-4caa-ad49-c261af886f87 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.866808] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7b9ef03-2a42-4e1b-8735-6d782bd935c7 {{(pid=61839) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.881033] env[61839]: DEBUG nova.compute.provider_tree [None req-3f01d48c-4eb1-4941-a48e-1320be5c5916 tempest-FloatingIPsAssociationTestJSON-751321575 tempest-FloatingIPsAssociationTestJSON-751321575-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 605.186909] env[61839]: DEBUG nova.compute.manager [None req-96726cf8-093a-469e-ae4c-d851d34fa48d tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] [instance: d95b34bb-bf0f-4a43-a5ad-6ae7770b606c] Start spawning the instance on the hypervisor. {{(pid=61839) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 605.214701] env[61839]: DEBUG oslo_concurrency.lockutils [None req-758c0332-86e7-48d1-9a5c-4459947020cd tempest-AttachInterfacesV270Test-504327934 tempest-AttachInterfacesV270Test-504327934-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 605.225081] env[61839]: DEBUG nova.virt.hardware [None req-96726cf8-093a-469e-ae4c-d851d34fa48d tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-18T16:51:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-18T16:50:57Z,direct_url=<?>,disk_format='vmdk',id=e497cc62-282a-4a70-9770-22d80d8a1013,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a98e034276e44534a9feace637762da7',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-10-18T16:50:58Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 605.225238] env[61839]: DEBUG nova.virt.hardware [None req-96726cf8-093a-469e-ae4c-d851d34fa48d tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] Flavor limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 605.225399] env[61839]: DEBUG nova.virt.hardware [None req-96726cf8-093a-469e-ae4c-d851d34fa48d tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] Image limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 605.225577] env[61839]: DEBUG nova.virt.hardware [None req-96726cf8-093a-469e-ae4c-d851d34fa48d tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] Flavor pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 605.225720] env[61839]: DEBUG nova.virt.hardware [None req-96726cf8-093a-469e-ae4c-d851d34fa48d tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] Image pref 0:0:0 {{(pid=61839) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 605.225881] env[61839]: DEBUG nova.virt.hardware [None req-96726cf8-093a-469e-ae4c-d851d34fa48d tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 605.226480] env[61839]: DEBUG nova.virt.hardware [None req-96726cf8-093a-469e-ae4c-d851d34fa48d tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 605.226855] env[61839]: DEBUG nova.virt.hardware [None req-96726cf8-093a-469e-ae4c-d851d34fa48d tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 605.227124] env[61839]: DEBUG nova.virt.hardware [None req-96726cf8-093a-469e-ae4c-d851d34fa48d tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] Got 1 possible topologies {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 605.227362] env[61839]: DEBUG nova.virt.hardware [None req-96726cf8-093a-469e-ae4c-d851d34fa48d tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 605.227545] env[61839]: DEBUG nova.virt.hardware [None req-96726cf8-093a-469e-ae4c-d851d34fa48d tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 605.228733] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b42f043-ce51-4e56-a817-3ba04165aac2 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.238666] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-576e3aba-edaa-438f-8d7e-96b09fb50efd {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.386199] env[61839]: DEBUG nova.scheduler.client.report [None req-3f01d48c-4eb1-4941-a48e-1320be5c5916 tempest-FloatingIPsAssociationTestJSON-751321575 tempest-FloatingIPsAssociationTestJSON-751321575-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider 
/opt/stack/nova/nova/scheduler/client/report.py:954}} [ 605.403187] env[61839]: DEBUG nova.compute.manager [req-bb9495e6-d394-4a73-8c5d-66943f51c9b0 req-8729b8a3-f70d-46c1-9f44-e0b1cf3da7be service nova] [instance: d95b34bb-bf0f-4a43-a5ad-6ae7770b606c] Received event network-changed-c9495401-a074-473f-9341-2ef8ba8ae699 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 605.404740] env[61839]: DEBUG nova.compute.manager [req-bb9495e6-d394-4a73-8c5d-66943f51c9b0 req-8729b8a3-f70d-46c1-9f44-e0b1cf3da7be service nova] [instance: d95b34bb-bf0f-4a43-a5ad-6ae7770b606c] Refreshing instance network info cache due to event network-changed-c9495401-a074-473f-9341-2ef8ba8ae699. {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 605.405306] env[61839]: DEBUG oslo_concurrency.lockutils [req-bb9495e6-d394-4a73-8c5d-66943f51c9b0 req-8729b8a3-f70d-46c1-9f44-e0b1cf3da7be service nova] Acquiring lock "refresh_cache-d95b34bb-bf0f-4a43-a5ad-6ae7770b606c" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 605.405736] env[61839]: DEBUG oslo_concurrency.lockutils [req-bb9495e6-d394-4a73-8c5d-66943f51c9b0 req-8729b8a3-f70d-46c1-9f44-e0b1cf3da7be service nova] Acquired lock "refresh_cache-d95b34bb-bf0f-4a43-a5ad-6ae7770b606c" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 605.405736] env[61839]: DEBUG nova.network.neutron [req-bb9495e6-d394-4a73-8c5d-66943f51c9b0 req-8729b8a3-f70d-46c1-9f44-e0b1cf3da7be service nova] [instance: d95b34bb-bf0f-4a43-a5ad-6ae7770b606c] Refreshing network info cache for port c9495401-a074-473f-9341-2ef8ba8ae699 {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 605.727498] env[61839]: ERROR nova.compute.manager [None req-96726cf8-093a-469e-ae4c-d851d34fa48d tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port c9495401-a074-473f-9341-2ef8ba8ae699, please check neutron logs for more information. 
[ 605.727498] env[61839]: ERROR nova.compute.manager Traceback (most recent call last): [ 605.727498] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 605.727498] env[61839]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 605.727498] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 605.727498] env[61839]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 605.727498] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 605.727498] env[61839]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 605.727498] env[61839]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 605.727498] env[61839]: ERROR nova.compute.manager self.force_reraise() [ 605.727498] env[61839]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 605.727498] env[61839]: ERROR nova.compute.manager raise self.value [ 605.727498] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 605.727498] env[61839]: ERROR nova.compute.manager updated_port = self._update_port( [ 605.727498] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 605.727498] env[61839]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 605.728096] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 605.728096] env[61839]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 605.728096] env[61839]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port c9495401-a074-473f-9341-2ef8ba8ae699, please check neutron logs for more information. 
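Annotation (not part of the original log): the traceback above bottoms out in _ensure_no_port_binding_failure (nova/network/neutron.py:294), which converts a failed Neutron binding into PortBindingFailed so the build is aborted. A minimal, runnable sketch of what that guard amounts to, under the assumption, consistent with the traceback but simplified from the real source, that Neutron reports a failed binding via the port's binding:vif_type attribute:

    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(f"Binding failed for port {port_id}, "
                             "please check neutron logs for more information.")

    def ensure_no_port_binding_failure(port):
        # 'binding_failed' is the vif_type Neutron uses when no mechanism
        # driver could bind the port (assumption: simplified from Nova's check).
        if port.get('binding:vif_type') == 'binding_failed':
            raise PortBindingFailed(port_id=port['id'])

    try:
        ensure_no_port_binding_failure(
            {'id': 'c9495401-a074-473f-9341-2ef8ba8ae699',
             'binding:vif_type': 'binding_failed'})
    except PortBindingFailed as exc:
        print(exc)  # the same message that propagates through the log above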
[ 605.728096] env[61839]: ERROR nova.compute.manager [ 605.728096] env[61839]: Traceback (most recent call last): [ 605.728096] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 605.728096] env[61839]: listener.cb(fileno) [ 605.728096] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 605.728096] env[61839]: result = function(*args, **kwargs) [ 605.728096] env[61839]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 605.728096] env[61839]: return func(*args, **kwargs) [ 605.728096] env[61839]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 605.728096] env[61839]: raise e [ 605.728096] env[61839]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 605.728096] env[61839]: nwinfo = self.network_api.allocate_for_instance( [ 605.728096] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 605.728096] env[61839]: created_port_ids = self._update_ports_for_instance( [ 605.728096] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 605.728096] env[61839]: with excutils.save_and_reraise_exception(): [ 605.728096] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 605.728096] env[61839]: self.force_reraise() [ 605.728096] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 605.728096] env[61839]: raise self.value [ 605.728096] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 605.728096] env[61839]: updated_port = self._update_port( [ 605.728096] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 605.728096] env[61839]: _ensure_no_port_binding_failure(port) [ 605.728096] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 605.728096] env[61839]: raise exception.PortBindingFailed(port_id=port['id']) [ 605.728720] env[61839]: nova.exception.PortBindingFailed: Binding failed for port c9495401-a074-473f-9341-2ef8ba8ae699, please check neutron logs for more information. [ 605.728720] env[61839]: Removing descriptor: 17 [ 605.728720] env[61839]: ERROR nova.compute.manager [None req-96726cf8-093a-469e-ae4c-d851d34fa48d tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] [instance: d95b34bb-bf0f-4a43-a5ad-6ae7770b606c] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port c9495401-a074-473f-9341-2ef8ba8ae699, please check neutron logs for more information. 
[ 605.728720] env[61839]: ERROR nova.compute.manager [instance: d95b34bb-bf0f-4a43-a5ad-6ae7770b606c] Traceback (most recent call last): [ 605.728720] env[61839]: ERROR nova.compute.manager [instance: d95b34bb-bf0f-4a43-a5ad-6ae7770b606c] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 605.728720] env[61839]: ERROR nova.compute.manager [instance: d95b34bb-bf0f-4a43-a5ad-6ae7770b606c] yield resources [ 605.728720] env[61839]: ERROR nova.compute.manager [instance: d95b34bb-bf0f-4a43-a5ad-6ae7770b606c] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 605.728720] env[61839]: ERROR nova.compute.manager [instance: d95b34bb-bf0f-4a43-a5ad-6ae7770b606c] self.driver.spawn(context, instance, image_meta, [ 605.728720] env[61839]: ERROR nova.compute.manager [instance: d95b34bb-bf0f-4a43-a5ad-6ae7770b606c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 605.728720] env[61839]: ERROR nova.compute.manager [instance: d95b34bb-bf0f-4a43-a5ad-6ae7770b606c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 605.728720] env[61839]: ERROR nova.compute.manager [instance: d95b34bb-bf0f-4a43-a5ad-6ae7770b606c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 605.728720] env[61839]: ERROR nova.compute.manager [instance: d95b34bb-bf0f-4a43-a5ad-6ae7770b606c] vm_ref = self.build_virtual_machine(instance, [ 605.728987] env[61839]: ERROR nova.compute.manager [instance: d95b34bb-bf0f-4a43-a5ad-6ae7770b606c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 605.728987] env[61839]: ERROR nova.compute.manager [instance: d95b34bb-bf0f-4a43-a5ad-6ae7770b606c] vif_infos = vmwarevif.get_vif_info(self._session, [ 605.728987] env[61839]: ERROR nova.compute.manager [instance: d95b34bb-bf0f-4a43-a5ad-6ae7770b606c] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 605.728987] env[61839]: ERROR nova.compute.manager [instance: d95b34bb-bf0f-4a43-a5ad-6ae7770b606c] for vif in network_info: [ 605.728987] env[61839]: ERROR nova.compute.manager [instance: d95b34bb-bf0f-4a43-a5ad-6ae7770b606c] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 605.728987] env[61839]: ERROR nova.compute.manager [instance: d95b34bb-bf0f-4a43-a5ad-6ae7770b606c] return self._sync_wrapper(fn, *args, **kwargs) [ 605.728987] env[61839]: ERROR nova.compute.manager [instance: d95b34bb-bf0f-4a43-a5ad-6ae7770b606c] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 605.728987] env[61839]: ERROR nova.compute.manager [instance: d95b34bb-bf0f-4a43-a5ad-6ae7770b606c] self.wait() [ 605.728987] env[61839]: ERROR nova.compute.manager [instance: d95b34bb-bf0f-4a43-a5ad-6ae7770b606c] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 605.728987] env[61839]: ERROR nova.compute.manager [instance: d95b34bb-bf0f-4a43-a5ad-6ae7770b606c] self[:] = self._gt.wait() [ 605.728987] env[61839]: ERROR nova.compute.manager [instance: d95b34bb-bf0f-4a43-a5ad-6ae7770b606c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 605.728987] env[61839]: ERROR nova.compute.manager [instance: d95b34bb-bf0f-4a43-a5ad-6ae7770b606c] return self._exit_event.wait() [ 605.728987] env[61839]: ERROR nova.compute.manager [instance: d95b34bb-bf0f-4a43-a5ad-6ae7770b606c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 605.729275] env[61839]: ERROR 
nova.compute.manager [instance: d95b34bb-bf0f-4a43-a5ad-6ae7770b606c] result = hub.switch() [ 605.729275] env[61839]: ERROR nova.compute.manager [instance: d95b34bb-bf0f-4a43-a5ad-6ae7770b606c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 605.729275] env[61839]: ERROR nova.compute.manager [instance: d95b34bb-bf0f-4a43-a5ad-6ae7770b606c] return self.greenlet.switch() [ 605.729275] env[61839]: ERROR nova.compute.manager [instance: d95b34bb-bf0f-4a43-a5ad-6ae7770b606c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 605.729275] env[61839]: ERROR nova.compute.manager [instance: d95b34bb-bf0f-4a43-a5ad-6ae7770b606c] result = function(*args, **kwargs) [ 605.729275] env[61839]: ERROR nova.compute.manager [instance: d95b34bb-bf0f-4a43-a5ad-6ae7770b606c] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 605.729275] env[61839]: ERROR nova.compute.manager [instance: d95b34bb-bf0f-4a43-a5ad-6ae7770b606c] return func(*args, **kwargs) [ 605.729275] env[61839]: ERROR nova.compute.manager [instance: d95b34bb-bf0f-4a43-a5ad-6ae7770b606c] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 605.729275] env[61839]: ERROR nova.compute.manager [instance: d95b34bb-bf0f-4a43-a5ad-6ae7770b606c] raise e [ 605.729275] env[61839]: ERROR nova.compute.manager [instance: d95b34bb-bf0f-4a43-a5ad-6ae7770b606c] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 605.729275] env[61839]: ERROR nova.compute.manager [instance: d95b34bb-bf0f-4a43-a5ad-6ae7770b606c] nwinfo = self.network_api.allocate_for_instance( [ 605.729275] env[61839]: ERROR nova.compute.manager [instance: d95b34bb-bf0f-4a43-a5ad-6ae7770b606c] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 605.729275] env[61839]: ERROR nova.compute.manager [instance: d95b34bb-bf0f-4a43-a5ad-6ae7770b606c] created_port_ids = self._update_ports_for_instance( [ 605.729552] env[61839]: ERROR nova.compute.manager [instance: d95b34bb-bf0f-4a43-a5ad-6ae7770b606c] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 605.729552] env[61839]: ERROR nova.compute.manager [instance: d95b34bb-bf0f-4a43-a5ad-6ae7770b606c] with excutils.save_and_reraise_exception(): [ 605.729552] env[61839]: ERROR nova.compute.manager [instance: d95b34bb-bf0f-4a43-a5ad-6ae7770b606c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 605.729552] env[61839]: ERROR nova.compute.manager [instance: d95b34bb-bf0f-4a43-a5ad-6ae7770b606c] self.force_reraise() [ 605.729552] env[61839]: ERROR nova.compute.manager [instance: d95b34bb-bf0f-4a43-a5ad-6ae7770b606c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 605.729552] env[61839]: ERROR nova.compute.manager [instance: d95b34bb-bf0f-4a43-a5ad-6ae7770b606c] raise self.value [ 605.729552] env[61839]: ERROR nova.compute.manager [instance: d95b34bb-bf0f-4a43-a5ad-6ae7770b606c] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 605.729552] env[61839]: ERROR nova.compute.manager [instance: d95b34bb-bf0f-4a43-a5ad-6ae7770b606c] updated_port = self._update_port( [ 605.729552] env[61839]: ERROR nova.compute.manager [instance: d95b34bb-bf0f-4a43-a5ad-6ae7770b606c] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 605.729552] 
env[61839]: ERROR nova.compute.manager [instance: d95b34bb-bf0f-4a43-a5ad-6ae7770b606c] _ensure_no_port_binding_failure(port) [ 605.729552] env[61839]: ERROR nova.compute.manager [instance: d95b34bb-bf0f-4a43-a5ad-6ae7770b606c] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 605.729552] env[61839]: ERROR nova.compute.manager [instance: d95b34bb-bf0f-4a43-a5ad-6ae7770b606c] raise exception.PortBindingFailed(port_id=port['id']) [ 605.729859] env[61839]: ERROR nova.compute.manager [instance: d95b34bb-bf0f-4a43-a5ad-6ae7770b606c] nova.exception.PortBindingFailed: Binding failed for port c9495401-a074-473f-9341-2ef8ba8ae699, please check neutron logs for more information. [ 605.729859] env[61839]: ERROR nova.compute.manager [instance: d95b34bb-bf0f-4a43-a5ad-6ae7770b606c] [ 605.729859] env[61839]: INFO nova.compute.manager [None req-96726cf8-093a-469e-ae4c-d851d34fa48d tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] [instance: d95b34bb-bf0f-4a43-a5ad-6ae7770b606c] Terminating instance [ 605.733666] env[61839]: DEBUG oslo_concurrency.lockutils [None req-96726cf8-093a-469e-ae4c-d851d34fa48d tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] Acquiring lock "refresh_cache-d95b34bb-bf0f-4a43-a5ad-6ae7770b606c" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 605.898054] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3f01d48c-4eb1-4941-a48e-1320be5c5916 tempest-FloatingIPsAssociationTestJSON-751321575 tempest-FloatingIPsAssociationTestJSON-751321575-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.190s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 605.898584] env[61839]: DEBUG nova.compute.manager [None req-3f01d48c-4eb1-4941-a48e-1320be5c5916 tempest-FloatingIPsAssociationTestJSON-751321575 tempest-FloatingIPsAssociationTestJSON-751321575-project-member] [instance: 916834d8-4819-4167-8774-b0a665021ef8] Start building networks asynchronously for instance. 
{{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 605.901193] env[61839]: DEBUG oslo_concurrency.lockutils [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 18.393s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 605.904020] env[61839]: DEBUG oslo_concurrency.lockutils [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 605.904020] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61839) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 605.904020] env[61839]: DEBUG oslo_concurrency.lockutils [None req-eb9c6f05-d841-4e49-8f1a-c00c24c2a91c tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.673s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 605.904020] env[61839]: INFO nova.compute.claims [None req-eb9c6f05-d841-4e49-8f1a-c00c24c2a91c tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] [instance: 7fdd773e-3a96-4728-b162-0227a415bc96] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 605.906544] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb457edd-3c66-4a91-9f66-baeebc921df2 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.916405] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be3b84e2-2000-44de-af28-8a19e975a234 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.937212] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4033b5e5-e17d-4519-89bb-78003ae7a060 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.941165] env[61839]: DEBUG nova.network.neutron [req-bb9495e6-d394-4a73-8c5d-66943f51c9b0 req-8729b8a3-f70d-46c1-9f44-e0b1cf3da7be service nova] [instance: d95b34bb-bf0f-4a43-a5ad-6ae7770b606c] Instance cache missing network info. 
{{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 605.947514] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90a6e14c-976b-4993-b524-28b2622bb573 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.980833] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181458MB free_disk=135GB free_vcpus=48 pci_devices=None {{(pid=61839) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 605.980833] env[61839]: DEBUG oslo_concurrency.lockutils [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 606.341862] env[61839]: DEBUG nova.network.neutron [req-bb9495e6-d394-4a73-8c5d-66943f51c9b0 req-8729b8a3-f70d-46c1-9f44-e0b1cf3da7be service nova] [instance: d95b34bb-bf0f-4a43-a5ad-6ae7770b606c] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 606.407816] env[61839]: DEBUG nova.compute.utils [None req-3f01d48c-4eb1-4941-a48e-1320be5c5916 tempest-FloatingIPsAssociationTestJSON-751321575 tempest-FloatingIPsAssociationTestJSON-751321575-project-member] Using /dev/sd instead of None {{(pid=61839) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 606.409334] env[61839]: DEBUG nova.compute.manager [None req-3f01d48c-4eb1-4941-a48e-1320be5c5916 tempest-FloatingIPsAssociationTestJSON-751321575 tempest-FloatingIPsAssociationTestJSON-751321575-project-member] [instance: 916834d8-4819-4167-8774-b0a665021ef8] Allocating IP information in the background. 
{{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 606.409591] env[61839]: DEBUG nova.network.neutron [None req-3f01d48c-4eb1-4941-a48e-1320be5c5916 tempest-FloatingIPsAssociationTestJSON-751321575 tempest-FloatingIPsAssociationTestJSON-751321575-project-member] [instance: 916834d8-4819-4167-8774-b0a665021ef8] allocate_for_instance() {{(pid=61839) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 606.517203] env[61839]: DEBUG nova.policy [None req-3f01d48c-4eb1-4941-a48e-1320be5c5916 tempest-FloatingIPsAssociationTestJSON-751321575 tempest-FloatingIPsAssociationTestJSON-751321575-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '17993d093b1046bfb69f07d6082d43f1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '291da69a81e249939a5d7fee7c68fc86', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61839) authorize /opt/stack/nova/nova/policy.py:201}} [ 606.846793] env[61839]: DEBUG oslo_concurrency.lockutils [req-bb9495e6-d394-4a73-8c5d-66943f51c9b0 req-8729b8a3-f70d-46c1-9f44-e0b1cf3da7be service nova] Releasing lock "refresh_cache-d95b34bb-bf0f-4a43-a5ad-6ae7770b606c" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 606.846793] env[61839]: DEBUG oslo_concurrency.lockutils [None req-96726cf8-093a-469e-ae4c-d851d34fa48d tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] Acquired lock "refresh_cache-d95b34bb-bf0f-4a43-a5ad-6ae7770b606c" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 606.846793] env[61839]: DEBUG nova.network.neutron [None req-96726cf8-093a-469e-ae4c-d851d34fa48d tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] [instance: d95b34bb-bf0f-4a43-a5ad-6ae7770b606c] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 606.917035] env[61839]: DEBUG nova.compute.manager [None req-3f01d48c-4eb1-4941-a48e-1320be5c5916 tempest-FloatingIPsAssociationTestJSON-751321575 tempest-FloatingIPsAssociationTestJSON-751321575-project-member] [instance: 916834d8-4819-4167-8774-b0a665021ef8] Start building block device mappings for instance. {{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 607.214351] env[61839]: DEBUG nova.network.neutron [None req-3f01d48c-4eb1-4941-a48e-1320be5c5916 tempest-FloatingIPsAssociationTestJSON-751321575 tempest-FloatingIPsAssociationTestJSON-751321575-project-member] [instance: 916834d8-4819-4167-8774-b0a665021ef8] Successfully created port: 245851ea-1302-48e0-a961-6cf5b78f0b43 {{(pid=61839) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 607.386077] env[61839]: DEBUG nova.network.neutron [None req-96726cf8-093a-469e-ae4c-d851d34fa48d tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] [instance: d95b34bb-bf0f-4a43-a5ad-6ae7770b606c] Instance cache missing network info. 
{{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 607.407266] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5df26376-73ed-487b-aea5-c52a0ae3d6f2 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.417496] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7370c4e-73c4-43e8-a2c2-5908173f0f07 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.457752] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6206cea8-3bd3-4a64-b0a8-030c11fd4c5d {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.466817] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57f43185-1c52-4b03-a8c3-5ad7e997748d {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.481922] env[61839]: DEBUG nova.compute.provider_tree [None req-eb9c6f05-d841-4e49-8f1a-c00c24c2a91c tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 607.558900] env[61839]: DEBUG nova.compute.manager [req-9a48fb17-fc6b-45c6-b427-951964a18b9d req-3f2ba7df-a08d-41a1-aa1f-f62ba8f2b2e7 service nova] [instance: d95b34bb-bf0f-4a43-a5ad-6ae7770b606c] Received event network-vif-deleted-c9495401-a074-473f-9341-2ef8ba8ae699 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 607.585716] env[61839]: DEBUG nova.network.neutron [None req-96726cf8-093a-469e-ae4c-d851d34fa48d tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] [instance: d95b34bb-bf0f-4a43-a5ad-6ae7770b606c] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 607.930973] env[61839]: DEBUG nova.compute.manager [None req-3f01d48c-4eb1-4941-a48e-1320be5c5916 tempest-FloatingIPsAssociationTestJSON-751321575 tempest-FloatingIPsAssociationTestJSON-751321575-project-member] [instance: 916834d8-4819-4167-8774-b0a665021ef8] Start spawning the instance on the hypervisor. 
{{(pid=61839) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 607.967035] env[61839]: DEBUG nova.virt.hardware [None req-3f01d48c-4eb1-4941-a48e-1320be5c5916 tempest-FloatingIPsAssociationTestJSON-751321575 tempest-FloatingIPsAssociationTestJSON-751321575-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-18T16:51:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-18T16:50:57Z,direct_url=<?>,disk_format='vmdk',id=e497cc62-282a-4a70-9770-22d80d8a1013,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a98e034276e44534a9feace637762da7',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-10-18T16:50:58Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 607.967035] env[61839]: DEBUG nova.virt.hardware [None req-3f01d48c-4eb1-4941-a48e-1320be5c5916 tempest-FloatingIPsAssociationTestJSON-751321575 tempest-FloatingIPsAssociationTestJSON-751321575-project-member] Flavor limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 607.967035] env[61839]: DEBUG nova.virt.hardware [None req-3f01d48c-4eb1-4941-a48e-1320be5c5916 tempest-FloatingIPsAssociationTestJSON-751321575 tempest-FloatingIPsAssociationTestJSON-751321575-project-member] Image limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 607.967239] env[61839]: DEBUG nova.virt.hardware [None req-3f01d48c-4eb1-4941-a48e-1320be5c5916 tempest-FloatingIPsAssociationTestJSON-751321575 tempest-FloatingIPsAssociationTestJSON-751321575-project-member] Flavor pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 607.967239] env[61839]: DEBUG nova.virt.hardware [None req-3f01d48c-4eb1-4941-a48e-1320be5c5916 tempest-FloatingIPsAssociationTestJSON-751321575 tempest-FloatingIPsAssociationTestJSON-751321575-project-member] Image pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 607.967239] env[61839]: DEBUG nova.virt.hardware [None req-3f01d48c-4eb1-4941-a48e-1320be5c5916 tempest-FloatingIPsAssociationTestJSON-751321575 tempest-FloatingIPsAssociationTestJSON-751321575-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 607.967239] env[61839]: DEBUG nova.virt.hardware [None req-3f01d48c-4eb1-4941-a48e-1320be5c5916 tempest-FloatingIPsAssociationTestJSON-751321575 tempest-FloatingIPsAssociationTestJSON-751321575-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 607.967239] env[61839]: DEBUG nova.virt.hardware [None req-3f01d48c-4eb1-4941-a48e-1320be5c5916 tempest-FloatingIPsAssociationTestJSON-751321575 tempest-FloatingIPsAssociationTestJSON-751321575-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61839) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 607.967732] env[61839]: DEBUG nova.virt.hardware [None req-3f01d48c-4eb1-4941-a48e-1320be5c5916 tempest-FloatingIPsAssociationTestJSON-751321575 tempest-FloatingIPsAssociationTestJSON-751321575-project-member] Got 1 possible topologies {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 607.968062] env[61839]: DEBUG nova.virt.hardware [None req-3f01d48c-4eb1-4941-a48e-1320be5c5916 tempest-FloatingIPsAssociationTestJSON-751321575 tempest-FloatingIPsAssociationTestJSON-751321575-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 607.968365] env[61839]: DEBUG nova.virt.hardware [None req-3f01d48c-4eb1-4941-a48e-1320be5c5916 tempest-FloatingIPsAssociationTestJSON-751321575 tempest-FloatingIPsAssociationTestJSON-751321575-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 607.969415] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32c7dd7e-d004-486a-a438-bbe23f44e623 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.979094] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01cf3b85-aff1-4014-93d9-ddd234248fc7 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.989394] env[61839]: DEBUG nova.scheduler.client.report [None req-eb9c6f05-d841-4e49-8f1a-c00c24c2a91c tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 608.090424] env[61839]: DEBUG oslo_concurrency.lockutils [None req-96726cf8-093a-469e-ae4c-d851d34fa48d tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] Releasing lock "refresh_cache-d95b34bb-bf0f-4a43-a5ad-6ae7770b606c" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 608.090856] env[61839]: DEBUG nova.compute.manager [None req-96726cf8-093a-469e-ae4c-d851d34fa48d tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] [instance: d95b34bb-bf0f-4a43-a5ad-6ae7770b606c] Start destroying the instance on the hypervisor. 
{{(pid=61839) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 608.091117] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-96726cf8-093a-469e-ae4c-d851d34fa48d tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] [instance: d95b34bb-bf0f-4a43-a5ad-6ae7770b606c] Destroying instance {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 608.091470] env[61839]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d416149c-daaf-4aab-b157-6806e1797c3d {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.100953] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c578b743-cd69-4063-a70a-8cf71337e565 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.125768] env[61839]: WARNING nova.virt.vmwareapi.vmops [None req-96726cf8-093a-469e-ae4c-d851d34fa48d tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] [instance: d95b34bb-bf0f-4a43-a5ad-6ae7770b606c] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance d95b34bb-bf0f-4a43-a5ad-6ae7770b606c could not be found. [ 608.126101] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-96726cf8-093a-469e-ae4c-d851d34fa48d tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] [instance: d95b34bb-bf0f-4a43-a5ad-6ae7770b606c] Instance destroyed {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 608.126332] env[61839]: INFO nova.compute.manager [None req-96726cf8-093a-469e-ae4c-d851d34fa48d tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] [instance: d95b34bb-bf0f-4a43-a5ad-6ae7770b606c] Took 0.04 seconds to destroy the instance on the hypervisor. [ 608.126584] env[61839]: DEBUG oslo.service.loopingcall [None req-96726cf8-093a-469e-ae4c-d851d34fa48d tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 608.126971] env[61839]: DEBUG nova.compute.manager [-] [instance: d95b34bb-bf0f-4a43-a5ad-6ae7770b606c] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 608.127088] env[61839]: DEBUG nova.network.neutron [-] [instance: d95b34bb-bf0f-4a43-a5ad-6ae7770b606c] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 608.164488] env[61839]: DEBUG nova.network.neutron [-] [instance: d95b34bb-bf0f-4a43-a5ad-6ae7770b606c] Instance cache missing network info. 
{{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 608.495615] env[61839]: DEBUG oslo_concurrency.lockutils [None req-eb9c6f05-d841-4e49-8f1a-c00c24c2a91c tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.594s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 608.496750] env[61839]: DEBUG nova.compute.manager [None req-eb9c6f05-d841-4e49-8f1a-c00c24c2a91c tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] [instance: 7fdd773e-3a96-4728-b162-0227a415bc96] Start building networks asynchronously for instance. {{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 608.500115] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3986eaa2-d0de-4161-82e0-803ae69da9c7 tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.032s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 608.501318] env[61839]: INFO nova.compute.claims [None req-3986eaa2-d0de-4161-82e0-803ae69da9c7 tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] [instance: f8b36479-70a1-4f4e-84f4-e3baf9a56c45] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 608.671581] env[61839]: DEBUG nova.network.neutron [-] [instance: d95b34bb-bf0f-4a43-a5ad-6ae7770b606c] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 608.915471] env[61839]: ERROR nova.compute.manager [None req-3f01d48c-4eb1-4941-a48e-1320be5c5916 tempest-FloatingIPsAssociationTestJSON-751321575 tempest-FloatingIPsAssociationTestJSON-751321575-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 245851ea-1302-48e0-a961-6cf5b78f0b43, please check neutron logs for more information. 
[ 608.915471] env[61839]: ERROR nova.compute.manager Traceback (most recent call last): [ 608.915471] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 608.915471] env[61839]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 608.915471] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 608.915471] env[61839]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 608.915471] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 608.915471] env[61839]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 608.915471] env[61839]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 608.915471] env[61839]: ERROR nova.compute.manager self.force_reraise() [ 608.915471] env[61839]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 608.915471] env[61839]: ERROR nova.compute.manager raise self.value [ 608.915471] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 608.915471] env[61839]: ERROR nova.compute.manager updated_port = self._update_port( [ 608.915471] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 608.915471] env[61839]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 608.915934] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 608.915934] env[61839]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 608.915934] env[61839]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 245851ea-1302-48e0-a961-6cf5b78f0b43, please check neutron logs for more information. 
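Annotation (not part of the original log): both tracebacks pass through oslo_utils.excutils.save_and_reraise_exception(), which is what produces the __exit__ / force_reraise / raise self.value frames: the context manager lets _update_ports_for_instance run cleanup while the original exception is saved, then re-raises it unchanged so PortBindingFailed surfaces intact. A runnable sketch of that pattern using the real oslo.utils API (the cleanup body and port list here are illustrative, not Nova's):

    from oslo_utils import excutils  # pip install oslo.utils

    def update_ports(created_port_ids):
        try:
            raise RuntimeError("stand-in for the Neutron binding error")
        except Exception:
            with excutils.save_and_reraise_exception():
                # Runs before force_reraise(); the saved exception survives
                # even if this block logs or cleans up state.
                print("cleaning up ports:", created_port_ids)

    try:
        update_ports(["245851ea-1302-48e0-a961-6cf5b78f0b43"])
    except RuntimeError as exc:
        print("re-raised unchanged:", exc)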
[ 608.915934] env[61839]: ERROR nova.compute.manager [ 608.915934] env[61839]: Traceback (most recent call last): [ 608.915934] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 608.915934] env[61839]: listener.cb(fileno) [ 608.915934] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 608.915934] env[61839]: result = function(*args, **kwargs) [ 608.915934] env[61839]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 608.915934] env[61839]: return func(*args, **kwargs) [ 608.915934] env[61839]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 608.915934] env[61839]: raise e [ 608.915934] env[61839]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 608.915934] env[61839]: nwinfo = self.network_api.allocate_for_instance( [ 608.915934] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 608.915934] env[61839]: created_port_ids = self._update_ports_for_instance( [ 608.915934] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 608.915934] env[61839]: with excutils.save_and_reraise_exception(): [ 608.915934] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 608.915934] env[61839]: self.force_reraise() [ 608.915934] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 608.915934] env[61839]: raise self.value [ 608.915934] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 608.915934] env[61839]: updated_port = self._update_port( [ 608.915934] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 608.915934] env[61839]: _ensure_no_port_binding_failure(port) [ 608.915934] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 608.915934] env[61839]: raise exception.PortBindingFailed(port_id=port['id']) [ 608.916577] env[61839]: nova.exception.PortBindingFailed: Binding failed for port 245851ea-1302-48e0-a961-6cf5b78f0b43, please check neutron logs for more information. [ 608.916577] env[61839]: Removing descriptor: 17 [ 608.916577] env[61839]: ERROR nova.compute.manager [None req-3f01d48c-4eb1-4941-a48e-1320be5c5916 tempest-FloatingIPsAssociationTestJSON-751321575 tempest-FloatingIPsAssociationTestJSON-751321575-project-member] [instance: 916834d8-4819-4167-8774-b0a665021ef8] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 245851ea-1302-48e0-a961-6cf5b78f0b43, please check neutron logs for more information. 
[ 608.916577] env[61839]: ERROR nova.compute.manager [instance: 916834d8-4819-4167-8774-b0a665021ef8] Traceback (most recent call last): [ 608.916577] env[61839]: ERROR nova.compute.manager [instance: 916834d8-4819-4167-8774-b0a665021ef8] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 608.916577] env[61839]: ERROR nova.compute.manager [instance: 916834d8-4819-4167-8774-b0a665021ef8] yield resources [ 608.916577] env[61839]: ERROR nova.compute.manager [instance: 916834d8-4819-4167-8774-b0a665021ef8] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 608.916577] env[61839]: ERROR nova.compute.manager [instance: 916834d8-4819-4167-8774-b0a665021ef8] self.driver.spawn(context, instance, image_meta, [ 608.916577] env[61839]: ERROR nova.compute.manager [instance: 916834d8-4819-4167-8774-b0a665021ef8] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 608.916577] env[61839]: ERROR nova.compute.manager [instance: 916834d8-4819-4167-8774-b0a665021ef8] self._vmops.spawn(context, instance, image_meta, injected_files, [ 608.916577] env[61839]: ERROR nova.compute.manager [instance: 916834d8-4819-4167-8774-b0a665021ef8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 608.916577] env[61839]: ERROR nova.compute.manager [instance: 916834d8-4819-4167-8774-b0a665021ef8] vm_ref = self.build_virtual_machine(instance, [ 608.916848] env[61839]: ERROR nova.compute.manager [instance: 916834d8-4819-4167-8774-b0a665021ef8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 608.916848] env[61839]: ERROR nova.compute.manager [instance: 916834d8-4819-4167-8774-b0a665021ef8] vif_infos = vmwarevif.get_vif_info(self._session, [ 608.916848] env[61839]: ERROR nova.compute.manager [instance: 916834d8-4819-4167-8774-b0a665021ef8] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 608.916848] env[61839]: ERROR nova.compute.manager [instance: 916834d8-4819-4167-8774-b0a665021ef8] for vif in network_info: [ 608.916848] env[61839]: ERROR nova.compute.manager [instance: 916834d8-4819-4167-8774-b0a665021ef8] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 608.916848] env[61839]: ERROR nova.compute.manager [instance: 916834d8-4819-4167-8774-b0a665021ef8] return self._sync_wrapper(fn, *args, **kwargs) [ 608.916848] env[61839]: ERROR nova.compute.manager [instance: 916834d8-4819-4167-8774-b0a665021ef8] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 608.916848] env[61839]: ERROR nova.compute.manager [instance: 916834d8-4819-4167-8774-b0a665021ef8] self.wait() [ 608.916848] env[61839]: ERROR nova.compute.manager [instance: 916834d8-4819-4167-8774-b0a665021ef8] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 608.916848] env[61839]: ERROR nova.compute.manager [instance: 916834d8-4819-4167-8774-b0a665021ef8] self[:] = self._gt.wait() [ 608.916848] env[61839]: ERROR nova.compute.manager [instance: 916834d8-4819-4167-8774-b0a665021ef8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 608.916848] env[61839]: ERROR nova.compute.manager [instance: 916834d8-4819-4167-8774-b0a665021ef8] return self._exit_event.wait() [ 608.916848] env[61839]: ERROR nova.compute.manager [instance: 916834d8-4819-4167-8774-b0a665021ef8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 608.917147] env[61839]: ERROR 
nova.compute.manager [instance: 916834d8-4819-4167-8774-b0a665021ef8] result = hub.switch() [ 608.917147] env[61839]: ERROR nova.compute.manager [instance: 916834d8-4819-4167-8774-b0a665021ef8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 608.917147] env[61839]: ERROR nova.compute.manager [instance: 916834d8-4819-4167-8774-b0a665021ef8] return self.greenlet.switch() [ 608.917147] env[61839]: ERROR nova.compute.manager [instance: 916834d8-4819-4167-8774-b0a665021ef8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 608.917147] env[61839]: ERROR nova.compute.manager [instance: 916834d8-4819-4167-8774-b0a665021ef8] result = function(*args, **kwargs) [ 608.917147] env[61839]: ERROR nova.compute.manager [instance: 916834d8-4819-4167-8774-b0a665021ef8] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 608.917147] env[61839]: ERROR nova.compute.manager [instance: 916834d8-4819-4167-8774-b0a665021ef8] return func(*args, **kwargs) [ 608.917147] env[61839]: ERROR nova.compute.manager [instance: 916834d8-4819-4167-8774-b0a665021ef8] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 608.917147] env[61839]: ERROR nova.compute.manager [instance: 916834d8-4819-4167-8774-b0a665021ef8] raise e [ 608.917147] env[61839]: ERROR nova.compute.manager [instance: 916834d8-4819-4167-8774-b0a665021ef8] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 608.917147] env[61839]: ERROR nova.compute.manager [instance: 916834d8-4819-4167-8774-b0a665021ef8] nwinfo = self.network_api.allocate_for_instance( [ 608.917147] env[61839]: ERROR nova.compute.manager [instance: 916834d8-4819-4167-8774-b0a665021ef8] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 608.917147] env[61839]: ERROR nova.compute.manager [instance: 916834d8-4819-4167-8774-b0a665021ef8] created_port_ids = self._update_ports_for_instance( [ 608.917429] env[61839]: ERROR nova.compute.manager [instance: 916834d8-4819-4167-8774-b0a665021ef8] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 608.917429] env[61839]: ERROR nova.compute.manager [instance: 916834d8-4819-4167-8774-b0a665021ef8] with excutils.save_and_reraise_exception(): [ 608.917429] env[61839]: ERROR nova.compute.manager [instance: 916834d8-4819-4167-8774-b0a665021ef8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 608.917429] env[61839]: ERROR nova.compute.manager [instance: 916834d8-4819-4167-8774-b0a665021ef8] self.force_reraise() [ 608.917429] env[61839]: ERROR nova.compute.manager [instance: 916834d8-4819-4167-8774-b0a665021ef8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 608.917429] env[61839]: ERROR nova.compute.manager [instance: 916834d8-4819-4167-8774-b0a665021ef8] raise self.value [ 608.917429] env[61839]: ERROR nova.compute.manager [instance: 916834d8-4819-4167-8774-b0a665021ef8] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 608.917429] env[61839]: ERROR nova.compute.manager [instance: 916834d8-4819-4167-8774-b0a665021ef8] updated_port = self._update_port( [ 608.917429] env[61839]: ERROR nova.compute.manager [instance: 916834d8-4819-4167-8774-b0a665021ef8] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 608.917429] 
env[61839]: ERROR nova.compute.manager [instance: 916834d8-4819-4167-8774-b0a665021ef8] _ensure_no_port_binding_failure(port) [ 608.917429] env[61839]: ERROR nova.compute.manager [instance: 916834d8-4819-4167-8774-b0a665021ef8] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 608.917429] env[61839]: ERROR nova.compute.manager [instance: 916834d8-4819-4167-8774-b0a665021ef8] raise exception.PortBindingFailed(port_id=port['id']) [ 608.917685] env[61839]: ERROR nova.compute.manager [instance: 916834d8-4819-4167-8774-b0a665021ef8] nova.exception.PortBindingFailed: Binding failed for port 245851ea-1302-48e0-a961-6cf5b78f0b43, please check neutron logs for more information. [ 608.917685] env[61839]: ERROR nova.compute.manager [instance: 916834d8-4819-4167-8774-b0a665021ef8] [ 608.917685] env[61839]: INFO nova.compute.manager [None req-3f01d48c-4eb1-4941-a48e-1320be5c5916 tempest-FloatingIPsAssociationTestJSON-751321575 tempest-FloatingIPsAssociationTestJSON-751321575-project-member] [instance: 916834d8-4819-4167-8774-b0a665021ef8] Terminating instance [ 608.918800] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3f01d48c-4eb1-4941-a48e-1320be5c5916 tempest-FloatingIPsAssociationTestJSON-751321575 tempest-FloatingIPsAssociationTestJSON-751321575-project-member] Acquiring lock "refresh_cache-916834d8-4819-4167-8774-b0a665021ef8" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 608.918917] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3f01d48c-4eb1-4941-a48e-1320be5c5916 tempest-FloatingIPsAssociationTestJSON-751321575 tempest-FloatingIPsAssociationTestJSON-751321575-project-member] Acquired lock "refresh_cache-916834d8-4819-4167-8774-b0a665021ef8" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 608.919092] env[61839]: DEBUG nova.network.neutron [None req-3f01d48c-4eb1-4941-a48e-1320be5c5916 tempest-FloatingIPsAssociationTestJSON-751321575 tempest-FloatingIPsAssociationTestJSON-751321575-project-member] [instance: 916834d8-4819-4167-8774-b0a665021ef8] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 609.010890] env[61839]: DEBUG nova.compute.utils [None req-eb9c6f05-d841-4e49-8f1a-c00c24c2a91c tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] Using /dev/sd instead of None {{(pid=61839) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 609.018372] env[61839]: DEBUG nova.compute.manager [None req-eb9c6f05-d841-4e49-8f1a-c00c24c2a91c tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] [instance: 7fdd773e-3a96-4728-b162-0227a415bc96] Allocating IP information in the background. 
{{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 609.018545] env[61839]: DEBUG nova.network.neutron [None req-eb9c6f05-d841-4e49-8f1a-c00c24c2a91c tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] [instance: 7fdd773e-3a96-4728-b162-0227a415bc96] allocate_for_instance() {{(pid=61839) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 609.076157] env[61839]: DEBUG nova.policy [None req-eb9c6f05-d841-4e49-8f1a-c00c24c2a91c tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '71c8c05326254b0cbada5839fb86bd08', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f95ba3c476274972b332b5f24ad7cb1c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61839) authorize /opt/stack/nova/nova/policy.py:201}} [ 609.176436] env[61839]: INFO nova.compute.manager [-] [instance: d95b34bb-bf0f-4a43-a5ad-6ae7770b606c] Took 1.05 seconds to deallocate network for instance. [ 609.180499] env[61839]: DEBUG nova.compute.claims [None req-96726cf8-093a-469e-ae4c-d851d34fa48d tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] [instance: d95b34bb-bf0f-4a43-a5ad-6ae7770b606c] Aborting claim: {{(pid=61839) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 609.180692] env[61839]: DEBUG oslo_concurrency.lockutils [None req-96726cf8-093a-469e-ae4c-d851d34fa48d tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 609.384392] env[61839]: DEBUG oslo_concurrency.lockutils [None req-ba95565c-feba-4942-8392-448356c664a8 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] Acquiring lock "619ec15b-463a-4daa-bffe-7d7a6022b962" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 609.384828] env[61839]: DEBUG oslo_concurrency.lockutils [None req-ba95565c-feba-4942-8392-448356c664a8 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] Lock "619ec15b-463a-4daa-bffe-7d7a6022b962" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 609.452370] env[61839]: DEBUG nova.network.neutron [None req-3f01d48c-4eb1-4941-a48e-1320be5c5916 tempest-FloatingIPsAssociationTestJSON-751321575 tempest-FloatingIPsAssociationTestJSON-751321575-project-member] [instance: 916834d8-4819-4167-8774-b0a665021ef8] Instance cache missing network info. 
{{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 609.518757] env[61839]: DEBUG nova.compute.manager [None req-eb9c6f05-d841-4e49-8f1a-c00c24c2a91c tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] [instance: 7fdd773e-3a96-4728-b162-0227a415bc96] Start building block device mappings for instance. {{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 609.574048] env[61839]: DEBUG nova.network.neutron [None req-eb9c6f05-d841-4e49-8f1a-c00c24c2a91c tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] [instance: 7fdd773e-3a96-4728-b162-0227a415bc96] Successfully created port: daf6e1bb-8088-4f75-aea6-a7512ec93136 {{(pid=61839) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 609.585777] env[61839]: DEBUG nova.compute.manager [req-9a1d68b6-ebba-4a21-aa58-929b8d2a8eeb req-6217e98a-97bc-4e8d-94c3-569dcd91f6dc service nova] [instance: 916834d8-4819-4167-8774-b0a665021ef8] Received event network-changed-245851ea-1302-48e0-a961-6cf5b78f0b43 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 609.586018] env[61839]: DEBUG nova.compute.manager [req-9a1d68b6-ebba-4a21-aa58-929b8d2a8eeb req-6217e98a-97bc-4e8d-94c3-569dcd91f6dc service nova] [instance: 916834d8-4819-4167-8774-b0a665021ef8] Refreshing instance network info cache due to event network-changed-245851ea-1302-48e0-a961-6cf5b78f0b43. {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 609.586228] env[61839]: DEBUG oslo_concurrency.lockutils [req-9a1d68b6-ebba-4a21-aa58-929b8d2a8eeb req-6217e98a-97bc-4e8d-94c3-569dcd91f6dc service nova] Acquiring lock "refresh_cache-916834d8-4819-4167-8774-b0a665021ef8" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 609.588065] env[61839]: DEBUG nova.network.neutron [None req-3f01d48c-4eb1-4941-a48e-1320be5c5916 tempest-FloatingIPsAssociationTestJSON-751321575 tempest-FloatingIPsAssociationTestJSON-751321575-project-member] [instance: 916834d8-4819-4167-8774-b0a665021ef8] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 609.989717] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a3811d0-6f11-4edf-b5c7-1380e31ac4ee {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.997907] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb276234-3e11-40c1-964d-035d565c6a3a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.038647] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6368d8a-5fb3-4bb7-9952-98c3aebaf4b6 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.050081] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76ffa23d-0a12-4e05-9566-05fea86853ce {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.065856] env[61839]: DEBUG nova.compute.provider_tree [None req-3986eaa2-d0de-4161-82e0-803ae69da9c7 
tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 610.093694] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3f01d48c-4eb1-4941-a48e-1320be5c5916 tempest-FloatingIPsAssociationTestJSON-751321575 tempest-FloatingIPsAssociationTestJSON-751321575-project-member] Releasing lock "refresh_cache-916834d8-4819-4167-8774-b0a665021ef8" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 610.094126] env[61839]: DEBUG nova.compute.manager [None req-3f01d48c-4eb1-4941-a48e-1320be5c5916 tempest-FloatingIPsAssociationTestJSON-751321575 tempest-FloatingIPsAssociationTestJSON-751321575-project-member] [instance: 916834d8-4819-4167-8774-b0a665021ef8] Start destroying the instance on the hypervisor. {{(pid=61839) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 610.094126] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-3f01d48c-4eb1-4941-a48e-1320be5c5916 tempest-FloatingIPsAssociationTestJSON-751321575 tempest-FloatingIPsAssociationTestJSON-751321575-project-member] [instance: 916834d8-4819-4167-8774-b0a665021ef8] Destroying instance {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 610.094404] env[61839]: DEBUG oslo_concurrency.lockutils [req-9a1d68b6-ebba-4a21-aa58-929b8d2a8eeb req-6217e98a-97bc-4e8d-94c3-569dcd91f6dc service nova] Acquired lock "refresh_cache-916834d8-4819-4167-8774-b0a665021ef8" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 610.094569] env[61839]: DEBUG nova.network.neutron [req-9a1d68b6-ebba-4a21-aa58-929b8d2a8eeb req-6217e98a-97bc-4e8d-94c3-569dcd91f6dc service nova] [instance: 916834d8-4819-4167-8774-b0a665021ef8] Refreshing network info cache for port 245851ea-1302-48e0-a961-6cf5b78f0b43 {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 610.095568] env[61839]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-508ec170-b5f4-4426-918e-4344f9b4a5ab {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.106401] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbf9a95a-610d-4021-9f0c-2cc543c5e386 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.133849] env[61839]: WARNING nova.virt.vmwareapi.vmops [None req-3f01d48c-4eb1-4941-a48e-1320be5c5916 tempest-FloatingIPsAssociationTestJSON-751321575 tempest-FloatingIPsAssociationTestJSON-751321575-project-member] [instance: 916834d8-4819-4167-8774-b0a665021ef8] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 916834d8-4819-4167-8774-b0a665021ef8 could not be found. 
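The WARNING above shows the vmwareapi driver treating a missing backend VM as already destroyed, so teardown continues instead of failing. A minimal sketch of that tolerate-missing pattern, with hypothetical names (`delete_vm` and the `InstanceNotFound` stand-in are illustrative, not the driver's actual code):

```python
# Hedged sketch of the destroy path logged above: if the hypervisor no
# longer has the VM, warn and proceed with teardown rather than erroring.
import logging

logging.basicConfig(level=logging.DEBUG)
LOG = logging.getLogger(__name__)

class InstanceNotFound(Exception):
    """Stand-in for nova.exception.InstanceNotFound."""

def delete_vm(instance_uuid):
    # Hypothetical backend call; raises when the VM is already gone.
    raise InstanceNotFound('Instance %s could not be found.' % instance_uuid)

def destroy(instance_uuid):
    try:
        delete_vm(instance_uuid)
    except InstanceNotFound as exc:
        # Matches the log: "Instance does not exist on backend: ..."
        LOG.warning('Instance does not exist on backend: %s', exc)
    LOG.debug('Instance destroyed')  # teardown proceeds either way

destroy('916834d8-4819-4167-8774-b0a665021ef8')
```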
[ 610.134027] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-3f01d48c-4eb1-4941-a48e-1320be5c5916 tempest-FloatingIPsAssociationTestJSON-751321575 tempest-FloatingIPsAssociationTestJSON-751321575-project-member] [instance: 916834d8-4819-4167-8774-b0a665021ef8] Instance destroyed {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 610.134215] env[61839]: INFO nova.compute.manager [None req-3f01d48c-4eb1-4941-a48e-1320be5c5916 tempest-FloatingIPsAssociationTestJSON-751321575 tempest-FloatingIPsAssociationTestJSON-751321575-project-member] [instance: 916834d8-4819-4167-8774-b0a665021ef8] Took 0.04 seconds to destroy the instance on the hypervisor. [ 610.134464] env[61839]: DEBUG oslo.service.loopingcall [None req-3f01d48c-4eb1-4941-a48e-1320be5c5916 tempest-FloatingIPsAssociationTestJSON-751321575 tempest-FloatingIPsAssociationTestJSON-751321575-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 610.134675] env[61839]: DEBUG nova.compute.manager [-] [instance: 916834d8-4819-4167-8774-b0a665021ef8] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 610.134764] env[61839]: DEBUG nova.network.neutron [-] [instance: 916834d8-4819-4167-8774-b0a665021ef8] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 610.176988] env[61839]: DEBUG nova.network.neutron [-] [instance: 916834d8-4819-4167-8774-b0a665021ef8] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 610.545836] env[61839]: DEBUG nova.compute.manager [None req-eb9c6f05-d841-4e49-8f1a-c00c24c2a91c tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] [instance: 7fdd773e-3a96-4728-b162-0227a415bc96] Start spawning the instance on the hypervisor. 
{{(pid=61839) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 610.571501] env[61839]: DEBUG nova.scheduler.client.report [None req-3986eaa2-d0de-4161-82e0-803ae69da9c7 tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 610.577216] env[61839]: DEBUG nova.virt.hardware [None req-eb9c6f05-d841-4e49-8f1a-c00c24c2a91c tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-18T16:51:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-18T16:50:57Z,direct_url=,disk_format='vmdk',id=e497cc62-282a-4a70-9770-22d80d8a1013,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a98e034276e44534a9feace637762da7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-18T16:50:58Z,virtual_size=,visibility=), allow threads: False {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 610.577440] env[61839]: DEBUG nova.virt.hardware [None req-eb9c6f05-d841-4e49-8f1a-c00c24c2a91c tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] Flavor limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 610.577592] env[61839]: DEBUG nova.virt.hardware [None req-eb9c6f05-d841-4e49-8f1a-c00c24c2a91c tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] Image limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 610.577792] env[61839]: DEBUG nova.virt.hardware [None req-eb9c6f05-d841-4e49-8f1a-c00c24c2a91c tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] Flavor pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 610.577987] env[61839]: DEBUG nova.virt.hardware [None req-eb9c6f05-d841-4e49-8f1a-c00c24c2a91c tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] Image pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 610.578103] env[61839]: DEBUG nova.virt.hardware [None req-eb9c6f05-d841-4e49-8f1a-c00c24c2a91c tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 610.578582] 
env[61839]: DEBUG nova.virt.hardware [None req-eb9c6f05-d841-4e49-8f1a-c00c24c2a91c tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 610.578582] env[61839]: DEBUG nova.virt.hardware [None req-eb9c6f05-d841-4e49-8f1a-c00c24c2a91c tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 610.578645] env[61839]: DEBUG nova.virt.hardware [None req-eb9c6f05-d841-4e49-8f1a-c00c24c2a91c tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] Got 1 possible topologies {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 610.582752] env[61839]: DEBUG nova.virt.hardware [None req-eb9c6f05-d841-4e49-8f1a-c00c24c2a91c tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 610.582752] env[61839]: DEBUG nova.virt.hardware [None req-eb9c6f05-d841-4e49-8f1a-c00c24c2a91c tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 610.582752] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b679f90-43d1-4593-bfaf-b45596de06a8 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.589283] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-373bf685-fc42-4268-8054-37f0768f50a2 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.639604] env[61839]: DEBUG nova.network.neutron [req-9a1d68b6-ebba-4a21-aa58-929b8d2a8eeb req-6217e98a-97bc-4e8d-94c3-569dcd91f6dc service nova] [instance: 916834d8-4819-4167-8774-b0a665021ef8] Instance cache missing network info. 
{{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 610.682449] env[61839]: DEBUG nova.network.neutron [-] [instance: 916834d8-4819-4167-8774-b0a665021ef8] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 610.852098] env[61839]: DEBUG nova.network.neutron [req-9a1d68b6-ebba-4a21-aa58-929b8d2a8eeb req-6217e98a-97bc-4e8d-94c3-569dcd91f6dc service nova] [instance: 916834d8-4819-4167-8774-b0a665021ef8] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 611.083849] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3986eaa2-d0de-4161-82e0-803ae69da9c7 tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.584s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 611.084674] env[61839]: DEBUG nova.compute.manager [None req-3986eaa2-d0de-4161-82e0-803ae69da9c7 tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] [instance: f8b36479-70a1-4f4e-84f4-e3baf9a56c45] Start building networks asynchronously for instance. {{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 611.088086] env[61839]: DEBUG oslo_concurrency.lockutils [None req-f03d1837-14e7-4fc0-adb1-ca8100ce1425 tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.629s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 611.089594] env[61839]: INFO nova.compute.claims [None req-f03d1837-14e7-4fc0-adb1-ca8100ce1425 tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] [instance: 86a1fc77-26d9-44c7-8f1f-771315769619] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 611.185319] env[61839]: INFO nova.compute.manager [-] [instance: 916834d8-4819-4167-8774-b0a665021ef8] Took 1.05 seconds to deallocate network for instance. 
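The lock lines above ("acquired ... :: waited 0.000s", "released ... :: held 2.584s") are emitted by oslo.concurrency's lock wrapper. A minimal usage sketch of that API; the resource name mirrors the log, and the guarded body is illustrative only:

```python
# Minimal oslo.concurrency usage matching the "compute_resources" lock
# traffic in the log. The wrapper in lockutils emits the
# "acquired by ... waited" / "released ... held" DEBUG lines seen above.
from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def abort_instance_claim():
    # Resource-tracker bookkeeping would run here under the lock.
    pass

abort_instance_claim()

# The context-manager form is equivalent for ad-hoc critical sections:
with lockutils.lock('compute_resources'):
    pass
```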
[ 611.189226] env[61839]: DEBUG nova.compute.claims [None req-3f01d48c-4eb1-4941-a48e-1320be5c5916 tempest-FloatingIPsAssociationTestJSON-751321575 tempest-FloatingIPsAssociationTestJSON-751321575-project-member] [instance: 916834d8-4819-4167-8774-b0a665021ef8] Aborting claim: {{(pid=61839) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 611.189226] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3f01d48c-4eb1-4941-a48e-1320be5c5916 tempest-FloatingIPsAssociationTestJSON-751321575 tempest-FloatingIPsAssociationTestJSON-751321575-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 611.235691] env[61839]: ERROR nova.compute.manager [None req-eb9c6f05-d841-4e49-8f1a-c00c24c2a91c tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port daf6e1bb-8088-4f75-aea6-a7512ec93136, please check neutron logs for more information. [ 611.235691] env[61839]: ERROR nova.compute.manager Traceback (most recent call last): [ 611.235691] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 611.235691] env[61839]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 611.235691] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 611.235691] env[61839]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 611.235691] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 611.235691] env[61839]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 611.235691] env[61839]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 611.235691] env[61839]: ERROR nova.compute.manager self.force_reraise() [ 611.235691] env[61839]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 611.235691] env[61839]: ERROR nova.compute.manager raise self.value [ 611.235691] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 611.235691] env[61839]: ERROR nova.compute.manager updated_port = self._update_port( [ 611.235691] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 611.235691] env[61839]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 611.236277] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 611.236277] env[61839]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 611.236277] env[61839]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port daf6e1bb-8088-4f75-aea6-a7512ec93136, please check neutron logs for more information. 
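Every traceback in this section passes through `excutils.save_and_reraise_exception()` at neutron.py:1414 (its `__exit__` and `force_reraise` frames are visible above). A minimal sketch of the pattern it implements: run cleanup inside the `except` block, then re-raise the original exception unchanged; the port-update body here is illustrative.

```python
# Minimal oslo.utils pattern from the tracebacks: clean up on failure
# without swallowing the original exception.
from oslo_utils import excutils

def _update_ports(ports):
    created = []
    try:
        for port in ports:
            created.append(port)            # illustrative "update" step
            raise RuntimeError('bind failed')
    except Exception:
        with excutils.save_and_reraise_exception():
            # Cleanup runs here; on exit the context manager re-raises
            # the saved exception (the force_reraise frame in the trace).
            created.clear()

try:
    _update_ports([{'id': 'p1'}])
except RuntimeError as exc:
    print('re-raised as expected:', exc)
```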
[ 611.236277] env[61839]: ERROR nova.compute.manager [ 611.236277] env[61839]: Traceback (most recent call last): [ 611.236277] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 611.236277] env[61839]: listener.cb(fileno) [ 611.236277] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 611.236277] env[61839]: result = function(*args, **kwargs) [ 611.236277] env[61839]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 611.236277] env[61839]: return func(*args, **kwargs) [ 611.236277] env[61839]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 611.236277] env[61839]: raise e [ 611.236277] env[61839]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 611.236277] env[61839]: nwinfo = self.network_api.allocate_for_instance( [ 611.236277] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 611.236277] env[61839]: created_port_ids = self._update_ports_for_instance( [ 611.236277] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 611.236277] env[61839]: with excutils.save_and_reraise_exception(): [ 611.236277] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 611.236277] env[61839]: self.force_reraise() [ 611.236277] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 611.236277] env[61839]: raise self.value [ 611.236277] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 611.236277] env[61839]: updated_port = self._update_port( [ 611.236277] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 611.236277] env[61839]: _ensure_no_port_binding_failure(port) [ 611.236277] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 611.236277] env[61839]: raise exception.PortBindingFailed(port_id=port['id']) [ 611.237039] env[61839]: nova.exception.PortBindingFailed: Binding failed for port daf6e1bb-8088-4f75-aea6-a7512ec93136, please check neutron logs for more information. [ 611.237039] env[61839]: Removing descriptor: 17 [ 611.237039] env[61839]: ERROR nova.compute.manager [None req-eb9c6f05-d841-4e49-8f1a-c00c24c2a91c tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] [instance: 7fdd773e-3a96-4728-b162-0227a415bc96] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port daf6e1bb-8088-4f75-aea6-a7512ec93136, please check neutron logs for more information. 
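The trace above shows network allocation running in an eventlet greenthread (`_allocate_network_async`) while the build path blocks on the result later, via `self._gt.wait()` in network/model.py. A minimal sketch of that spawn-now/wait-later shape; the function and names are illustrative, not Nova's async wrapper:

```python
# Minimal eventlet shape behind the traceback: spawn allocation early,
# join it only when the VIF info is actually needed (model.py wait()).
import eventlet

def allocate_network_async(port_id):
    # In the failing runs above this raises PortBindingFailed; any
    # exception is re-raised to the waiter by GreenThread.wait().
    return ['vif-for-%s' % port_id]

gt = eventlet.spawn(allocate_network_async, '245851ea')
# ... block-device mapping and other build steps proceed concurrently ...
network_info = gt.wait()   # re-raises the greenthread's exception, if any
print(network_info)
```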
[ 611.237039] env[61839]: ERROR nova.compute.manager [instance: 7fdd773e-3a96-4728-b162-0227a415bc96] Traceback (most recent call last): [ 611.237039] env[61839]: ERROR nova.compute.manager [instance: 7fdd773e-3a96-4728-b162-0227a415bc96] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 611.237039] env[61839]: ERROR nova.compute.manager [instance: 7fdd773e-3a96-4728-b162-0227a415bc96] yield resources [ 611.237039] env[61839]: ERROR nova.compute.manager [instance: 7fdd773e-3a96-4728-b162-0227a415bc96] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 611.237039] env[61839]: ERROR nova.compute.manager [instance: 7fdd773e-3a96-4728-b162-0227a415bc96] self.driver.spawn(context, instance, image_meta, [ 611.237039] env[61839]: ERROR nova.compute.manager [instance: 7fdd773e-3a96-4728-b162-0227a415bc96] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 611.237039] env[61839]: ERROR nova.compute.manager [instance: 7fdd773e-3a96-4728-b162-0227a415bc96] self._vmops.spawn(context, instance, image_meta, injected_files, [ 611.237039] env[61839]: ERROR nova.compute.manager [instance: 7fdd773e-3a96-4728-b162-0227a415bc96] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 611.237039] env[61839]: ERROR nova.compute.manager [instance: 7fdd773e-3a96-4728-b162-0227a415bc96] vm_ref = self.build_virtual_machine(instance, [ 611.237370] env[61839]: ERROR nova.compute.manager [instance: 7fdd773e-3a96-4728-b162-0227a415bc96] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 611.237370] env[61839]: ERROR nova.compute.manager [instance: 7fdd773e-3a96-4728-b162-0227a415bc96] vif_infos = vmwarevif.get_vif_info(self._session, [ 611.237370] env[61839]: ERROR nova.compute.manager [instance: 7fdd773e-3a96-4728-b162-0227a415bc96] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 611.237370] env[61839]: ERROR nova.compute.manager [instance: 7fdd773e-3a96-4728-b162-0227a415bc96] for vif in network_info: [ 611.237370] env[61839]: ERROR nova.compute.manager [instance: 7fdd773e-3a96-4728-b162-0227a415bc96] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 611.237370] env[61839]: ERROR nova.compute.manager [instance: 7fdd773e-3a96-4728-b162-0227a415bc96] return self._sync_wrapper(fn, *args, **kwargs) [ 611.237370] env[61839]: ERROR nova.compute.manager [instance: 7fdd773e-3a96-4728-b162-0227a415bc96] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 611.237370] env[61839]: ERROR nova.compute.manager [instance: 7fdd773e-3a96-4728-b162-0227a415bc96] self.wait() [ 611.237370] env[61839]: ERROR nova.compute.manager [instance: 7fdd773e-3a96-4728-b162-0227a415bc96] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 611.237370] env[61839]: ERROR nova.compute.manager [instance: 7fdd773e-3a96-4728-b162-0227a415bc96] self[:] = self._gt.wait() [ 611.237370] env[61839]: ERROR nova.compute.manager [instance: 7fdd773e-3a96-4728-b162-0227a415bc96] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 611.237370] env[61839]: ERROR nova.compute.manager [instance: 7fdd773e-3a96-4728-b162-0227a415bc96] return self._exit_event.wait() [ 611.237370] env[61839]: ERROR nova.compute.manager [instance: 7fdd773e-3a96-4728-b162-0227a415bc96] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 611.237977] env[61839]: ERROR 
nova.compute.manager [instance: 7fdd773e-3a96-4728-b162-0227a415bc96] result = hub.switch() [ 611.237977] env[61839]: ERROR nova.compute.manager [instance: 7fdd773e-3a96-4728-b162-0227a415bc96] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 611.237977] env[61839]: ERROR nova.compute.manager [instance: 7fdd773e-3a96-4728-b162-0227a415bc96] return self.greenlet.switch() [ 611.237977] env[61839]: ERROR nova.compute.manager [instance: 7fdd773e-3a96-4728-b162-0227a415bc96] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 611.237977] env[61839]: ERROR nova.compute.manager [instance: 7fdd773e-3a96-4728-b162-0227a415bc96] result = function(*args, **kwargs) [ 611.237977] env[61839]: ERROR nova.compute.manager [instance: 7fdd773e-3a96-4728-b162-0227a415bc96] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 611.237977] env[61839]: ERROR nova.compute.manager [instance: 7fdd773e-3a96-4728-b162-0227a415bc96] return func(*args, **kwargs) [ 611.237977] env[61839]: ERROR nova.compute.manager [instance: 7fdd773e-3a96-4728-b162-0227a415bc96] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 611.237977] env[61839]: ERROR nova.compute.manager [instance: 7fdd773e-3a96-4728-b162-0227a415bc96] raise e [ 611.237977] env[61839]: ERROR nova.compute.manager [instance: 7fdd773e-3a96-4728-b162-0227a415bc96] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 611.237977] env[61839]: ERROR nova.compute.manager [instance: 7fdd773e-3a96-4728-b162-0227a415bc96] nwinfo = self.network_api.allocate_for_instance( [ 611.237977] env[61839]: ERROR nova.compute.manager [instance: 7fdd773e-3a96-4728-b162-0227a415bc96] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 611.237977] env[61839]: ERROR nova.compute.manager [instance: 7fdd773e-3a96-4728-b162-0227a415bc96] created_port_ids = self._update_ports_for_instance( [ 611.238406] env[61839]: ERROR nova.compute.manager [instance: 7fdd773e-3a96-4728-b162-0227a415bc96] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 611.238406] env[61839]: ERROR nova.compute.manager [instance: 7fdd773e-3a96-4728-b162-0227a415bc96] with excutils.save_and_reraise_exception(): [ 611.238406] env[61839]: ERROR nova.compute.manager [instance: 7fdd773e-3a96-4728-b162-0227a415bc96] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 611.238406] env[61839]: ERROR nova.compute.manager [instance: 7fdd773e-3a96-4728-b162-0227a415bc96] self.force_reraise() [ 611.238406] env[61839]: ERROR nova.compute.manager [instance: 7fdd773e-3a96-4728-b162-0227a415bc96] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 611.238406] env[61839]: ERROR nova.compute.manager [instance: 7fdd773e-3a96-4728-b162-0227a415bc96] raise self.value [ 611.238406] env[61839]: ERROR nova.compute.manager [instance: 7fdd773e-3a96-4728-b162-0227a415bc96] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 611.238406] env[61839]: ERROR nova.compute.manager [instance: 7fdd773e-3a96-4728-b162-0227a415bc96] updated_port = self._update_port( [ 611.238406] env[61839]: ERROR nova.compute.manager [instance: 7fdd773e-3a96-4728-b162-0227a415bc96] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 611.238406] 
env[61839]: ERROR nova.compute.manager [instance: 7fdd773e-3a96-4728-b162-0227a415bc96] _ensure_no_port_binding_failure(port) [ 611.238406] env[61839]: ERROR nova.compute.manager [instance: 7fdd773e-3a96-4728-b162-0227a415bc96] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 611.238406] env[61839]: ERROR nova.compute.manager [instance: 7fdd773e-3a96-4728-b162-0227a415bc96] raise exception.PortBindingFailed(port_id=port['id']) [ 611.238720] env[61839]: ERROR nova.compute.manager [instance: 7fdd773e-3a96-4728-b162-0227a415bc96] nova.exception.PortBindingFailed: Binding failed for port daf6e1bb-8088-4f75-aea6-a7512ec93136, please check neutron logs for more information. [ 611.238720] env[61839]: ERROR nova.compute.manager [instance: 7fdd773e-3a96-4728-b162-0227a415bc96] [ 611.238720] env[61839]: INFO nova.compute.manager [None req-eb9c6f05-d841-4e49-8f1a-c00c24c2a91c tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] [instance: 7fdd773e-3a96-4728-b162-0227a415bc96] Terminating instance [ 611.240019] env[61839]: DEBUG oslo_concurrency.lockutils [None req-eb9c6f05-d841-4e49-8f1a-c00c24c2a91c tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] Acquiring lock "refresh_cache-7fdd773e-3a96-4728-b162-0227a415bc96" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 611.240019] env[61839]: DEBUG oslo_concurrency.lockutils [None req-eb9c6f05-d841-4e49-8f1a-c00c24c2a91c tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] Acquired lock "refresh_cache-7fdd773e-3a96-4728-b162-0227a415bc96" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 611.240156] env[61839]: DEBUG nova.network.neutron [None req-eb9c6f05-d841-4e49-8f1a-c00c24c2a91c tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] [instance: 7fdd773e-3a96-4728-b162-0227a415bc96] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 611.352227] env[61839]: DEBUG oslo_concurrency.lockutils [req-9a1d68b6-ebba-4a21-aa58-929b8d2a8eeb req-6217e98a-97bc-4e8d-94c3-569dcd91f6dc service nova] Releasing lock "refresh_cache-916834d8-4819-4167-8774-b0a665021ef8" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 611.352510] env[61839]: DEBUG nova.compute.manager [req-9a1d68b6-ebba-4a21-aa58-929b8d2a8eeb req-6217e98a-97bc-4e8d-94c3-569dcd91f6dc service nova] [instance: 916834d8-4819-4167-8774-b0a665021ef8] Received event network-vif-deleted-245851ea-1302-48e0-a961-6cf5b78f0b43 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 611.593885] env[61839]: DEBUG nova.compute.utils [None req-3986eaa2-d0de-4161-82e0-803ae69da9c7 tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] Using /dev/sd instead of None {{(pid=61839) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 611.600858] env[61839]: DEBUG nova.compute.manager [None req-3986eaa2-d0de-4161-82e0-803ae69da9c7 tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] [instance: f8b36479-70a1-4f4e-84f4-e3baf9a56c45] Allocating IP information in the background. 
{{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 611.600858] env[61839]: DEBUG nova.network.neutron [None req-3986eaa2-d0de-4161-82e0-803ae69da9c7 tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] [instance: f8b36479-70a1-4f4e-84f4-e3baf9a56c45] allocate_for_instance() {{(pid=61839) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 611.604509] env[61839]: DEBUG oslo_concurrency.lockutils [None req-b5bdc0dc-c0c3-4680-812b-c7e0bf9d39a6 tempest-InstanceActionsNegativeTestJSON-917154709 tempest-InstanceActionsNegativeTestJSON-917154709-project-member] Acquiring lock "821b784d-dc69-4c54-bccf-76693c34e19d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 611.604800] env[61839]: DEBUG oslo_concurrency.lockutils [None req-b5bdc0dc-c0c3-4680-812b-c7e0bf9d39a6 tempest-InstanceActionsNegativeTestJSON-917154709 tempest-InstanceActionsNegativeTestJSON-917154709-project-member] Lock "821b784d-dc69-4c54-bccf-76693c34e19d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 611.615133] env[61839]: DEBUG nova.compute.manager [req-13d83a5b-4467-4612-bb9f-a23091e73f90 req-3ff21fe8-fad9-44e6-939b-fddc14a204f4 service nova] [instance: 7fdd773e-3a96-4728-b162-0227a415bc96] Received event network-changed-daf6e1bb-8088-4f75-aea6-a7512ec93136 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 611.615337] env[61839]: DEBUG nova.compute.manager [req-13d83a5b-4467-4612-bb9f-a23091e73f90 req-3ff21fe8-fad9-44e6-939b-fddc14a204f4 service nova] [instance: 7fdd773e-3a96-4728-b162-0227a415bc96] Refreshing instance network info cache due to event network-changed-daf6e1bb-8088-4f75-aea6-a7512ec93136. {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 611.615532] env[61839]: DEBUG oslo_concurrency.lockutils [req-13d83a5b-4467-4612-bb9f-a23091e73f90 req-3ff21fe8-fad9-44e6-939b-fddc14a204f4 service nova] Acquiring lock "refresh_cache-7fdd773e-3a96-4728-b162-0227a415bc96" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 611.660784] env[61839]: DEBUG nova.policy [None req-3986eaa2-d0de-4161-82e0-803ae69da9c7 tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7cdecbaec4c24cf39bc814b8d5e56976', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6c42b8792e72408e928cc5b584f6a273', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61839) authorize /opt/stack/nova/nova/policy.py:201}} [ 611.778699] env[61839]: DEBUG nova.network.neutron [None req-eb9c6f05-d841-4e49-8f1a-c00c24c2a91c tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] [instance: 7fdd773e-3a96-4728-b162-0227a415bc96] Instance cache missing network info. 
{{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 611.946139] env[61839]: DEBUG nova.network.neutron [None req-eb9c6f05-d841-4e49-8f1a-c00c24c2a91c tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] [instance: 7fdd773e-3a96-4728-b162-0227a415bc96] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 612.050130] env[61839]: DEBUG nova.network.neutron [None req-3986eaa2-d0de-4161-82e0-803ae69da9c7 tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] [instance: f8b36479-70a1-4f4e-84f4-e3baf9a56c45] Successfully created port: 6885c1b8-b5bc-4e67-963a-1bc48d70db6b {{(pid=61839) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 612.098813] env[61839]: DEBUG nova.compute.manager [None req-3986eaa2-d0de-4161-82e0-803ae69da9c7 tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] [instance: f8b36479-70a1-4f4e-84f4-e3baf9a56c45] Start building block device mappings for instance. {{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 612.450008] env[61839]: DEBUG oslo_concurrency.lockutils [None req-eb9c6f05-d841-4e49-8f1a-c00c24c2a91c tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] Releasing lock "refresh_cache-7fdd773e-3a96-4728-b162-0227a415bc96" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 612.450430] env[61839]: DEBUG nova.compute.manager [None req-eb9c6f05-d841-4e49-8f1a-c00c24c2a91c tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] [instance: 7fdd773e-3a96-4728-b162-0227a415bc96] Start destroying the instance on the hypervisor. 
{{(pid=61839) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 612.450626] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-eb9c6f05-d841-4e49-8f1a-c00c24c2a91c tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] [instance: 7fdd773e-3a96-4728-b162-0227a415bc96] Destroying instance {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 612.451288] env[61839]: DEBUG oslo_concurrency.lockutils [req-13d83a5b-4467-4612-bb9f-a23091e73f90 req-3ff21fe8-fad9-44e6-939b-fddc14a204f4 service nova] Acquired lock "refresh_cache-7fdd773e-3a96-4728-b162-0227a415bc96" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 612.451474] env[61839]: DEBUG nova.network.neutron [req-13d83a5b-4467-4612-bb9f-a23091e73f90 req-3ff21fe8-fad9-44e6-939b-fddc14a204f4 service nova] [instance: 7fdd773e-3a96-4728-b162-0227a415bc96] Refreshing network info cache for port daf6e1bb-8088-4f75-aea6-a7512ec93136 {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 612.452523] env[61839]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e2988f86-96cd-48fa-96c5-2dd7db206be1 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.461879] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1f47758-3398-4600-b49a-c3f44368caee {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.490169] env[61839]: WARNING nova.virt.vmwareapi.vmops [None req-eb9c6f05-d841-4e49-8f1a-c00c24c2a91c tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] [instance: 7fdd773e-3a96-4728-b162-0227a415bc96] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 7fdd773e-3a96-4728-b162-0227a415bc96 could not be found. [ 612.490419] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-eb9c6f05-d841-4e49-8f1a-c00c24c2a91c tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] [instance: 7fdd773e-3a96-4728-b162-0227a415bc96] Instance destroyed {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 612.490590] env[61839]: INFO nova.compute.manager [None req-eb9c6f05-d841-4e49-8f1a-c00c24c2a91c tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] [instance: 7fdd773e-3a96-4728-b162-0227a415bc96] Took 0.04 seconds to destroy the instance on the hypervisor. [ 612.490822] env[61839]: DEBUG oslo.service.loopingcall [None req-eb9c6f05-d841-4e49-8f1a-c00c24c2a91c tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 612.493565] env[61839]: DEBUG nova.compute.manager [-] [instance: 7fdd773e-3a96-4728-b162-0227a415bc96] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 612.493667] env[61839]: DEBUG nova.network.neutron [-] [instance: 7fdd773e-3a96-4728-b162-0227a415bc96] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 612.513805] env[61839]: DEBUG nova.network.neutron [-] [instance: 7fdd773e-3a96-4728-b162-0227a415bc96] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 612.643711] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecd121d4-e19c-49b5-acd1-9942b38a24c1 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.652937] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2636d789-8b41-4c87-a043-2131669aabae {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.691568] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-395de38e-434b-4f17-81af-c7bd0f3e89a1 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.699217] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0811340-2782-456d-bce9-1de18fb7c4c9 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.713240] env[61839]: DEBUG nova.compute.provider_tree [None req-f03d1837-14e7-4fc0-adb1-ca8100ce1425 tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 612.978066] env[61839]: DEBUG nova.network.neutron [req-13d83a5b-4467-4612-bb9f-a23091e73f90 req-3ff21fe8-fad9-44e6-939b-fddc14a204f4 service nova] [instance: 7fdd773e-3a96-4728-b162-0227a415bc96] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 613.017590] env[61839]: DEBUG nova.network.neutron [-] [instance: 7fdd773e-3a96-4728-b162-0227a415bc96] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 613.112463] env[61839]: DEBUG nova.compute.manager [None req-3986eaa2-d0de-4161-82e0-803ae69da9c7 tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] [instance: f8b36479-70a1-4f4e-84f4-e3baf9a56c45] Start spawning the instance on the hypervisor. 
{{(pid=61839) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 613.135531] env[61839]: DEBUG nova.network.neutron [req-13d83a5b-4467-4612-bb9f-a23091e73f90 req-3ff21fe8-fad9-44e6-939b-fddc14a204f4 service nova] [instance: 7fdd773e-3a96-4728-b162-0227a415bc96] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 613.142140] env[61839]: DEBUG nova.virt.hardware [None req-3986eaa2-d0de-4161-82e0-803ae69da9c7 tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-18T16:51:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-18T16:50:57Z,direct_url=,disk_format='vmdk',id=e497cc62-282a-4a70-9770-22d80d8a1013,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a98e034276e44534a9feace637762da7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-18T16:50:58Z,virtual_size=,visibility=), allow threads: False {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 613.142140] env[61839]: DEBUG nova.virt.hardware [None req-3986eaa2-d0de-4161-82e0-803ae69da9c7 tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] Flavor limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 613.142140] env[61839]: DEBUG nova.virt.hardware [None req-3986eaa2-d0de-4161-82e0-803ae69da9c7 tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] Image limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 613.142291] env[61839]: DEBUG nova.virt.hardware [None req-3986eaa2-d0de-4161-82e0-803ae69da9c7 tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] Flavor pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 613.142291] env[61839]: DEBUG nova.virt.hardware [None req-3986eaa2-d0de-4161-82e0-803ae69da9c7 tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] Image pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 613.142291] env[61839]: DEBUG nova.virt.hardware [None req-3986eaa2-d0de-4161-82e0-803ae69da9c7 tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 613.142383] env[61839]: DEBUG nova.virt.hardware [None req-3986eaa2-d0de-4161-82e0-803ae69da9c7 tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61839) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:569}} [ 613.142454] env[61839]: DEBUG nova.virt.hardware [None req-3986eaa2-d0de-4161-82e0-803ae69da9c7 tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 613.142615] env[61839]: DEBUG nova.virt.hardware [None req-3986eaa2-d0de-4161-82e0-803ae69da9c7 tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] Got 1 possible topologies {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 613.142768] env[61839]: DEBUG nova.virt.hardware [None req-3986eaa2-d0de-4161-82e0-803ae69da9c7 tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 613.142958] env[61839]: DEBUG nova.virt.hardware [None req-3986eaa2-d0de-4161-82e0-803ae69da9c7 tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 613.143811] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcbffa50-f1c8-4d74-948d-44b353c51dc2 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.154775] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cf89b30-c643-46b8-b14b-88b6f62ff319 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.216664] env[61839]: DEBUG nova.scheduler.client.report [None req-f03d1837-14e7-4fc0-adb1-ca8100ce1425 tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 613.381496] env[61839]: ERROR nova.compute.manager [None req-3986eaa2-d0de-4161-82e0-803ae69da9c7 tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 6885c1b8-b5bc-4e67-963a-1bc48d70db6b, please check neutron logs for more information. 
[ 613.381496] env[61839]: ERROR nova.compute.manager Traceback (most recent call last): [ 613.381496] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 613.381496] env[61839]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 613.381496] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 613.381496] env[61839]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 613.381496] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 613.381496] env[61839]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 613.381496] env[61839]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 613.381496] env[61839]: ERROR nova.compute.manager self.force_reraise() [ 613.381496] env[61839]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 613.381496] env[61839]: ERROR nova.compute.manager raise self.value [ 613.381496] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 613.381496] env[61839]: ERROR nova.compute.manager updated_port = self._update_port( [ 613.381496] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 613.381496] env[61839]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 613.382139] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 613.382139] env[61839]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 613.382139] env[61839]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 6885c1b8-b5bc-4e67-963a-1bc48d70db6b, please check neutron logs for more information. 
[ 613.382139] env[61839]: ERROR nova.compute.manager [ 613.382139] env[61839]: Traceback (most recent call last): [ 613.382139] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 613.382139] env[61839]: listener.cb(fileno) [ 613.382139] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 613.382139] env[61839]: result = function(*args, **kwargs) [ 613.382139] env[61839]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 613.382139] env[61839]: return func(*args, **kwargs) [ 613.382139] env[61839]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 613.382139] env[61839]: raise e [ 613.382139] env[61839]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 613.382139] env[61839]: nwinfo = self.network_api.allocate_for_instance( [ 613.382139] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 613.382139] env[61839]: created_port_ids = self._update_ports_for_instance( [ 613.382139] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 613.382139] env[61839]: with excutils.save_and_reraise_exception(): [ 613.382139] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 613.382139] env[61839]: self.force_reraise() [ 613.382139] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 613.382139] env[61839]: raise self.value [ 613.382139] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 613.382139] env[61839]: updated_port = self._update_port( [ 613.382139] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 613.382139] env[61839]: _ensure_no_port_binding_failure(port) [ 613.382139] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 613.382139] env[61839]: raise exception.PortBindingFailed(port_id=port['id']) [ 613.382968] env[61839]: nova.exception.PortBindingFailed: Binding failed for port 6885c1b8-b5bc-4e67-963a-1bc48d70db6b, please check neutron logs for more information. [ 613.382968] env[61839]: Removing descriptor: 17 [ 613.382968] env[61839]: ERROR nova.compute.manager [None req-3986eaa2-d0de-4161-82e0-803ae69da9c7 tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] [instance: f8b36479-70a1-4f4e-84f4-e3baf9a56c45] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 6885c1b8-b5bc-4e67-963a-1bc48d70db6b, please check neutron logs for more information. 
[ 613.382968] env[61839]: ERROR nova.compute.manager [instance: f8b36479-70a1-4f4e-84f4-e3baf9a56c45] Traceback (most recent call last): [ 613.382968] env[61839]: ERROR nova.compute.manager [instance: f8b36479-70a1-4f4e-84f4-e3baf9a56c45] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 613.382968] env[61839]: ERROR nova.compute.manager [instance: f8b36479-70a1-4f4e-84f4-e3baf9a56c45] yield resources [ 613.382968] env[61839]: ERROR nova.compute.manager [instance: f8b36479-70a1-4f4e-84f4-e3baf9a56c45] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 613.382968] env[61839]: ERROR nova.compute.manager [instance: f8b36479-70a1-4f4e-84f4-e3baf9a56c45] self.driver.spawn(context, instance, image_meta, [ 613.382968] env[61839]: ERROR nova.compute.manager [instance: f8b36479-70a1-4f4e-84f4-e3baf9a56c45] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 613.382968] env[61839]: ERROR nova.compute.manager [instance: f8b36479-70a1-4f4e-84f4-e3baf9a56c45] self._vmops.spawn(context, instance, image_meta, injected_files, [ 613.382968] env[61839]: ERROR nova.compute.manager [instance: f8b36479-70a1-4f4e-84f4-e3baf9a56c45] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 613.382968] env[61839]: ERROR nova.compute.manager [instance: f8b36479-70a1-4f4e-84f4-e3baf9a56c45] vm_ref = self.build_virtual_machine(instance, [ 613.383329] env[61839]: ERROR nova.compute.manager [instance: f8b36479-70a1-4f4e-84f4-e3baf9a56c45] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 613.383329] env[61839]: ERROR nova.compute.manager [instance: f8b36479-70a1-4f4e-84f4-e3baf9a56c45] vif_infos = vmwarevif.get_vif_info(self._session, [ 613.383329] env[61839]: ERROR nova.compute.manager [instance: f8b36479-70a1-4f4e-84f4-e3baf9a56c45] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 613.383329] env[61839]: ERROR nova.compute.manager [instance: f8b36479-70a1-4f4e-84f4-e3baf9a56c45] for vif in network_info: [ 613.383329] env[61839]: ERROR nova.compute.manager [instance: f8b36479-70a1-4f4e-84f4-e3baf9a56c45] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 613.383329] env[61839]: ERROR nova.compute.manager [instance: f8b36479-70a1-4f4e-84f4-e3baf9a56c45] return self._sync_wrapper(fn, *args, **kwargs) [ 613.383329] env[61839]: ERROR nova.compute.manager [instance: f8b36479-70a1-4f4e-84f4-e3baf9a56c45] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 613.383329] env[61839]: ERROR nova.compute.manager [instance: f8b36479-70a1-4f4e-84f4-e3baf9a56c45] self.wait() [ 613.383329] env[61839]: ERROR nova.compute.manager [instance: f8b36479-70a1-4f4e-84f4-e3baf9a56c45] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 613.383329] env[61839]: ERROR nova.compute.manager [instance: f8b36479-70a1-4f4e-84f4-e3baf9a56c45] self[:] = self._gt.wait() [ 613.383329] env[61839]: ERROR nova.compute.manager [instance: f8b36479-70a1-4f4e-84f4-e3baf9a56c45] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 613.383329] env[61839]: ERROR nova.compute.manager [instance: f8b36479-70a1-4f4e-84f4-e3baf9a56c45] return self._exit_event.wait() [ 613.383329] env[61839]: ERROR nova.compute.manager [instance: f8b36479-70a1-4f4e-84f4-e3baf9a56c45] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 613.383798] env[61839]: ERROR 
nova.compute.manager [instance: f8b36479-70a1-4f4e-84f4-e3baf9a56c45] result = hub.switch() [ 613.383798] env[61839]: ERROR nova.compute.manager [instance: f8b36479-70a1-4f4e-84f4-e3baf9a56c45] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 613.383798] env[61839]: ERROR nova.compute.manager [instance: f8b36479-70a1-4f4e-84f4-e3baf9a56c45] return self.greenlet.switch() [ 613.383798] env[61839]: ERROR nova.compute.manager [instance: f8b36479-70a1-4f4e-84f4-e3baf9a56c45] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 613.383798] env[61839]: ERROR nova.compute.manager [instance: f8b36479-70a1-4f4e-84f4-e3baf9a56c45] result = function(*args, **kwargs) [ 613.383798] env[61839]: ERROR nova.compute.manager [instance: f8b36479-70a1-4f4e-84f4-e3baf9a56c45] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 613.383798] env[61839]: ERROR nova.compute.manager [instance: f8b36479-70a1-4f4e-84f4-e3baf9a56c45] return func(*args, **kwargs) [ 613.383798] env[61839]: ERROR nova.compute.manager [instance: f8b36479-70a1-4f4e-84f4-e3baf9a56c45] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 613.383798] env[61839]: ERROR nova.compute.manager [instance: f8b36479-70a1-4f4e-84f4-e3baf9a56c45] raise e [ 613.383798] env[61839]: ERROR nova.compute.manager [instance: f8b36479-70a1-4f4e-84f4-e3baf9a56c45] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 613.383798] env[61839]: ERROR nova.compute.manager [instance: f8b36479-70a1-4f4e-84f4-e3baf9a56c45] nwinfo = self.network_api.allocate_for_instance( [ 613.383798] env[61839]: ERROR nova.compute.manager [instance: f8b36479-70a1-4f4e-84f4-e3baf9a56c45] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 613.383798] env[61839]: ERROR nova.compute.manager [instance: f8b36479-70a1-4f4e-84f4-e3baf9a56c45] created_port_ids = self._update_ports_for_instance( [ 613.384286] env[61839]: ERROR nova.compute.manager [instance: f8b36479-70a1-4f4e-84f4-e3baf9a56c45] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 613.384286] env[61839]: ERROR nova.compute.manager [instance: f8b36479-70a1-4f4e-84f4-e3baf9a56c45] with excutils.save_and_reraise_exception(): [ 613.384286] env[61839]: ERROR nova.compute.manager [instance: f8b36479-70a1-4f4e-84f4-e3baf9a56c45] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 613.384286] env[61839]: ERROR nova.compute.manager [instance: f8b36479-70a1-4f4e-84f4-e3baf9a56c45] self.force_reraise() [ 613.384286] env[61839]: ERROR nova.compute.manager [instance: f8b36479-70a1-4f4e-84f4-e3baf9a56c45] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 613.384286] env[61839]: ERROR nova.compute.manager [instance: f8b36479-70a1-4f4e-84f4-e3baf9a56c45] raise self.value [ 613.384286] env[61839]: ERROR nova.compute.manager [instance: f8b36479-70a1-4f4e-84f4-e3baf9a56c45] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 613.384286] env[61839]: ERROR nova.compute.manager [instance: f8b36479-70a1-4f4e-84f4-e3baf9a56c45] updated_port = self._update_port( [ 613.384286] env[61839]: ERROR nova.compute.manager [instance: f8b36479-70a1-4f4e-84f4-e3baf9a56c45] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 613.384286] 
env[61839]: ERROR nova.compute.manager [instance: f8b36479-70a1-4f4e-84f4-e3baf9a56c45] _ensure_no_port_binding_failure(port) [ 613.384286] env[61839]: ERROR nova.compute.manager [instance: f8b36479-70a1-4f4e-84f4-e3baf9a56c45] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 613.384286] env[61839]: ERROR nova.compute.manager [instance: f8b36479-70a1-4f4e-84f4-e3baf9a56c45] raise exception.PortBindingFailed(port_id=port['id']) [ 613.384663] env[61839]: ERROR nova.compute.manager [instance: f8b36479-70a1-4f4e-84f4-e3baf9a56c45] nova.exception.PortBindingFailed: Binding failed for port 6885c1b8-b5bc-4e67-963a-1bc48d70db6b, please check neutron logs for more information. [ 613.384663] env[61839]: ERROR nova.compute.manager [instance: f8b36479-70a1-4f4e-84f4-e3baf9a56c45] [ 613.384663] env[61839]: INFO nova.compute.manager [None req-3986eaa2-d0de-4161-82e0-803ae69da9c7 tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] [instance: f8b36479-70a1-4f4e-84f4-e3baf9a56c45] Terminating instance [ 613.384920] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3986eaa2-d0de-4161-82e0-803ae69da9c7 tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] Acquiring lock "refresh_cache-f8b36479-70a1-4f4e-84f4-e3baf9a56c45" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 613.385093] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3986eaa2-d0de-4161-82e0-803ae69da9c7 tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] Acquired lock "refresh_cache-f8b36479-70a1-4f4e-84f4-e3baf9a56c45" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 613.385261] env[61839]: DEBUG nova.network.neutron [None req-3986eaa2-d0de-4161-82e0-803ae69da9c7 tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] [instance: f8b36479-70a1-4f4e-84f4-e3baf9a56c45] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 613.520122] env[61839]: INFO nova.compute.manager [-] [instance: 7fdd773e-3a96-4728-b162-0227a415bc96] Took 1.03 seconds to deallocate network for instance. 
[ 613.525173] env[61839]: DEBUG nova.compute.claims [None req-eb9c6f05-d841-4e49-8f1a-c00c24c2a91c tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] [instance: 7fdd773e-3a96-4728-b162-0227a415bc96] Aborting claim: {{(pid=61839) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 613.525354] env[61839]: DEBUG oslo_concurrency.lockutils [None req-eb9c6f05-d841-4e49-8f1a-c00c24c2a91c tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 613.636536] env[61839]: DEBUG nova.compute.manager [req-6545903c-00b6-4893-831f-77a32c0cadef req-d7e178b2-cbfd-40e2-9a8e-cd4d1315c7dc service nova] [instance: f8b36479-70a1-4f4e-84f4-e3baf9a56c45] Received event network-changed-6885c1b8-b5bc-4e67-963a-1bc48d70db6b {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 613.636725] env[61839]: DEBUG nova.compute.manager [req-6545903c-00b6-4893-831f-77a32c0cadef req-d7e178b2-cbfd-40e2-9a8e-cd4d1315c7dc service nova] [instance: f8b36479-70a1-4f4e-84f4-e3baf9a56c45] Refreshing instance network info cache due to event network-changed-6885c1b8-b5bc-4e67-963a-1bc48d70db6b. {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 613.637080] env[61839]: DEBUG oslo_concurrency.lockutils [req-6545903c-00b6-4893-831f-77a32c0cadef req-d7e178b2-cbfd-40e2-9a8e-cd4d1315c7dc service nova] Acquiring lock "refresh_cache-f8b36479-70a1-4f4e-84f4-e3baf9a56c45" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 613.647742] env[61839]: DEBUG oslo_concurrency.lockutils [req-13d83a5b-4467-4612-bb9f-a23091e73f90 req-3ff21fe8-fad9-44e6-939b-fddc14a204f4 service nova] Releasing lock "refresh_cache-7fdd773e-3a96-4728-b162-0227a415bc96" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 613.648352] env[61839]: DEBUG nova.compute.manager [req-13d83a5b-4467-4612-bb9f-a23091e73f90 req-3ff21fe8-fad9-44e6-939b-fddc14a204f4 service nova] [instance: 7fdd773e-3a96-4728-b162-0227a415bc96] Received event network-vif-deleted-daf6e1bb-8088-4f75-aea6-a7512ec93136 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 613.722062] env[61839]: DEBUG oslo_concurrency.lockutils [None req-f03d1837-14e7-4fc0-adb1-ca8100ce1425 tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.634s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 613.722656] env[61839]: DEBUG nova.compute.manager [None req-f03d1837-14e7-4fc0-adb1-ca8100ce1425 tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] [instance: 86a1fc77-26d9-44c7-8f1f-771315769619] Start building networks asynchronously for instance. 
{{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 613.727638] env[61839]: DEBUG oslo_concurrency.lockutils [None req-6d60a164-1a17-4d70-8bc1-56b11131db97 tempest-InstanceActionsV221TestJSON-240629897 tempest-InstanceActionsV221TestJSON-240629897-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.003s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 613.904823] env[61839]: DEBUG nova.network.neutron [None req-3986eaa2-d0de-4161-82e0-803ae69da9c7 tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] [instance: f8b36479-70a1-4f4e-84f4-e3baf9a56c45] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 613.997820] env[61839]: DEBUG nova.network.neutron [None req-3986eaa2-d0de-4161-82e0-803ae69da9c7 tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] [instance: f8b36479-70a1-4f4e-84f4-e3baf9a56c45] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 614.232972] env[61839]: DEBUG nova.compute.utils [None req-f03d1837-14e7-4fc0-adb1-ca8100ce1425 tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] Using /dev/sd instead of None {{(pid=61839) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 614.237980] env[61839]: DEBUG nova.compute.manager [None req-f03d1837-14e7-4fc0-adb1-ca8100ce1425 tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] [instance: 86a1fc77-26d9-44c7-8f1f-771315769619] Allocating IP information in the background. 
{{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 614.238191] env[61839]: DEBUG nova.network.neutron [None req-f03d1837-14e7-4fc0-adb1-ca8100ce1425 tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] [instance: 86a1fc77-26d9-44c7-8f1f-771315769619] allocate_for_instance() {{(pid=61839) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 614.285945] env[61839]: DEBUG nova.policy [None req-f03d1837-14e7-4fc0-adb1-ca8100ce1425 tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7cdecbaec4c24cf39bc814b8d5e56976', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6c42b8792e72408e928cc5b584f6a273', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61839) authorize /opt/stack/nova/nova/policy.py:201}} [ 614.502809] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3986eaa2-d0de-4161-82e0-803ae69da9c7 tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] Releasing lock "refresh_cache-f8b36479-70a1-4f4e-84f4-e3baf9a56c45" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 614.503299] env[61839]: DEBUG nova.compute.manager [None req-3986eaa2-d0de-4161-82e0-803ae69da9c7 tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] [instance: f8b36479-70a1-4f4e-84f4-e3baf9a56c45] Start destroying the instance on the hypervisor. 
{{(pid=61839) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 614.503457] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-3986eaa2-d0de-4161-82e0-803ae69da9c7 tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] [instance: f8b36479-70a1-4f4e-84f4-e3baf9a56c45] Destroying instance {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 614.503960] env[61839]: DEBUG oslo_concurrency.lockutils [req-6545903c-00b6-4893-831f-77a32c0cadef req-d7e178b2-cbfd-40e2-9a8e-cd4d1315c7dc service nova] Acquired lock "refresh_cache-f8b36479-70a1-4f4e-84f4-e3baf9a56c45" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 614.504191] env[61839]: DEBUG nova.network.neutron [req-6545903c-00b6-4893-831f-77a32c0cadef req-d7e178b2-cbfd-40e2-9a8e-cd4d1315c7dc service nova] [instance: f8b36479-70a1-4f4e-84f4-e3baf9a56c45] Refreshing network info cache for port 6885c1b8-b5bc-4e67-963a-1bc48d70db6b {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 614.505491] env[61839]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-319659b5-922d-412e-ae7c-4cea74258b38 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.515021] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-958e9edd-36fa-418b-b780-53d8f2192955 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.540435] env[61839]: WARNING nova.virt.vmwareapi.vmops [None req-3986eaa2-d0de-4161-82e0-803ae69da9c7 tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] [instance: f8b36479-70a1-4f4e-84f4-e3baf9a56c45] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance f8b36479-70a1-4f4e-84f4-e3baf9a56c45 could not be found. [ 614.540661] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-3986eaa2-d0de-4161-82e0-803ae69da9c7 tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] [instance: f8b36479-70a1-4f4e-84f4-e3baf9a56c45] Instance destroyed {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 614.540842] env[61839]: INFO nova.compute.manager [None req-3986eaa2-d0de-4161-82e0-803ae69da9c7 tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] [instance: f8b36479-70a1-4f4e-84f4-e3baf9a56c45] Took 0.04 seconds to destroy the instance on the hypervisor. [ 614.541144] env[61839]: DEBUG oslo.service.loopingcall [None req-3986eaa2-d0de-4161-82e0-803ae69da9c7 tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 614.543691] env[61839]: DEBUG nova.compute.manager [-] [instance: f8b36479-70a1-4f4e-84f4-e3baf9a56c45] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 614.543691] env[61839]: DEBUG nova.network.neutron [-] [instance: f8b36479-70a1-4f4e-84f4-e3baf9a56c45] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 614.572433] env[61839]: DEBUG nova.network.neutron [-] [instance: f8b36479-70a1-4f4e-84f4-e3baf9a56c45] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 614.658759] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83c0c7ad-cdb2-4a7a-b2ca-26b31412f2e1 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.667408] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a8fb5b3-fa54-48f9-b98f-f54c755bcf92 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.696442] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b20142ee-12b5-4df1-92f5-fbaab27343aa {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.703781] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb7e2c23-dd64-4434-a044-927b8ff9b59c {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.709419] env[61839]: DEBUG nova.network.neutron [None req-f03d1837-14e7-4fc0-adb1-ca8100ce1425 tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] [instance: 86a1fc77-26d9-44c7-8f1f-771315769619] Successfully created port: 96fcea9f-31d0-41ce-8278-a9e04715c1d5 {{(pid=61839) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 614.721752] env[61839]: DEBUG nova.compute.provider_tree [None req-6d60a164-1a17-4d70-8bc1-56b11131db97 tempest-InstanceActionsV221TestJSON-240629897 tempest-InstanceActionsV221TestJSON-240629897-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 614.739125] env[61839]: DEBUG nova.compute.manager [None req-f03d1837-14e7-4fc0-adb1-ca8100ce1425 tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] [instance: 86a1fc77-26d9-44c7-8f1f-771315769619] Start building block device mappings for instance. {{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 615.048269] env[61839]: DEBUG nova.network.neutron [req-6545903c-00b6-4893-831f-77a32c0cadef req-d7e178b2-cbfd-40e2-9a8e-cd4d1315c7dc service nova] [instance: f8b36479-70a1-4f4e-84f4-e3baf9a56c45] Instance cache missing network info. 
{{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 615.075607] env[61839]: DEBUG nova.network.neutron [-] [instance: f8b36479-70a1-4f4e-84f4-e3baf9a56c45] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 615.223750] env[61839]: DEBUG nova.scheduler.client.report [None req-6d60a164-1a17-4d70-8bc1-56b11131db97 tempest-InstanceActionsV221TestJSON-240629897 tempest-InstanceActionsV221TestJSON-240629897-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 615.243213] env[61839]: DEBUG nova.network.neutron [req-6545903c-00b6-4893-831f-77a32c0cadef req-d7e178b2-cbfd-40e2-9a8e-cd4d1315c7dc service nova] [instance: f8b36479-70a1-4f4e-84f4-e3baf9a56c45] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 615.579290] env[61839]: INFO nova.compute.manager [-] [instance: f8b36479-70a1-4f4e-84f4-e3baf9a56c45] Took 1.03 seconds to deallocate network for instance. [ 615.580269] env[61839]: DEBUG nova.compute.claims [None req-3986eaa2-d0de-4161-82e0-803ae69da9c7 tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] [instance: f8b36479-70a1-4f4e-84f4-e3baf9a56c45] Aborting claim: {{(pid=61839) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 615.580343] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3986eaa2-d0de-4161-82e0-803ae69da9c7 tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 615.730422] env[61839]: DEBUG oslo_concurrency.lockutils [None req-6d60a164-1a17-4d70-8bc1-56b11131db97 tempest-InstanceActionsV221TestJSON-240629897 tempest-InstanceActionsV221TestJSON-240629897-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.005s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 615.731172] env[61839]: ERROR nova.compute.manager [None req-6d60a164-1a17-4d70-8bc1-56b11131db97 tempest-InstanceActionsV221TestJSON-240629897 tempest-InstanceActionsV221TestJSON-240629897-project-member] [instance: 1edc2966-2edc-453e-a80d-c4139d910a6d] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port efae4232-1d88-49a1-ae17-8b83c8c13197, please check neutron logs for more information. 
[ 615.731172] env[61839]: ERROR nova.compute.manager [instance: 1edc2966-2edc-453e-a80d-c4139d910a6d] Traceback (most recent call last): [ 615.731172] env[61839]: ERROR nova.compute.manager [instance: 1edc2966-2edc-453e-a80d-c4139d910a6d] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 615.731172] env[61839]: ERROR nova.compute.manager [instance: 1edc2966-2edc-453e-a80d-c4139d910a6d] self.driver.spawn(context, instance, image_meta, [ 615.731172] env[61839]: ERROR nova.compute.manager [instance: 1edc2966-2edc-453e-a80d-c4139d910a6d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 615.731172] env[61839]: ERROR nova.compute.manager [instance: 1edc2966-2edc-453e-a80d-c4139d910a6d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 615.731172] env[61839]: ERROR nova.compute.manager [instance: 1edc2966-2edc-453e-a80d-c4139d910a6d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 615.731172] env[61839]: ERROR nova.compute.manager [instance: 1edc2966-2edc-453e-a80d-c4139d910a6d] vm_ref = self.build_virtual_machine(instance, [ 615.731172] env[61839]: ERROR nova.compute.manager [instance: 1edc2966-2edc-453e-a80d-c4139d910a6d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 615.731172] env[61839]: ERROR nova.compute.manager [instance: 1edc2966-2edc-453e-a80d-c4139d910a6d] vif_infos = vmwarevif.get_vif_info(self._session, [ 615.731172] env[61839]: ERROR nova.compute.manager [instance: 1edc2966-2edc-453e-a80d-c4139d910a6d] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 615.731486] env[61839]: ERROR nova.compute.manager [instance: 1edc2966-2edc-453e-a80d-c4139d910a6d] for vif in network_info: [ 615.731486] env[61839]: ERROR nova.compute.manager [instance: 1edc2966-2edc-453e-a80d-c4139d910a6d] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 615.731486] env[61839]: ERROR nova.compute.manager [instance: 1edc2966-2edc-453e-a80d-c4139d910a6d] return self._sync_wrapper(fn, *args, **kwargs) [ 615.731486] env[61839]: ERROR nova.compute.manager [instance: 1edc2966-2edc-453e-a80d-c4139d910a6d] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 615.731486] env[61839]: ERROR nova.compute.manager [instance: 1edc2966-2edc-453e-a80d-c4139d910a6d] self.wait() [ 615.731486] env[61839]: ERROR nova.compute.manager [instance: 1edc2966-2edc-453e-a80d-c4139d910a6d] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 615.731486] env[61839]: ERROR nova.compute.manager [instance: 1edc2966-2edc-453e-a80d-c4139d910a6d] self[:] = self._gt.wait() [ 615.731486] env[61839]: ERROR nova.compute.manager [instance: 1edc2966-2edc-453e-a80d-c4139d910a6d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 615.731486] env[61839]: ERROR nova.compute.manager [instance: 1edc2966-2edc-453e-a80d-c4139d910a6d] return self._exit_event.wait() [ 615.731486] env[61839]: ERROR nova.compute.manager [instance: 1edc2966-2edc-453e-a80d-c4139d910a6d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 615.731486] env[61839]: ERROR nova.compute.manager [instance: 1edc2966-2edc-453e-a80d-c4139d910a6d] result = hub.switch() [ 615.731486] env[61839]: ERROR nova.compute.manager [instance: 1edc2966-2edc-453e-a80d-c4139d910a6d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
615.731486] env[61839]: ERROR nova.compute.manager [instance: 1edc2966-2edc-453e-a80d-c4139d910a6d] return self.greenlet.switch() [ 615.731801] env[61839]: ERROR nova.compute.manager [instance: 1edc2966-2edc-453e-a80d-c4139d910a6d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 615.731801] env[61839]: ERROR nova.compute.manager [instance: 1edc2966-2edc-453e-a80d-c4139d910a6d] result = function(*args, **kwargs) [ 615.731801] env[61839]: ERROR nova.compute.manager [instance: 1edc2966-2edc-453e-a80d-c4139d910a6d] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 615.731801] env[61839]: ERROR nova.compute.manager [instance: 1edc2966-2edc-453e-a80d-c4139d910a6d] return func(*args, **kwargs) [ 615.731801] env[61839]: ERROR nova.compute.manager [instance: 1edc2966-2edc-453e-a80d-c4139d910a6d] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 615.731801] env[61839]: ERROR nova.compute.manager [instance: 1edc2966-2edc-453e-a80d-c4139d910a6d] raise e [ 615.731801] env[61839]: ERROR nova.compute.manager [instance: 1edc2966-2edc-453e-a80d-c4139d910a6d] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 615.731801] env[61839]: ERROR nova.compute.manager [instance: 1edc2966-2edc-453e-a80d-c4139d910a6d] nwinfo = self.network_api.allocate_for_instance( [ 615.731801] env[61839]: ERROR nova.compute.manager [instance: 1edc2966-2edc-453e-a80d-c4139d910a6d] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 615.731801] env[61839]: ERROR nova.compute.manager [instance: 1edc2966-2edc-453e-a80d-c4139d910a6d] created_port_ids = self._update_ports_for_instance( [ 615.731801] env[61839]: ERROR nova.compute.manager [instance: 1edc2966-2edc-453e-a80d-c4139d910a6d] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 615.731801] env[61839]: ERROR nova.compute.manager [instance: 1edc2966-2edc-453e-a80d-c4139d910a6d] with excutils.save_and_reraise_exception(): [ 615.731801] env[61839]: ERROR nova.compute.manager [instance: 1edc2966-2edc-453e-a80d-c4139d910a6d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 615.732162] env[61839]: ERROR nova.compute.manager [instance: 1edc2966-2edc-453e-a80d-c4139d910a6d] self.force_reraise() [ 615.732162] env[61839]: ERROR nova.compute.manager [instance: 1edc2966-2edc-453e-a80d-c4139d910a6d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 615.732162] env[61839]: ERROR nova.compute.manager [instance: 1edc2966-2edc-453e-a80d-c4139d910a6d] raise self.value [ 615.732162] env[61839]: ERROR nova.compute.manager [instance: 1edc2966-2edc-453e-a80d-c4139d910a6d] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 615.732162] env[61839]: ERROR nova.compute.manager [instance: 1edc2966-2edc-453e-a80d-c4139d910a6d] updated_port = self._update_port( [ 615.732162] env[61839]: ERROR nova.compute.manager [instance: 1edc2966-2edc-453e-a80d-c4139d910a6d] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 615.732162] env[61839]: ERROR nova.compute.manager [instance: 1edc2966-2edc-453e-a80d-c4139d910a6d] _ensure_no_port_binding_failure(port) [ 615.732162] env[61839]: ERROR nova.compute.manager [instance: 1edc2966-2edc-453e-a80d-c4139d910a6d] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 615.732162] env[61839]: ERROR nova.compute.manager [instance: 1edc2966-2edc-453e-a80d-c4139d910a6d] raise exception.PortBindingFailed(port_id=port['id']) [ 615.732162] env[61839]: ERROR nova.compute.manager [instance: 1edc2966-2edc-453e-a80d-c4139d910a6d] nova.exception.PortBindingFailed: Binding failed for port efae4232-1d88-49a1-ae17-8b83c8c13197, please check neutron logs for more information. [ 615.732162] env[61839]: ERROR nova.compute.manager [instance: 1edc2966-2edc-453e-a80d-c4139d910a6d] [ 615.732542] env[61839]: DEBUG nova.compute.utils [None req-6d60a164-1a17-4d70-8bc1-56b11131db97 tempest-InstanceActionsV221TestJSON-240629897 tempest-InstanceActionsV221TestJSON-240629897-project-member] [instance: 1edc2966-2edc-453e-a80d-c4139d910a6d] Binding failed for port efae4232-1d88-49a1-ae17-8b83c8c13197, please check neutron logs for more information. {{(pid=61839) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 615.733909] env[61839]: DEBUG oslo_concurrency.lockutils [None req-d5b699c0-0044-418d-93de-9c5330e52324 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.902s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 615.735874] env[61839]: INFO nova.compute.claims [None req-d5b699c0-0044-418d-93de-9c5330e52324 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] [instance: d8b0b608-d8ca-45ce-a113-ee96b5ef1a9d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 615.738551] env[61839]: DEBUG nova.compute.manager [None req-6d60a164-1a17-4d70-8bc1-56b11131db97 tempest-InstanceActionsV221TestJSON-240629897 tempest-InstanceActionsV221TestJSON-240629897-project-member] [instance: 1edc2966-2edc-453e-a80d-c4139d910a6d] Build of instance 1edc2966-2edc-453e-a80d-c4139d910a6d was re-scheduled: Binding failed for port efae4232-1d88-49a1-ae17-8b83c8c13197, please check neutron logs for more information. 
{{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 615.739474] env[61839]: DEBUG nova.compute.manager [None req-6d60a164-1a17-4d70-8bc1-56b11131db97 tempest-InstanceActionsV221TestJSON-240629897 tempest-InstanceActionsV221TestJSON-240629897-project-member] [instance: 1edc2966-2edc-453e-a80d-c4139d910a6d] Unplugging VIFs for instance {{(pid=61839) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 615.739747] env[61839]: DEBUG oslo_concurrency.lockutils [None req-6d60a164-1a17-4d70-8bc1-56b11131db97 tempest-InstanceActionsV221TestJSON-240629897 tempest-InstanceActionsV221TestJSON-240629897-project-member] Acquiring lock "refresh_cache-1edc2966-2edc-453e-a80d-c4139d910a6d" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 615.739959] env[61839]: DEBUG oslo_concurrency.lockutils [None req-6d60a164-1a17-4d70-8bc1-56b11131db97 tempest-InstanceActionsV221TestJSON-240629897 tempest-InstanceActionsV221TestJSON-240629897-project-member] Acquired lock "refresh_cache-1edc2966-2edc-453e-a80d-c4139d910a6d" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 615.740174] env[61839]: DEBUG nova.network.neutron [None req-6d60a164-1a17-4d70-8bc1-56b11131db97 tempest-InstanceActionsV221TestJSON-240629897 tempest-InstanceActionsV221TestJSON-240629897-project-member] [instance: 1edc2966-2edc-453e-a80d-c4139d910a6d] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 615.748651] env[61839]: DEBUG oslo_concurrency.lockutils [req-6545903c-00b6-4893-831f-77a32c0cadef req-d7e178b2-cbfd-40e2-9a8e-cd4d1315c7dc service nova] Releasing lock "refresh_cache-f8b36479-70a1-4f4e-84f4-e3baf9a56c45" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 615.749785] env[61839]: DEBUG nova.compute.manager [req-6545903c-00b6-4893-831f-77a32c0cadef req-d7e178b2-cbfd-40e2-9a8e-cd4d1315c7dc service nova] [instance: f8b36479-70a1-4f4e-84f4-e3baf9a56c45] Received event network-vif-deleted-6885c1b8-b5bc-4e67-963a-1bc48d70db6b {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 615.750625] env[61839]: DEBUG nova.compute.manager [None req-f03d1837-14e7-4fc0-adb1-ca8100ce1425 tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] [instance: 86a1fc77-26d9-44c7-8f1f-771315769619] Start spawning the instance on the hypervisor. 
{{(pid=61839) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 615.798607] env[61839]: DEBUG nova.virt.hardware [None req-f03d1837-14e7-4fc0-adb1-ca8100ce1425 tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-18T16:51:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-18T16:50:57Z,direct_url=,disk_format='vmdk',id=e497cc62-282a-4a70-9770-22d80d8a1013,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a98e034276e44534a9feace637762da7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-18T16:50:58Z,virtual_size=,visibility=), allow threads: False {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 615.798856] env[61839]: DEBUG nova.virt.hardware [None req-f03d1837-14e7-4fc0-adb1-ca8100ce1425 tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] Flavor limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 615.799031] env[61839]: DEBUG nova.virt.hardware [None req-f03d1837-14e7-4fc0-adb1-ca8100ce1425 tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] Image limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 615.799273] env[61839]: DEBUG nova.virt.hardware [None req-f03d1837-14e7-4fc0-adb1-ca8100ce1425 tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] Flavor pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 615.799368] env[61839]: DEBUG nova.virt.hardware [None req-f03d1837-14e7-4fc0-adb1-ca8100ce1425 tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] Image pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 615.799503] env[61839]: DEBUG nova.virt.hardware [None req-f03d1837-14e7-4fc0-adb1-ca8100ce1425 tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 615.799690] env[61839]: DEBUG nova.virt.hardware [None req-f03d1837-14e7-4fc0-adb1-ca8100ce1425 tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 615.799849] env[61839]: DEBUG nova.virt.hardware [None req-f03d1837-14e7-4fc0-adb1-ca8100ce1425 tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 
615.800059] env[61839]: DEBUG nova.virt.hardware [None req-f03d1837-14e7-4fc0-adb1-ca8100ce1425 tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] Got 1 possible topologies {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 615.800260] env[61839]: DEBUG nova.virt.hardware [None req-f03d1837-14e7-4fc0-adb1-ca8100ce1425 tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 615.800409] env[61839]: DEBUG nova.virt.hardware [None req-f03d1837-14e7-4fc0-adb1-ca8100ce1425 tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 615.801609] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee71e913-8b96-4eef-b530-4abdb20b9b6e {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.810827] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-714e1e1f-d569-4ecf-9683-54733042807f {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.015915] env[61839]: DEBUG nova.compute.manager [req-a6ceb4f7-8bcb-43e3-9a77-5395255de595 req-cfe97cf1-13ab-4fcb-ac07-7cf6855496d7 service nova] [instance: 86a1fc77-26d9-44c7-8f1f-771315769619] Received event network-changed-96fcea9f-31d0-41ce-8278-a9e04715c1d5 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 616.016131] env[61839]: DEBUG nova.compute.manager [req-a6ceb4f7-8bcb-43e3-9a77-5395255de595 req-cfe97cf1-13ab-4fcb-ac07-7cf6855496d7 service nova] [instance: 86a1fc77-26d9-44c7-8f1f-771315769619] Refreshing instance network info cache due to event network-changed-96fcea9f-31d0-41ce-8278-a9e04715c1d5. 
{{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 616.016357] env[61839]: DEBUG oslo_concurrency.lockutils [req-a6ceb4f7-8bcb-43e3-9a77-5395255de595 req-cfe97cf1-13ab-4fcb-ac07-7cf6855496d7 service nova] Acquiring lock "refresh_cache-86a1fc77-26d9-44c7-8f1f-771315769619" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 616.016502] env[61839]: DEBUG oslo_concurrency.lockutils [req-a6ceb4f7-8bcb-43e3-9a77-5395255de595 req-cfe97cf1-13ab-4fcb-ac07-7cf6855496d7 service nova] Acquired lock "refresh_cache-86a1fc77-26d9-44c7-8f1f-771315769619" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 616.016661] env[61839]: DEBUG nova.network.neutron [req-a6ceb4f7-8bcb-43e3-9a77-5395255de595 req-cfe97cf1-13ab-4fcb-ac07-7cf6855496d7 service nova] [instance: 86a1fc77-26d9-44c7-8f1f-771315769619] Refreshing network info cache for port 96fcea9f-31d0-41ce-8278-a9e04715c1d5 {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 616.222197] env[61839]: ERROR nova.compute.manager [None req-f03d1837-14e7-4fc0-adb1-ca8100ce1425 tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 96fcea9f-31d0-41ce-8278-a9e04715c1d5, please check neutron logs for more information. [ 616.222197] env[61839]: ERROR nova.compute.manager Traceback (most recent call last): [ 616.222197] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 616.222197] env[61839]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 616.222197] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 616.222197] env[61839]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 616.222197] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 616.222197] env[61839]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 616.222197] env[61839]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 616.222197] env[61839]: ERROR nova.compute.manager self.force_reraise() [ 616.222197] env[61839]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 616.222197] env[61839]: ERROR nova.compute.manager raise self.value [ 616.222197] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 616.222197] env[61839]: ERROR nova.compute.manager updated_port = self._update_port( [ 616.222197] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 616.222197] env[61839]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 616.222662] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 616.222662] env[61839]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 616.222662] env[61839]: ERROR nova.compute.manager 
nova.exception.PortBindingFailed: Binding failed for port 96fcea9f-31d0-41ce-8278-a9e04715c1d5, please check neutron logs for more information. [ 616.222662] env[61839]: ERROR nova.compute.manager [ 616.222662] env[61839]: Traceback (most recent call last): [ 616.222662] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 616.222662] env[61839]: listener.cb(fileno) [ 616.222662] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 616.222662] env[61839]: result = function(*args, **kwargs) [ 616.222662] env[61839]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 616.222662] env[61839]: return func(*args, **kwargs) [ 616.222662] env[61839]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 616.222662] env[61839]: raise e [ 616.222662] env[61839]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 616.222662] env[61839]: nwinfo = self.network_api.allocate_for_instance( [ 616.222662] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 616.222662] env[61839]: created_port_ids = self._update_ports_for_instance( [ 616.222662] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 616.222662] env[61839]: with excutils.save_and_reraise_exception(): [ 616.222662] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 616.222662] env[61839]: self.force_reraise() [ 616.222662] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 616.222662] env[61839]: raise self.value [ 616.222662] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 616.222662] env[61839]: updated_port = self._update_port( [ 616.222662] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 616.222662] env[61839]: _ensure_no_port_binding_failure(port) [ 616.222662] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 616.222662] env[61839]: raise exception.PortBindingFailed(port_id=port['id']) [ 616.223449] env[61839]: nova.exception.PortBindingFailed: Binding failed for port 96fcea9f-31d0-41ce-8278-a9e04715c1d5, please check neutron logs for more information. [ 616.223449] env[61839]: Removing descriptor: 17 [ 616.223449] env[61839]: ERROR nova.compute.manager [None req-f03d1837-14e7-4fc0-adb1-ca8100ce1425 tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] [instance: 86a1fc77-26d9-44c7-8f1f-771315769619] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 96fcea9f-31d0-41ce-8278-a9e04715c1d5, please check neutron logs for more information. 
[ 616.223449] env[61839]: ERROR nova.compute.manager [instance: 86a1fc77-26d9-44c7-8f1f-771315769619] Traceback (most recent call last): [ 616.223449] env[61839]: ERROR nova.compute.manager [instance: 86a1fc77-26d9-44c7-8f1f-771315769619] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 616.223449] env[61839]: ERROR nova.compute.manager [instance: 86a1fc77-26d9-44c7-8f1f-771315769619] yield resources [ 616.223449] env[61839]: ERROR nova.compute.manager [instance: 86a1fc77-26d9-44c7-8f1f-771315769619] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 616.223449] env[61839]: ERROR nova.compute.manager [instance: 86a1fc77-26d9-44c7-8f1f-771315769619] self.driver.spawn(context, instance, image_meta, [ 616.223449] env[61839]: ERROR nova.compute.manager [instance: 86a1fc77-26d9-44c7-8f1f-771315769619] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 616.223449] env[61839]: ERROR nova.compute.manager [instance: 86a1fc77-26d9-44c7-8f1f-771315769619] self._vmops.spawn(context, instance, image_meta, injected_files, [ 616.223449] env[61839]: ERROR nova.compute.manager [instance: 86a1fc77-26d9-44c7-8f1f-771315769619] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 616.223449] env[61839]: ERROR nova.compute.manager [instance: 86a1fc77-26d9-44c7-8f1f-771315769619] vm_ref = self.build_virtual_machine(instance, [ 616.223732] env[61839]: ERROR nova.compute.manager [instance: 86a1fc77-26d9-44c7-8f1f-771315769619] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 616.223732] env[61839]: ERROR nova.compute.manager [instance: 86a1fc77-26d9-44c7-8f1f-771315769619] vif_infos = vmwarevif.get_vif_info(self._session, [ 616.223732] env[61839]: ERROR nova.compute.manager [instance: 86a1fc77-26d9-44c7-8f1f-771315769619] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 616.223732] env[61839]: ERROR nova.compute.manager [instance: 86a1fc77-26d9-44c7-8f1f-771315769619] for vif in network_info: [ 616.223732] env[61839]: ERROR nova.compute.manager [instance: 86a1fc77-26d9-44c7-8f1f-771315769619] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 616.223732] env[61839]: ERROR nova.compute.manager [instance: 86a1fc77-26d9-44c7-8f1f-771315769619] return self._sync_wrapper(fn, *args, **kwargs) [ 616.223732] env[61839]: ERROR nova.compute.manager [instance: 86a1fc77-26d9-44c7-8f1f-771315769619] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 616.223732] env[61839]: ERROR nova.compute.manager [instance: 86a1fc77-26d9-44c7-8f1f-771315769619] self.wait() [ 616.223732] env[61839]: ERROR nova.compute.manager [instance: 86a1fc77-26d9-44c7-8f1f-771315769619] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 616.223732] env[61839]: ERROR nova.compute.manager [instance: 86a1fc77-26d9-44c7-8f1f-771315769619] self[:] = self._gt.wait() [ 616.223732] env[61839]: ERROR nova.compute.manager [instance: 86a1fc77-26d9-44c7-8f1f-771315769619] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 616.223732] env[61839]: ERROR nova.compute.manager [instance: 86a1fc77-26d9-44c7-8f1f-771315769619] return self._exit_event.wait() [ 616.223732] env[61839]: ERROR nova.compute.manager [instance: 86a1fc77-26d9-44c7-8f1f-771315769619] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 616.224052] env[61839]: ERROR 
nova.compute.manager [instance: 86a1fc77-26d9-44c7-8f1f-771315769619] result = hub.switch() [ 616.224052] env[61839]: ERROR nova.compute.manager [instance: 86a1fc77-26d9-44c7-8f1f-771315769619] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 616.224052] env[61839]: ERROR nova.compute.manager [instance: 86a1fc77-26d9-44c7-8f1f-771315769619] return self.greenlet.switch() [ 616.224052] env[61839]: ERROR nova.compute.manager [instance: 86a1fc77-26d9-44c7-8f1f-771315769619] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 616.224052] env[61839]: ERROR nova.compute.manager [instance: 86a1fc77-26d9-44c7-8f1f-771315769619] result = function(*args, **kwargs) [ 616.224052] env[61839]: ERROR nova.compute.manager [instance: 86a1fc77-26d9-44c7-8f1f-771315769619] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 616.224052] env[61839]: ERROR nova.compute.manager [instance: 86a1fc77-26d9-44c7-8f1f-771315769619] return func(*args, **kwargs) [ 616.224052] env[61839]: ERROR nova.compute.manager [instance: 86a1fc77-26d9-44c7-8f1f-771315769619] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 616.224052] env[61839]: ERROR nova.compute.manager [instance: 86a1fc77-26d9-44c7-8f1f-771315769619] raise e [ 616.224052] env[61839]: ERROR nova.compute.manager [instance: 86a1fc77-26d9-44c7-8f1f-771315769619] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 616.224052] env[61839]: ERROR nova.compute.manager [instance: 86a1fc77-26d9-44c7-8f1f-771315769619] nwinfo = self.network_api.allocate_for_instance( [ 616.224052] env[61839]: ERROR nova.compute.manager [instance: 86a1fc77-26d9-44c7-8f1f-771315769619] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 616.224052] env[61839]: ERROR nova.compute.manager [instance: 86a1fc77-26d9-44c7-8f1f-771315769619] created_port_ids = self._update_ports_for_instance( [ 616.224415] env[61839]: ERROR nova.compute.manager [instance: 86a1fc77-26d9-44c7-8f1f-771315769619] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 616.224415] env[61839]: ERROR nova.compute.manager [instance: 86a1fc77-26d9-44c7-8f1f-771315769619] with excutils.save_and_reraise_exception(): [ 616.224415] env[61839]: ERROR nova.compute.manager [instance: 86a1fc77-26d9-44c7-8f1f-771315769619] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 616.224415] env[61839]: ERROR nova.compute.manager [instance: 86a1fc77-26d9-44c7-8f1f-771315769619] self.force_reraise() [ 616.224415] env[61839]: ERROR nova.compute.manager [instance: 86a1fc77-26d9-44c7-8f1f-771315769619] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 616.224415] env[61839]: ERROR nova.compute.manager [instance: 86a1fc77-26d9-44c7-8f1f-771315769619] raise self.value [ 616.224415] env[61839]: ERROR nova.compute.manager [instance: 86a1fc77-26d9-44c7-8f1f-771315769619] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 616.224415] env[61839]: ERROR nova.compute.manager [instance: 86a1fc77-26d9-44c7-8f1f-771315769619] updated_port = self._update_port( [ 616.224415] env[61839]: ERROR nova.compute.manager [instance: 86a1fc77-26d9-44c7-8f1f-771315769619] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 616.224415] 
env[61839]: ERROR nova.compute.manager [instance: 86a1fc77-26d9-44c7-8f1f-771315769619] _ensure_no_port_binding_failure(port) [ 616.224415] env[61839]: ERROR nova.compute.manager [instance: 86a1fc77-26d9-44c7-8f1f-771315769619] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 616.224415] env[61839]: ERROR nova.compute.manager [instance: 86a1fc77-26d9-44c7-8f1f-771315769619] raise exception.PortBindingFailed(port_id=port['id']) [ 616.224688] env[61839]: ERROR nova.compute.manager [instance: 86a1fc77-26d9-44c7-8f1f-771315769619] nova.exception.PortBindingFailed: Binding failed for port 96fcea9f-31d0-41ce-8278-a9e04715c1d5, please check neutron logs for more information. [ 616.224688] env[61839]: ERROR nova.compute.manager [instance: 86a1fc77-26d9-44c7-8f1f-771315769619] [ 616.224688] env[61839]: INFO nova.compute.manager [None req-f03d1837-14e7-4fc0-adb1-ca8100ce1425 tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] [instance: 86a1fc77-26d9-44c7-8f1f-771315769619] Terminating instance [ 616.231067] env[61839]: DEBUG oslo_concurrency.lockutils [None req-f03d1837-14e7-4fc0-adb1-ca8100ce1425 tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] Acquiring lock "refresh_cache-86a1fc77-26d9-44c7-8f1f-771315769619" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 616.267760] env[61839]: DEBUG nova.network.neutron [None req-6d60a164-1a17-4d70-8bc1-56b11131db97 tempest-InstanceActionsV221TestJSON-240629897 tempest-InstanceActionsV221TestJSON-240629897-project-member] [instance: 1edc2966-2edc-453e-a80d-c4139d910a6d] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 616.409771] env[61839]: DEBUG nova.network.neutron [None req-6d60a164-1a17-4d70-8bc1-56b11131db97 tempest-InstanceActionsV221TestJSON-240629897 tempest-InstanceActionsV221TestJSON-240629897-project-member] [instance: 1edc2966-2edc-453e-a80d-c4139d910a6d] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 616.543788] env[61839]: DEBUG nova.network.neutron [req-a6ceb4f7-8bcb-43e3-9a77-5395255de595 req-cfe97cf1-13ab-4fcb-ac07-7cf6855496d7 service nova] [instance: 86a1fc77-26d9-44c7-8f1f-771315769619] Instance cache missing network info. 
{{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 616.705700] env[61839]: DEBUG nova.network.neutron [req-a6ceb4f7-8bcb-43e3-9a77-5395255de595 req-cfe97cf1-13ab-4fcb-ac07-7cf6855496d7 service nova] [instance: 86a1fc77-26d9-44c7-8f1f-771315769619] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 616.913997] env[61839]: DEBUG oslo_concurrency.lockutils [None req-6d60a164-1a17-4d70-8bc1-56b11131db97 tempest-InstanceActionsV221TestJSON-240629897 tempest-InstanceActionsV221TestJSON-240629897-project-member] Releasing lock "refresh_cache-1edc2966-2edc-453e-a80d-c4139d910a6d" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 616.914448] env[61839]: DEBUG nova.compute.manager [None req-6d60a164-1a17-4d70-8bc1-56b11131db97 tempest-InstanceActionsV221TestJSON-240629897 tempest-InstanceActionsV221TestJSON-240629897-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61839) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 616.914735] env[61839]: DEBUG nova.compute.manager [None req-6d60a164-1a17-4d70-8bc1-56b11131db97 tempest-InstanceActionsV221TestJSON-240629897 tempest-InstanceActionsV221TestJSON-240629897-project-member] [instance: 1edc2966-2edc-453e-a80d-c4139d910a6d] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 616.915018] env[61839]: DEBUG nova.network.neutron [None req-6d60a164-1a17-4d70-8bc1-56b11131db97 tempest-InstanceActionsV221TestJSON-240629897 tempest-InstanceActionsV221TestJSON-240629897-project-member] [instance: 1edc2966-2edc-453e-a80d-c4139d910a6d] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 616.942282] env[61839]: DEBUG nova.network.neutron [None req-6d60a164-1a17-4d70-8bc1-56b11131db97 tempest-InstanceActionsV221TestJSON-240629897 tempest-InstanceActionsV221TestJSON-240629897-project-member] [instance: 1edc2966-2edc-453e-a80d-c4139d910a6d] Instance cache missing network info. 
{{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 617.188759] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1505a068-6f66-463a-b943-781f226be3e0 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.196242] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e48d4df-2a66-4fd4-ab9a-b2dc28fb4c5d {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.225165] env[61839]: DEBUG oslo_concurrency.lockutils [req-a6ceb4f7-8bcb-43e3-9a77-5395255de595 req-cfe97cf1-13ab-4fcb-ac07-7cf6855496d7 service nova] Releasing lock "refresh_cache-86a1fc77-26d9-44c7-8f1f-771315769619" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 617.226158] env[61839]: DEBUG oslo_concurrency.lockutils [None req-f03d1837-14e7-4fc0-adb1-ca8100ce1425 tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] Acquired lock "refresh_cache-86a1fc77-26d9-44c7-8f1f-771315769619" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 617.226357] env[61839]: DEBUG nova.network.neutron [None req-f03d1837-14e7-4fc0-adb1-ca8100ce1425 tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] [instance: 86a1fc77-26d9-44c7-8f1f-771315769619] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 617.227885] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4f2de5c-8a4b-46ec-83e8-1249e601bb1d {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.236054] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31b9c048-5fdb-436d-9195-dba293294639 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.249753] env[61839]: DEBUG nova.compute.provider_tree [None req-d5b699c0-0044-418d-93de-9c5330e52324 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 617.445296] env[61839]: DEBUG nova.network.neutron [None req-6d60a164-1a17-4d70-8bc1-56b11131db97 tempest-InstanceActionsV221TestJSON-240629897 tempest-InstanceActionsV221TestJSON-240629897-project-member] [instance: 1edc2966-2edc-453e-a80d-c4139d910a6d] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 617.756035] env[61839]: DEBUG nova.scheduler.client.report [None req-d5b699c0-0044-418d-93de-9c5330e52324 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 
'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 617.760319] env[61839]: DEBUG nova.network.neutron [None req-f03d1837-14e7-4fc0-adb1-ca8100ce1425 tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] [instance: 86a1fc77-26d9-44c7-8f1f-771315769619] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 617.850649] env[61839]: DEBUG nova.network.neutron [None req-f03d1837-14e7-4fc0-adb1-ca8100ce1425 tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] [instance: 86a1fc77-26d9-44c7-8f1f-771315769619] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 617.950363] env[61839]: INFO nova.compute.manager [None req-6d60a164-1a17-4d70-8bc1-56b11131db97 tempest-InstanceActionsV221TestJSON-240629897 tempest-InstanceActionsV221TestJSON-240629897-project-member] [instance: 1edc2966-2edc-453e-a80d-c4139d910a6d] Took 1.03 seconds to deallocate network for instance. [ 618.085266] env[61839]: DEBUG nova.compute.manager [req-8f577b0d-6aac-40c1-a1ed-49a4a2f1d60a req-9475ba4f-5352-407b-9b1b-c79cd6ed56b2 service nova] [instance: 86a1fc77-26d9-44c7-8f1f-771315769619] Received event network-vif-deleted-96fcea9f-31d0-41ce-8278-a9e04715c1d5 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 618.258515] env[61839]: DEBUG oslo_concurrency.lockutils [None req-d5b699c0-0044-418d-93de-9c5330e52324 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.524s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 618.258756] env[61839]: DEBUG nova.compute.manager [None req-d5b699c0-0044-418d-93de-9c5330e52324 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] [instance: d8b0b608-d8ca-45ce-a113-ee96b5ef1a9d] Start building networks asynchronously for instance. 
{{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 618.261696] env[61839]: DEBUG oslo_concurrency.lockutils [None req-db043505-be83-48ec-924b-c8d945a5cedf tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.872s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 618.265022] env[61839]: INFO nova.compute.claims [None req-db043505-be83-48ec-924b-c8d945a5cedf tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: 7d9a24e0-c265-4255-964f-54c971c02ded] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 618.354047] env[61839]: DEBUG oslo_concurrency.lockutils [None req-f03d1837-14e7-4fc0-adb1-ca8100ce1425 tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] Releasing lock "refresh_cache-86a1fc77-26d9-44c7-8f1f-771315769619" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 618.354047] env[61839]: DEBUG nova.compute.manager [None req-f03d1837-14e7-4fc0-adb1-ca8100ce1425 tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] [instance: 86a1fc77-26d9-44c7-8f1f-771315769619] Start destroying the instance on the hypervisor. {{(pid=61839) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 618.354222] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-f03d1837-14e7-4fc0-adb1-ca8100ce1425 tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] [instance: 86a1fc77-26d9-44c7-8f1f-771315769619] Destroying instance {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 618.354497] env[61839]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8fbbc7fd-8b61-4a86-849a-6adc29a059de {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.363508] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1738f886-89e0-4460-9185-616c68e5c645 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.384254] env[61839]: WARNING nova.virt.vmwareapi.vmops [None req-f03d1837-14e7-4fc0-adb1-ca8100ce1425 tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] [instance: 86a1fc77-26d9-44c7-8f1f-771315769619] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 86a1fc77-26d9-44c7-8f1f-771315769619 could not be found. [ 618.384577] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-f03d1837-14e7-4fc0-adb1-ca8100ce1425 tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] [instance: 86a1fc77-26d9-44c7-8f1f-771315769619] Instance destroyed {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 618.384856] env[61839]: INFO nova.compute.manager [None req-f03d1837-14e7-4fc0-adb1-ca8100ce1425 tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] [instance: 86a1fc77-26d9-44c7-8f1f-771315769619] Took 0.03 seconds to destroy the instance on the hypervisor. 
[ 618.385133] env[61839]: DEBUG oslo.service.loopingcall [None req-f03d1837-14e7-4fc0-adb1-ca8100ce1425 tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 618.385356] env[61839]: DEBUG nova.compute.manager [-] [instance: 86a1fc77-26d9-44c7-8f1f-771315769619] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 618.385448] env[61839]: DEBUG nova.network.neutron [-] [instance: 86a1fc77-26d9-44c7-8f1f-771315769619] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 618.400975] env[61839]: DEBUG nova.network.neutron [-] [instance: 86a1fc77-26d9-44c7-8f1f-771315769619] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 618.767808] env[61839]: DEBUG nova.compute.utils [None req-d5b699c0-0044-418d-93de-9c5330e52324 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] Using /dev/sd instead of None {{(pid=61839) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 618.772057] env[61839]: DEBUG nova.compute.manager [None req-d5b699c0-0044-418d-93de-9c5330e52324 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] [instance: d8b0b608-d8ca-45ce-a113-ee96b5ef1a9d] Not allocating networking since 'none' was specified. {{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 618.904049] env[61839]: DEBUG nova.network.neutron [-] [instance: 86a1fc77-26d9-44c7-8f1f-771315769619] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 618.979173] env[61839]: INFO nova.scheduler.client.report [None req-6d60a164-1a17-4d70-8bc1-56b11131db97 tempest-InstanceActionsV221TestJSON-240629897 tempest-InstanceActionsV221TestJSON-240629897-project-member] Deleted allocations for instance 1edc2966-2edc-453e-a80d-c4139d910a6d [ 619.273219] env[61839]: DEBUG nova.compute.manager [None req-d5b699c0-0044-418d-93de-9c5330e52324 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] [instance: d8b0b608-d8ca-45ce-a113-ee96b5ef1a9d] Start building block device mappings for instance. {{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 619.406563] env[61839]: INFO nova.compute.manager [-] [instance: 86a1fc77-26d9-44c7-8f1f-771315769619] Took 1.02 seconds to deallocate network for instance. 
[ 619.409030] env[61839]: DEBUG nova.compute.claims [None req-f03d1837-14e7-4fc0-adb1-ca8100ce1425 tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] [instance: 86a1fc77-26d9-44c7-8f1f-771315769619] Aborting claim: {{(pid=61839) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 619.409825] env[61839]: DEBUG oslo_concurrency.lockutils [None req-f03d1837-14e7-4fc0-adb1-ca8100ce1425 tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 619.486686] env[61839]: DEBUG oslo_concurrency.lockutils [None req-6d60a164-1a17-4d70-8bc1-56b11131db97 tempest-InstanceActionsV221TestJSON-240629897 tempest-InstanceActionsV221TestJSON-240629897-project-member] Lock "1edc2966-2edc-453e-a80d-c4139d910a6d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 75.371s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 619.619655] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfacab80-3c21-4e89-b21d-39f9970228e0 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.628482] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2da579a-8721-4f08-a953-67654b7b6653 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.666670] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5f7f588-0964-4ed6-8e21-d0a6a28c5d37 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.675430] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9e10886-1839-47c5-b815-9b93b10ba562 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.688938] env[61839]: DEBUG nova.compute.provider_tree [None req-db043505-be83-48ec-924b-c8d945a5cedf tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 619.988987] env[61839]: DEBUG nova.compute.manager [None req-76059687-6f91-475a-bfaf-52787dc520ba tempest-AttachInterfacesUnderV243Test-1125158786 tempest-AttachInterfacesUnderV243Test-1125158786-project-member] [instance: 328882b4-d1af-4036-b313-ecada7d53899] Starting instance... 
{{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 620.191802] env[61839]: DEBUG nova.scheduler.client.report [None req-db043505-be83-48ec-924b-c8d945a5cedf tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 620.284850] env[61839]: DEBUG nova.compute.manager [None req-d5b699c0-0044-418d-93de-9c5330e52324 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] [instance: d8b0b608-d8ca-45ce-a113-ee96b5ef1a9d] Start spawning the instance on the hypervisor. {{(pid=61839) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 620.311116] env[61839]: DEBUG nova.virt.hardware [None req-d5b699c0-0044-418d-93de-9c5330e52324 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-18T16:51:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-18T16:50:57Z,direct_url=,disk_format='vmdk',id=e497cc62-282a-4a70-9770-22d80d8a1013,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a98e034276e44534a9feace637762da7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-18T16:50:58Z,virtual_size=,visibility=), allow threads: False {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 620.311500] env[61839]: DEBUG nova.virt.hardware [None req-d5b699c0-0044-418d-93de-9c5330e52324 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] Flavor limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 620.311715] env[61839]: DEBUG nova.virt.hardware [None req-d5b699c0-0044-418d-93de-9c5330e52324 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] Image limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 620.311917] env[61839]: DEBUG nova.virt.hardware [None req-d5b699c0-0044-418d-93de-9c5330e52324 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] Flavor pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 620.312151] env[61839]: DEBUG nova.virt.hardware [None req-d5b699c0-0044-418d-93de-9c5330e52324 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] Image pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 620.312308] env[61839]: DEBUG nova.virt.hardware [None req-d5b699c0-0044-418d-93de-9c5330e52324 
tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 620.312513] env[61839]: DEBUG nova.virt.hardware [None req-d5b699c0-0044-418d-93de-9c5330e52324 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 620.312670] env[61839]: DEBUG nova.virt.hardware [None req-d5b699c0-0044-418d-93de-9c5330e52324 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 620.313351] env[61839]: DEBUG nova.virt.hardware [None req-d5b699c0-0044-418d-93de-9c5330e52324 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] Got 1 possible topologies {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 620.313351] env[61839]: DEBUG nova.virt.hardware [None req-d5b699c0-0044-418d-93de-9c5330e52324 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 620.313351] env[61839]: DEBUG nova.virt.hardware [None req-d5b699c0-0044-418d-93de-9c5330e52324 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 620.314083] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6bee75f-4fbd-4ef9-9e31-0cada1bfe1d3 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.322155] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b70e0ffd-d9ab-492b-8866-327005a5e591 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.336724] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-d5b699c0-0044-418d-93de-9c5330e52324 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] [instance: d8b0b608-d8ca-45ce-a113-ee96b5ef1a9d] Instance VIF info [] {{(pid=61839) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 620.342461] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-d5b699c0-0044-418d-93de-9c5330e52324 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] Creating folder: Project (93c30f8bc48d44b1a3c7f29f56396808). Parent ref: group-v281288. 
{{(pid=61839) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 620.343088] env[61839]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-66230ede-ac6a-409d-84b4-59ce77270f19 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.354329] env[61839]: INFO nova.virt.vmwareapi.vm_util [None req-d5b699c0-0044-418d-93de-9c5330e52324 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] Created folder: Project (93c30f8bc48d44b1a3c7f29f56396808) in parent group-v281288. [ 620.354603] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-d5b699c0-0044-418d-93de-9c5330e52324 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] Creating folder: Instances. Parent ref: group-v281301. {{(pid=61839) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 620.354842] env[61839]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c38bff92-8ab6-47d0-9084-f895ade28e69 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.364451] env[61839]: INFO nova.virt.vmwareapi.vm_util [None req-d5b699c0-0044-418d-93de-9c5330e52324 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] Created folder: Instances in parent group-v281301. [ 620.364668] env[61839]: DEBUG oslo.service.loopingcall [None req-d5b699c0-0044-418d-93de-9c5330e52324 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 620.364846] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d8b0b608-d8ca-45ce-a113-ee96b5ef1a9d] Creating VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 620.365042] env[61839]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-594ecb67-c7d6-4d72-8a62-0f65281a9a40 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.381195] env[61839]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 620.381195] env[61839]: value = "task-1314273" [ 620.381195] env[61839]: _type = "Task" [ 620.381195] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 620.389064] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314273, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 620.511536] env[61839]: DEBUG oslo_concurrency.lockutils [None req-76059687-6f91-475a-bfaf-52787dc520ba tempest-AttachInterfacesUnderV243Test-1125158786 tempest-AttachInterfacesUnderV243Test-1125158786-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 620.700312] env[61839]: DEBUG oslo_concurrency.lockutils [None req-db043505-be83-48ec-924b-c8d945a5cedf tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.438s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 620.700984] env[61839]: DEBUG nova.compute.manager [None req-db043505-be83-48ec-924b-c8d945a5cedf tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: 7d9a24e0-c265-4255-964f-54c971c02ded] Start building networks asynchronously for instance. {{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 620.706977] env[61839]: DEBUG oslo_concurrency.lockutils [None req-35b58d39-5733-4319-be91-b1dc8063c680 tempest-FloatingIPsAssociationNegativeTestJSON-254000745 tempest-FloatingIPsAssociationNegativeTestJSON-254000745-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 16.016s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 620.892395] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314273, 'name': CreateVM_Task, 'duration_secs': 0.244834} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 620.892576] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d8b0b608-d8ca-45ce-a113-ee96b5ef1a9d] Created VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 620.893178] env[61839]: DEBUG oslo_concurrency.lockutils [None req-d5b699c0-0044-418d-93de-9c5330e52324 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 620.893178] env[61839]: DEBUG oslo_concurrency.lockutils [None req-d5b699c0-0044-418d-93de-9c5330e52324 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 620.893471] env[61839]: DEBUG oslo_concurrency.lockutils [None req-d5b699c0-0044-418d-93de-9c5330e52324 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 620.893715] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d7533dc7-ec00-4e6b-b5d9-0805d0ee146e {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.898240] env[61839]: DEBUG oslo_vmware.api [None req-d5b699c0-0044-418d-93de-9c5330e52324 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] Waiting for the task: (returnval){ [ 620.898240] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52c9069e-7bb5-e616-0b8e-5def6f40b1ce" [ 620.898240] env[61839]: _type = "Task" [ 620.898240] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 620.906110] env[61839]: DEBUG oslo_vmware.api [None req-d5b699c0-0044-418d-93de-9c5330e52324 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52c9069e-7bb5-e616-0b8e-5def6f40b1ce, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 621.217025] env[61839]: DEBUG nova.compute.utils [None req-db043505-be83-48ec-924b-c8d945a5cedf tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Using /dev/sd instead of None {{(pid=61839) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 621.220932] env[61839]: DEBUG nova.compute.manager [None req-db043505-be83-48ec-924b-c8d945a5cedf tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: 7d9a24e0-c265-4255-964f-54c971c02ded] Allocating IP information in the background. 
{{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 621.221137] env[61839]: DEBUG nova.network.neutron [None req-db043505-be83-48ec-924b-c8d945a5cedf tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: 7d9a24e0-c265-4255-964f-54c971c02ded] allocate_for_instance() {{(pid=61839) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 621.285477] env[61839]: DEBUG nova.policy [None req-db043505-be83-48ec-924b-c8d945a5cedf tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'de344d8cc13340d7affed971d75f486d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '251b0d2531ba4f14a2eb6ea75382c418', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61839) authorize /opt/stack/nova/nova/policy.py:201}} [ 621.415774] env[61839]: DEBUG oslo_vmware.api [None req-d5b699c0-0044-418d-93de-9c5330e52324 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52c9069e-7bb5-e616-0b8e-5def6f40b1ce, 'name': SearchDatastore_Task, 'duration_secs': 0.011488} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 621.416636] env[61839]: DEBUG oslo_concurrency.lockutils [None req-d5b699c0-0044-418d-93de-9c5330e52324 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 621.416636] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-d5b699c0-0044-418d-93de-9c5330e52324 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] [instance: d8b0b608-d8ca-45ce-a113-ee96b5ef1a9d] Processing image e497cc62-282a-4a70-9770-22d80d8a1013 {{(pid=61839) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 621.416869] env[61839]: DEBUG oslo_concurrency.lockutils [None req-d5b699c0-0044-418d-93de-9c5330e52324 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 621.416942] env[61839]: DEBUG oslo_concurrency.lockutils [None req-d5b699c0-0044-418d-93de-9c5330e52324 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 621.417337] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-d5b699c0-0044-418d-93de-9c5330e52324 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61839) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 621.417604] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5cf49f15-32f3-45c4-bbd2-8be2e3c4b1fc {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.426404] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-d5b699c0-0044-418d-93de-9c5330e52324 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 621.426601] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-d5b699c0-0044-418d-93de-9c5330e52324 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61839) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 621.434877] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0416312d-de2d-4960-848e-3d9c8247648a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.435112] env[61839]: DEBUG oslo_vmware.api [None req-d5b699c0-0044-418d-93de-9c5330e52324 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] Waiting for the task: (returnval){ [ 621.435112] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52236a36-27d5-27f5-9158-403c73e17d11" [ 621.435112] env[61839]: _type = "Task" [ 621.435112] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 621.443253] env[61839]: DEBUG oslo_vmware.api [None req-d5b699c0-0044-418d-93de-9c5330e52324 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52236a36-27d5-27f5-9158-403c73e17d11, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 621.645856] env[61839]: DEBUG nova.network.neutron [None req-db043505-be83-48ec-924b-c8d945a5cedf tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: 7d9a24e0-c265-4255-964f-54c971c02ded] Successfully created port: 3e7ac607-a43c-4448-814a-c9266b8fe7db {{(pid=61839) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 621.723652] env[61839]: DEBUG nova.compute.manager [None req-db043505-be83-48ec-924b-c8d945a5cedf tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: 7d9a24e0-c265-4255-964f-54c971c02ded] Start building block device mappings for instance. 
{{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 621.738137] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-502d17ab-520f-4677-a5ff-67d22119f63e {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.746889] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c1503ac-ad17-4719-8758-bdde1e91874a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.781251] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa488ae5-52cd-46ef-b67b-c30e96064746 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.789197] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3e435e3-6f38-47f3-af03-0bb26c72649c {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.803109] env[61839]: DEBUG nova.compute.provider_tree [None req-35b58d39-5733-4319-be91-b1dc8063c680 tempest-FloatingIPsAssociationNegativeTestJSON-254000745 tempest-FloatingIPsAssociationNegativeTestJSON-254000745-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 621.947558] env[61839]: DEBUG oslo_vmware.api [None req-d5b699c0-0044-418d-93de-9c5330e52324 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52236a36-27d5-27f5-9158-403c73e17d11, 'name': SearchDatastore_Task, 'duration_secs': 0.008538} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 621.948485] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ab1a6a89-8a47-4076-9646-bcba9491ad10 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.953779] env[61839]: DEBUG oslo_vmware.api [None req-d5b699c0-0044-418d-93de-9c5330e52324 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] Waiting for the task: (returnval){ [ 621.953779] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52fcd2c3-338c-e889-5c47-ef43e85699ea" [ 621.953779] env[61839]: _type = "Task" [ 621.953779] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 621.961089] env[61839]: DEBUG oslo_vmware.api [None req-d5b699c0-0044-418d-93de-9c5330e52324 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52fcd2c3-338c-e889-5c47-ef43e85699ea, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 622.306204] env[61839]: DEBUG nova.scheduler.client.report [None req-35b58d39-5733-4319-be91-b1dc8063c680 tempest-FloatingIPsAssociationNegativeTestJSON-254000745 tempest-FloatingIPsAssociationNegativeTestJSON-254000745-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 622.466219] env[61839]: DEBUG oslo_vmware.api [None req-d5b699c0-0044-418d-93de-9c5330e52324 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52fcd2c3-338c-e889-5c47-ef43e85699ea, 'name': SearchDatastore_Task, 'duration_secs': 0.00842} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 622.466637] env[61839]: DEBUG oslo_concurrency.lockutils [None req-d5b699c0-0044-418d-93de-9c5330e52324 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 622.467029] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-d5b699c0-0044-418d-93de-9c5330e52324 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk to [datastore1] d8b0b608-d8ca-45ce-a113-ee96b5ef1a9d/d8b0b608-d8ca-45ce-a113-ee96b5ef1a9d.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 622.467570] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-93b67b29-d029-47ff-831d-3077a3c96f55 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.475357] env[61839]: DEBUG oslo_vmware.api [None req-d5b699c0-0044-418d-93de-9c5330e52324 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] Waiting for the task: (returnval){ [ 622.475357] env[61839]: value = "task-1314274" [ 622.475357] env[61839]: _type = "Task" [ 622.475357] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 622.482812] env[61839]: DEBUG oslo_vmware.api [None req-d5b699c0-0044-418d-93de-9c5330e52324 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] Task: {'id': task-1314274, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 622.618485] env[61839]: DEBUG nova.compute.manager [req-71764f79-9504-462e-9746-25d61d22c4b7 req-156602fa-b053-4a29-8644-89c7a38e16cb service nova] [instance: 7d9a24e0-c265-4255-964f-54c971c02ded] Received event network-changed-3e7ac607-a43c-4448-814a-c9266b8fe7db {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 622.618726] env[61839]: DEBUG nova.compute.manager [req-71764f79-9504-462e-9746-25d61d22c4b7 req-156602fa-b053-4a29-8644-89c7a38e16cb service nova] [instance: 7d9a24e0-c265-4255-964f-54c971c02ded] Refreshing instance network info cache due to event network-changed-3e7ac607-a43c-4448-814a-c9266b8fe7db. {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 622.618982] env[61839]: DEBUG oslo_concurrency.lockutils [req-71764f79-9504-462e-9746-25d61d22c4b7 req-156602fa-b053-4a29-8644-89c7a38e16cb service nova] Acquiring lock "refresh_cache-7d9a24e0-c265-4255-964f-54c971c02ded" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 622.619179] env[61839]: DEBUG oslo_concurrency.lockutils [req-71764f79-9504-462e-9746-25d61d22c4b7 req-156602fa-b053-4a29-8644-89c7a38e16cb service nova] Acquired lock "refresh_cache-7d9a24e0-c265-4255-964f-54c971c02ded" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 622.619392] env[61839]: DEBUG nova.network.neutron [req-71764f79-9504-462e-9746-25d61d22c4b7 req-156602fa-b053-4a29-8644-89c7a38e16cb service nova] [instance: 7d9a24e0-c265-4255-964f-54c971c02ded] Refreshing network info cache for port 3e7ac607-a43c-4448-814a-c9266b8fe7db {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 622.734422] env[61839]: DEBUG nova.compute.manager [None req-db043505-be83-48ec-924b-c8d945a5cedf tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: 7d9a24e0-c265-4255-964f-54c971c02ded] Start spawning the instance on the hypervisor. 
{{(pid=61839) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 622.764283] env[61839]: DEBUG nova.virt.hardware [None req-db043505-be83-48ec-924b-c8d945a5cedf tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-18T16:51:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-18T16:50:57Z,direct_url=,disk_format='vmdk',id=e497cc62-282a-4a70-9770-22d80d8a1013,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a98e034276e44534a9feace637762da7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-18T16:50:58Z,virtual_size=,visibility=), allow threads: False {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 622.764283] env[61839]: DEBUG nova.virt.hardware [None req-db043505-be83-48ec-924b-c8d945a5cedf tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Flavor limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 622.764803] env[61839]: DEBUG nova.virt.hardware [None req-db043505-be83-48ec-924b-c8d945a5cedf tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Image limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 622.764803] env[61839]: DEBUG nova.virt.hardware [None req-db043505-be83-48ec-924b-c8d945a5cedf tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Flavor pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 622.764954] env[61839]: DEBUG nova.virt.hardware [None req-db043505-be83-48ec-924b-c8d945a5cedf tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Image pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 622.765112] env[61839]: DEBUG nova.virt.hardware [None req-db043505-be83-48ec-924b-c8d945a5cedf tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 622.765319] env[61839]: DEBUG nova.virt.hardware [None req-db043505-be83-48ec-924b-c8d945a5cedf tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 622.765472] env[61839]: DEBUG nova.virt.hardware [None req-db043505-be83-48ec-924b-c8d945a5cedf tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 622.765635] env[61839]: DEBUG nova.virt.hardware [None req-db043505-be83-48ec-924b-c8d945a5cedf tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Got 1 possible 
topologies {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 622.769015] env[61839]: DEBUG nova.virt.hardware [None req-db043505-be83-48ec-924b-c8d945a5cedf tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 622.769015] env[61839]: DEBUG nova.virt.hardware [None req-db043505-be83-48ec-924b-c8d945a5cedf tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 622.769015] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d5b3eb0-c62e-4ebc-87ba-73049bf7390b {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.777659] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d49b9a28-4b41-46c5-8525-40c01a07c238 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.812841] env[61839]: DEBUG oslo_concurrency.lockutils [None req-35b58d39-5733-4319-be91-b1dc8063c680 tempest-FloatingIPsAssociationNegativeTestJSON-254000745 tempest-FloatingIPsAssociationNegativeTestJSON-254000745-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.106s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 622.813728] env[61839]: ERROR nova.compute.manager [None req-35b58d39-5733-4319-be91-b1dc8063c680 tempest-FloatingIPsAssociationNegativeTestJSON-254000745 tempest-FloatingIPsAssociationNegativeTestJSON-254000745-project-member] [instance: 9e14bd1a-d6c2-4f4a-8919-27647ae5742b] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port bad8ef9f-10bb-4dde-9386-c21e605b7b42, please check neutron logs for more information. 
[ 622.813728] env[61839]: ERROR nova.compute.manager [instance: 9e14bd1a-d6c2-4f4a-8919-27647ae5742b] Traceback (most recent call last): [ 622.813728] env[61839]: ERROR nova.compute.manager [instance: 9e14bd1a-d6c2-4f4a-8919-27647ae5742b] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 622.813728] env[61839]: ERROR nova.compute.manager [instance: 9e14bd1a-d6c2-4f4a-8919-27647ae5742b] self.driver.spawn(context, instance, image_meta, [ 622.813728] env[61839]: ERROR nova.compute.manager [instance: 9e14bd1a-d6c2-4f4a-8919-27647ae5742b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 622.813728] env[61839]: ERROR nova.compute.manager [instance: 9e14bd1a-d6c2-4f4a-8919-27647ae5742b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 622.813728] env[61839]: ERROR nova.compute.manager [instance: 9e14bd1a-d6c2-4f4a-8919-27647ae5742b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 622.813728] env[61839]: ERROR nova.compute.manager [instance: 9e14bd1a-d6c2-4f4a-8919-27647ae5742b] vm_ref = self.build_virtual_machine(instance, [ 622.813728] env[61839]: ERROR nova.compute.manager [instance: 9e14bd1a-d6c2-4f4a-8919-27647ae5742b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 622.813728] env[61839]: ERROR nova.compute.manager [instance: 9e14bd1a-d6c2-4f4a-8919-27647ae5742b] vif_infos = vmwarevif.get_vif_info(self._session, [ 622.813728] env[61839]: ERROR nova.compute.manager [instance: 9e14bd1a-d6c2-4f4a-8919-27647ae5742b] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 622.814259] env[61839]: ERROR nova.compute.manager [instance: 9e14bd1a-d6c2-4f4a-8919-27647ae5742b] for vif in network_info: [ 622.814259] env[61839]: ERROR nova.compute.manager [instance: 9e14bd1a-d6c2-4f4a-8919-27647ae5742b] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 622.814259] env[61839]: ERROR nova.compute.manager [instance: 9e14bd1a-d6c2-4f4a-8919-27647ae5742b] return self._sync_wrapper(fn, *args, **kwargs) [ 622.814259] env[61839]: ERROR nova.compute.manager [instance: 9e14bd1a-d6c2-4f4a-8919-27647ae5742b] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 622.814259] env[61839]: ERROR nova.compute.manager [instance: 9e14bd1a-d6c2-4f4a-8919-27647ae5742b] self.wait() [ 622.814259] env[61839]: ERROR nova.compute.manager [instance: 9e14bd1a-d6c2-4f4a-8919-27647ae5742b] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 622.814259] env[61839]: ERROR nova.compute.manager [instance: 9e14bd1a-d6c2-4f4a-8919-27647ae5742b] self[:] = self._gt.wait() [ 622.814259] env[61839]: ERROR nova.compute.manager [instance: 9e14bd1a-d6c2-4f4a-8919-27647ae5742b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 622.814259] env[61839]: ERROR nova.compute.manager [instance: 9e14bd1a-d6c2-4f4a-8919-27647ae5742b] return self._exit_event.wait() [ 622.814259] env[61839]: ERROR nova.compute.manager [instance: 9e14bd1a-d6c2-4f4a-8919-27647ae5742b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 622.814259] env[61839]: ERROR nova.compute.manager [instance: 9e14bd1a-d6c2-4f4a-8919-27647ae5742b] result = hub.switch() [ 622.814259] env[61839]: ERROR nova.compute.manager [instance: 9e14bd1a-d6c2-4f4a-8919-27647ae5742b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
622.814259] env[61839]: ERROR nova.compute.manager [instance: 9e14bd1a-d6c2-4f4a-8919-27647ae5742b] return self.greenlet.switch() [ 622.814721] env[61839]: ERROR nova.compute.manager [instance: 9e14bd1a-d6c2-4f4a-8919-27647ae5742b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 622.814721] env[61839]: ERROR nova.compute.manager [instance: 9e14bd1a-d6c2-4f4a-8919-27647ae5742b] result = function(*args, **kwargs) [ 622.814721] env[61839]: ERROR nova.compute.manager [instance: 9e14bd1a-d6c2-4f4a-8919-27647ae5742b] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 622.814721] env[61839]: ERROR nova.compute.manager [instance: 9e14bd1a-d6c2-4f4a-8919-27647ae5742b] return func(*args, **kwargs) [ 622.814721] env[61839]: ERROR nova.compute.manager [instance: 9e14bd1a-d6c2-4f4a-8919-27647ae5742b] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 622.814721] env[61839]: ERROR nova.compute.manager [instance: 9e14bd1a-d6c2-4f4a-8919-27647ae5742b] raise e [ 622.814721] env[61839]: ERROR nova.compute.manager [instance: 9e14bd1a-d6c2-4f4a-8919-27647ae5742b] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 622.814721] env[61839]: ERROR nova.compute.manager [instance: 9e14bd1a-d6c2-4f4a-8919-27647ae5742b] nwinfo = self.network_api.allocate_for_instance( [ 622.814721] env[61839]: ERROR nova.compute.manager [instance: 9e14bd1a-d6c2-4f4a-8919-27647ae5742b] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 622.814721] env[61839]: ERROR nova.compute.manager [instance: 9e14bd1a-d6c2-4f4a-8919-27647ae5742b] created_port_ids = self._update_ports_for_instance( [ 622.814721] env[61839]: ERROR nova.compute.manager [instance: 9e14bd1a-d6c2-4f4a-8919-27647ae5742b] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 622.814721] env[61839]: ERROR nova.compute.manager [instance: 9e14bd1a-d6c2-4f4a-8919-27647ae5742b] with excutils.save_and_reraise_exception(): [ 622.814721] env[61839]: ERROR nova.compute.manager [instance: 9e14bd1a-d6c2-4f4a-8919-27647ae5742b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 622.815103] env[61839]: ERROR nova.compute.manager [instance: 9e14bd1a-d6c2-4f4a-8919-27647ae5742b] self.force_reraise() [ 622.815103] env[61839]: ERROR nova.compute.manager [instance: 9e14bd1a-d6c2-4f4a-8919-27647ae5742b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 622.815103] env[61839]: ERROR nova.compute.manager [instance: 9e14bd1a-d6c2-4f4a-8919-27647ae5742b] raise self.value [ 622.815103] env[61839]: ERROR nova.compute.manager [instance: 9e14bd1a-d6c2-4f4a-8919-27647ae5742b] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 622.815103] env[61839]: ERROR nova.compute.manager [instance: 9e14bd1a-d6c2-4f4a-8919-27647ae5742b] updated_port = self._update_port( [ 622.815103] env[61839]: ERROR nova.compute.manager [instance: 9e14bd1a-d6c2-4f4a-8919-27647ae5742b] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 622.815103] env[61839]: ERROR nova.compute.manager [instance: 9e14bd1a-d6c2-4f4a-8919-27647ae5742b] _ensure_no_port_binding_failure(port) [ 622.815103] env[61839]: ERROR nova.compute.manager [instance: 9e14bd1a-d6c2-4f4a-8919-27647ae5742b] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 622.815103] env[61839]: ERROR nova.compute.manager [instance: 9e14bd1a-d6c2-4f4a-8919-27647ae5742b] raise exception.PortBindingFailed(port_id=port['id']) [ 622.815103] env[61839]: ERROR nova.compute.manager [instance: 9e14bd1a-d6c2-4f4a-8919-27647ae5742b] nova.exception.PortBindingFailed: Binding failed for port bad8ef9f-10bb-4dde-9386-c21e605b7b42, please check neutron logs for more information. [ 622.815103] env[61839]: ERROR nova.compute.manager [instance: 9e14bd1a-d6c2-4f4a-8919-27647ae5742b] [ 622.815412] env[61839]: DEBUG nova.compute.utils [None req-35b58d39-5733-4319-be91-b1dc8063c680 tempest-FloatingIPsAssociationNegativeTestJSON-254000745 tempest-FloatingIPsAssociationNegativeTestJSON-254000745-project-member] [instance: 9e14bd1a-d6c2-4f4a-8919-27647ae5742b] Binding failed for port bad8ef9f-10bb-4dde-9386-c21e605b7b42, please check neutron logs for more information. {{(pid=61839) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 622.815657] env[61839]: DEBUG oslo_concurrency.lockutils [None req-758c0332-86e7-48d1-9a5c-4459947020cd tempest-AttachInterfacesV270Test-504327934 tempest-AttachInterfacesV270Test-504327934-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.601s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 622.817166] env[61839]: INFO nova.compute.claims [None req-758c0332-86e7-48d1-9a5c-4459947020cd tempest-AttachInterfacesV270Test-504327934 tempest-AttachInterfacesV270Test-504327934-project-member] [instance: 5a130776-5e5f-4eec-8574-08aa1f1ef97a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 622.820160] env[61839]: DEBUG nova.compute.manager [None req-35b58d39-5733-4319-be91-b1dc8063c680 tempest-FloatingIPsAssociationNegativeTestJSON-254000745 tempest-FloatingIPsAssociationNegativeTestJSON-254000745-project-member] [instance: 9e14bd1a-d6c2-4f4a-8919-27647ae5742b] Build of instance 9e14bd1a-d6c2-4f4a-8919-27647ae5742b was re-scheduled: Binding failed for port bad8ef9f-10bb-4dde-9386-c21e605b7b42, please check neutron logs for more information. 
{{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 622.820573] env[61839]: DEBUG nova.compute.manager [None req-35b58d39-5733-4319-be91-b1dc8063c680 tempest-FloatingIPsAssociationNegativeTestJSON-254000745 tempest-FloatingIPsAssociationNegativeTestJSON-254000745-project-member] [instance: 9e14bd1a-d6c2-4f4a-8919-27647ae5742b] Unplugging VIFs for instance {{(pid=61839) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 622.820808] env[61839]: DEBUG oslo_concurrency.lockutils [None req-35b58d39-5733-4319-be91-b1dc8063c680 tempest-FloatingIPsAssociationNegativeTestJSON-254000745 tempest-FloatingIPsAssociationNegativeTestJSON-254000745-project-member] Acquiring lock "refresh_cache-9e14bd1a-d6c2-4f4a-8919-27647ae5742b" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 622.820956] env[61839]: DEBUG oslo_concurrency.lockutils [None req-35b58d39-5733-4319-be91-b1dc8063c680 tempest-FloatingIPsAssociationNegativeTestJSON-254000745 tempest-FloatingIPsAssociationNegativeTestJSON-254000745-project-member] Acquired lock "refresh_cache-9e14bd1a-d6c2-4f4a-8919-27647ae5742b" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 622.821142] env[61839]: DEBUG nova.network.neutron [None req-35b58d39-5733-4319-be91-b1dc8063c680 tempest-FloatingIPsAssociationNegativeTestJSON-254000745 tempest-FloatingIPsAssociationNegativeTestJSON-254000745-project-member] [instance: 9e14bd1a-d6c2-4f4a-8919-27647ae5742b] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 622.873883] env[61839]: ERROR nova.compute.manager [None req-db043505-be83-48ec-924b-c8d945a5cedf tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 3e7ac607-a43c-4448-814a-c9266b8fe7db, please check neutron logs for more information. 
[ 622.873883] env[61839]: ERROR nova.compute.manager Traceback (most recent call last): [ 622.873883] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 622.873883] env[61839]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 622.873883] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 622.873883] env[61839]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 622.873883] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 622.873883] env[61839]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 622.873883] env[61839]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 622.873883] env[61839]: ERROR nova.compute.manager self.force_reraise() [ 622.873883] env[61839]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 622.873883] env[61839]: ERROR nova.compute.manager raise self.value [ 622.873883] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 622.873883] env[61839]: ERROR nova.compute.manager updated_port = self._update_port( [ 622.873883] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 622.873883] env[61839]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 622.874408] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 622.874408] env[61839]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 622.874408] env[61839]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 3e7ac607-a43c-4448-814a-c9266b8fe7db, please check neutron logs for more information. 
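Annotation: the two tracebacks around timestamps 622.813 and 622.873 record the same failure mode twice. Neutron reports a failed binding for a port, nova.network.neutron._ensure_no_port_binding_failure raises PortBindingFailed, and _update_ports_for_instance re-raises it through oslo_utils' save_and_reraise_exception context manager so cleanup can run without swallowing the original error (the force_reraise() frames in the log). Below is a minimal, self-contained sketch of that re-raise pattern, assuming only that oslo.utils is installed; the PortBindingFailed class and the port dict are simplified stand-ins for illustration, not Nova's actual definitions.

    # Sketch of the except-block re-raise pattern visible in the traceback above.
    # PortBindingFailed here is a stand-in for nova.exception.PortBindingFailed.
    from oslo_utils import excutils

    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(f"Binding failed for port {port_id}, "
                             "please check neutron logs for more information.")

    def _ensure_no_port_binding_failure(port):
        # Neutron signals a failed binding via the port's binding:vif_type field.
        if port.get('binding:vif_type') == 'binding_failed':
            raise PortBindingFailed(port_id=port['id'])

    def update_port(port):
        try:
            _ensure_no_port_binding_failure(port)
        except Exception:
            # __enter__ captures sys.exc_info(); __exit__ re-raises it after the
            # block body (cleanup) has run, preserving the original traceback
            # (the force_reraise() frames seen in the log).
            with excutils.save_and_reraise_exception():
                print('cleanup before re-raise')

    # Usage: the cleanup print runs, then the original exception propagates.
    try:
        update_port({'id': 'bad8ef9f', 'binding:vif_type': 'binding_failed'})
    except PortBindingFailed as exc:
        print(exc)
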
[ 622.874408] env[61839]: ERROR nova.compute.manager [ 622.874408] env[61839]: Traceback (most recent call last): [ 622.874408] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 622.874408] env[61839]: listener.cb(fileno) [ 622.874408] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 622.874408] env[61839]: result = function(*args, **kwargs) [ 622.874408] env[61839]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 622.874408] env[61839]: return func(*args, **kwargs) [ 622.874408] env[61839]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 622.874408] env[61839]: raise e [ 622.874408] env[61839]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 622.874408] env[61839]: nwinfo = self.network_api.allocate_for_instance( [ 622.874408] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 622.874408] env[61839]: created_port_ids = self._update_ports_for_instance( [ 622.874408] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 622.874408] env[61839]: with excutils.save_and_reraise_exception(): [ 622.874408] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 622.874408] env[61839]: self.force_reraise() [ 622.874408] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 622.874408] env[61839]: raise self.value [ 622.874408] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 622.874408] env[61839]: updated_port = self._update_port( [ 622.874408] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 622.874408] env[61839]: _ensure_no_port_binding_failure(port) [ 622.874408] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 622.874408] env[61839]: raise exception.PortBindingFailed(port_id=port['id']) [ 622.875193] env[61839]: nova.exception.PortBindingFailed: Binding failed for port 3e7ac607-a43c-4448-814a-c9266b8fe7db, please check neutron logs for more information. [ 622.875193] env[61839]: Removing descriptor: 17 [ 622.875193] env[61839]: ERROR nova.compute.manager [None req-db043505-be83-48ec-924b-c8d945a5cedf tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: 7d9a24e0-c265-4255-964f-54c971c02ded] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 3e7ac607-a43c-4448-814a-c9266b8fe7db, please check neutron logs for more information. 
[ 622.875193] env[61839]: ERROR nova.compute.manager [instance: 7d9a24e0-c265-4255-964f-54c971c02ded] Traceback (most recent call last): [ 622.875193] env[61839]: ERROR nova.compute.manager [instance: 7d9a24e0-c265-4255-964f-54c971c02ded] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 622.875193] env[61839]: ERROR nova.compute.manager [instance: 7d9a24e0-c265-4255-964f-54c971c02ded] yield resources [ 622.875193] env[61839]: ERROR nova.compute.manager [instance: 7d9a24e0-c265-4255-964f-54c971c02ded] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 622.875193] env[61839]: ERROR nova.compute.manager [instance: 7d9a24e0-c265-4255-964f-54c971c02ded] self.driver.spawn(context, instance, image_meta, [ 622.875193] env[61839]: ERROR nova.compute.manager [instance: 7d9a24e0-c265-4255-964f-54c971c02ded] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 622.875193] env[61839]: ERROR nova.compute.manager [instance: 7d9a24e0-c265-4255-964f-54c971c02ded] self._vmops.spawn(context, instance, image_meta, injected_files, [ 622.875193] env[61839]: ERROR nova.compute.manager [instance: 7d9a24e0-c265-4255-964f-54c971c02ded] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 622.875193] env[61839]: ERROR nova.compute.manager [instance: 7d9a24e0-c265-4255-964f-54c971c02ded] vm_ref = self.build_virtual_machine(instance, [ 622.875489] env[61839]: ERROR nova.compute.manager [instance: 7d9a24e0-c265-4255-964f-54c971c02ded] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 622.875489] env[61839]: ERROR nova.compute.manager [instance: 7d9a24e0-c265-4255-964f-54c971c02ded] vif_infos = vmwarevif.get_vif_info(self._session, [ 622.875489] env[61839]: ERROR nova.compute.manager [instance: 7d9a24e0-c265-4255-964f-54c971c02ded] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 622.875489] env[61839]: ERROR nova.compute.manager [instance: 7d9a24e0-c265-4255-964f-54c971c02ded] for vif in network_info: [ 622.875489] env[61839]: ERROR nova.compute.manager [instance: 7d9a24e0-c265-4255-964f-54c971c02ded] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 622.875489] env[61839]: ERROR nova.compute.manager [instance: 7d9a24e0-c265-4255-964f-54c971c02ded] return self._sync_wrapper(fn, *args, **kwargs) [ 622.875489] env[61839]: ERROR nova.compute.manager [instance: 7d9a24e0-c265-4255-964f-54c971c02ded] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 622.875489] env[61839]: ERROR nova.compute.manager [instance: 7d9a24e0-c265-4255-964f-54c971c02ded] self.wait() [ 622.875489] env[61839]: ERROR nova.compute.manager [instance: 7d9a24e0-c265-4255-964f-54c971c02ded] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 622.875489] env[61839]: ERROR nova.compute.manager [instance: 7d9a24e0-c265-4255-964f-54c971c02ded] self[:] = self._gt.wait() [ 622.875489] env[61839]: ERROR nova.compute.manager [instance: 7d9a24e0-c265-4255-964f-54c971c02ded] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 622.875489] env[61839]: ERROR nova.compute.manager [instance: 7d9a24e0-c265-4255-964f-54c971c02ded] return self._exit_event.wait() [ 622.875489] env[61839]: ERROR nova.compute.manager [instance: 7d9a24e0-c265-4255-964f-54c971c02ded] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 622.875904] env[61839]: ERROR 
nova.compute.manager [instance: 7d9a24e0-c265-4255-964f-54c971c02ded] result = hub.switch() [ 622.875904] env[61839]: ERROR nova.compute.manager [instance: 7d9a24e0-c265-4255-964f-54c971c02ded] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 622.875904] env[61839]: ERROR nova.compute.manager [instance: 7d9a24e0-c265-4255-964f-54c971c02ded] return self.greenlet.switch() [ 622.875904] env[61839]: ERROR nova.compute.manager [instance: 7d9a24e0-c265-4255-964f-54c971c02ded] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 622.875904] env[61839]: ERROR nova.compute.manager [instance: 7d9a24e0-c265-4255-964f-54c971c02ded] result = function(*args, **kwargs) [ 622.875904] env[61839]: ERROR nova.compute.manager [instance: 7d9a24e0-c265-4255-964f-54c971c02ded] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 622.875904] env[61839]: ERROR nova.compute.manager [instance: 7d9a24e0-c265-4255-964f-54c971c02ded] return func(*args, **kwargs) [ 622.875904] env[61839]: ERROR nova.compute.manager [instance: 7d9a24e0-c265-4255-964f-54c971c02ded] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 622.875904] env[61839]: ERROR nova.compute.manager [instance: 7d9a24e0-c265-4255-964f-54c971c02ded] raise e [ 622.875904] env[61839]: ERROR nova.compute.manager [instance: 7d9a24e0-c265-4255-964f-54c971c02ded] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 622.875904] env[61839]: ERROR nova.compute.manager [instance: 7d9a24e0-c265-4255-964f-54c971c02ded] nwinfo = self.network_api.allocate_for_instance( [ 622.875904] env[61839]: ERROR nova.compute.manager [instance: 7d9a24e0-c265-4255-964f-54c971c02ded] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 622.875904] env[61839]: ERROR nova.compute.manager [instance: 7d9a24e0-c265-4255-964f-54c971c02ded] created_port_ids = self._update_ports_for_instance( [ 622.876317] env[61839]: ERROR nova.compute.manager [instance: 7d9a24e0-c265-4255-964f-54c971c02ded] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 622.876317] env[61839]: ERROR nova.compute.manager [instance: 7d9a24e0-c265-4255-964f-54c971c02ded] with excutils.save_and_reraise_exception(): [ 622.876317] env[61839]: ERROR nova.compute.manager [instance: 7d9a24e0-c265-4255-964f-54c971c02ded] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 622.876317] env[61839]: ERROR nova.compute.manager [instance: 7d9a24e0-c265-4255-964f-54c971c02ded] self.force_reraise() [ 622.876317] env[61839]: ERROR nova.compute.manager [instance: 7d9a24e0-c265-4255-964f-54c971c02ded] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 622.876317] env[61839]: ERROR nova.compute.manager [instance: 7d9a24e0-c265-4255-964f-54c971c02ded] raise self.value [ 622.876317] env[61839]: ERROR nova.compute.manager [instance: 7d9a24e0-c265-4255-964f-54c971c02ded] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 622.876317] env[61839]: ERROR nova.compute.manager [instance: 7d9a24e0-c265-4255-964f-54c971c02ded] updated_port = self._update_port( [ 622.876317] env[61839]: ERROR nova.compute.manager [instance: 7d9a24e0-c265-4255-964f-54c971c02ded] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 622.876317] 
env[61839]: ERROR nova.compute.manager [instance: 7d9a24e0-c265-4255-964f-54c971c02ded] _ensure_no_port_binding_failure(port) [ 622.876317] env[61839]: ERROR nova.compute.manager [instance: 7d9a24e0-c265-4255-964f-54c971c02ded] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 622.876317] env[61839]: ERROR nova.compute.manager [instance: 7d9a24e0-c265-4255-964f-54c971c02ded] raise exception.PortBindingFailed(port_id=port['id']) [ 622.876608] env[61839]: ERROR nova.compute.manager [instance: 7d9a24e0-c265-4255-964f-54c971c02ded] nova.exception.PortBindingFailed: Binding failed for port 3e7ac607-a43c-4448-814a-c9266b8fe7db, please check neutron logs for more information. [ 622.876608] env[61839]: ERROR nova.compute.manager [instance: 7d9a24e0-c265-4255-964f-54c971c02ded] [ 622.876608] env[61839]: INFO nova.compute.manager [None req-db043505-be83-48ec-924b-c8d945a5cedf tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: 7d9a24e0-c265-4255-964f-54c971c02ded] Terminating instance [ 622.877546] env[61839]: DEBUG oslo_concurrency.lockutils [None req-db043505-be83-48ec-924b-c8d945a5cedf tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Acquiring lock "refresh_cache-7d9a24e0-c265-4255-964f-54c971c02ded" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 622.984902] env[61839]: DEBUG oslo_vmware.api [None req-d5b699c0-0044-418d-93de-9c5330e52324 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] Task: {'id': task-1314274, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.475413} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 622.988342] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-d5b699c0-0044-418d-93de-9c5330e52324 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk to [datastore1] d8b0b608-d8ca-45ce-a113-ee96b5ef1a9d/d8b0b608-d8ca-45ce-a113-ee96b5ef1a9d.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 622.988575] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-d5b699c0-0044-418d-93de-9c5330e52324 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] [instance: d8b0b608-d8ca-45ce-a113-ee96b5ef1a9d] Extending root virtual disk to 1048576 {{(pid=61839) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 622.988827] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-70cb3f2c-d8b2-41b3-af84-8b61b062fa9f {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.995822] env[61839]: DEBUG oslo_vmware.api [None req-d5b699c0-0044-418d-93de-9c5330e52324 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] Waiting for the task: (returnval){ [ 622.995822] env[61839]: value = "task-1314275" [ 622.995822] env[61839]: _type = "Task" [ 622.995822] env[61839]: } to complete. 
{{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 623.004324] env[61839]: DEBUG oslo_vmware.api [None req-d5b699c0-0044-418d-93de-9c5330e52324 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] Task: {'id': task-1314275, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 623.152309] env[61839]: DEBUG nova.network.neutron [req-71764f79-9504-462e-9746-25d61d22c4b7 req-156602fa-b053-4a29-8644-89c7a38e16cb service nova] [instance: 7d9a24e0-c265-4255-964f-54c971c02ded] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 623.255522] env[61839]: DEBUG nova.network.neutron [req-71764f79-9504-462e-9746-25d61d22c4b7 req-156602fa-b053-4a29-8644-89c7a38e16cb service nova] [instance: 7d9a24e0-c265-4255-964f-54c971c02ded] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 623.348797] env[61839]: DEBUG nova.network.neutron [None req-35b58d39-5733-4319-be91-b1dc8063c680 tempest-FloatingIPsAssociationNegativeTestJSON-254000745 tempest-FloatingIPsAssociationNegativeTestJSON-254000745-project-member] [instance: 9e14bd1a-d6c2-4f4a-8919-27647ae5742b] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 623.461119] env[61839]: DEBUG nova.network.neutron [None req-35b58d39-5733-4319-be91-b1dc8063c680 tempest-FloatingIPsAssociationNegativeTestJSON-254000745 tempest-FloatingIPsAssociationNegativeTestJSON-254000745-project-member] [instance: 9e14bd1a-d6c2-4f4a-8919-27647ae5742b] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 623.511347] env[61839]: DEBUG oslo_vmware.api [None req-d5b699c0-0044-418d-93de-9c5330e52324 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] Task: {'id': task-1314275, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.061687} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 623.511347] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-d5b699c0-0044-418d-93de-9c5330e52324 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] [instance: d8b0b608-d8ca-45ce-a113-ee96b5ef1a9d] Extended root virtual disk {{(pid=61839) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 623.511704] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a10af65d-5e47-4236-a396-8150ca339b05 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.537758] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-d5b699c0-0044-418d-93de-9c5330e52324 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] [instance: d8b0b608-d8ca-45ce-a113-ee96b5ef1a9d] Reconfiguring VM instance instance-00000019 to attach disk [datastore1] d8b0b608-d8ca-45ce-a113-ee96b5ef1a9d/d8b0b608-d8ca-45ce-a113-ee96b5ef1a9d.vmdk or device None with type sparse {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 623.538058] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f4e04c08-1b57-4ca9-9d9a-352ab09f2b11 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.557498] env[61839]: DEBUG oslo_vmware.api [None req-d5b699c0-0044-418d-93de-9c5330e52324 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] Waiting for the task: (returnval){ [ 623.557498] env[61839]: value = "task-1314276" [ 623.557498] env[61839]: _type = "Task" [ 623.557498] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 623.565202] env[61839]: DEBUG oslo_vmware.api [None req-d5b699c0-0044-418d-93de-9c5330e52324 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] Task: {'id': task-1314276, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 623.757897] env[61839]: DEBUG oslo_concurrency.lockutils [req-71764f79-9504-462e-9746-25d61d22c4b7 req-156602fa-b053-4a29-8644-89c7a38e16cb service nova] Releasing lock "refresh_cache-7d9a24e0-c265-4255-964f-54c971c02ded" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 623.758353] env[61839]: DEBUG oslo_concurrency.lockutils [None req-db043505-be83-48ec-924b-c8d945a5cedf tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Acquired lock "refresh_cache-7d9a24e0-c265-4255-964f-54c971c02ded" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 623.758539] env[61839]: DEBUG nova.network.neutron [None req-db043505-be83-48ec-924b-c8d945a5cedf tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: 7d9a24e0-c265-4255-964f-54c971c02ded] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 623.967203] env[61839]: DEBUG oslo_concurrency.lockutils [None req-35b58d39-5733-4319-be91-b1dc8063c680 tempest-FloatingIPsAssociationNegativeTestJSON-254000745 tempest-FloatingIPsAssociationNegativeTestJSON-254000745-project-member] Releasing lock "refresh_cache-9e14bd1a-d6c2-4f4a-8919-27647ae5742b" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 623.967499] env[61839]: DEBUG nova.compute.manager [None req-35b58d39-5733-4319-be91-b1dc8063c680 tempest-FloatingIPsAssociationNegativeTestJSON-254000745 tempest-FloatingIPsAssociationNegativeTestJSON-254000745-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61839) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 623.967714] env[61839]: DEBUG nova.compute.manager [None req-35b58d39-5733-4319-be91-b1dc8063c680 tempest-FloatingIPsAssociationNegativeTestJSON-254000745 tempest-FloatingIPsAssociationNegativeTestJSON-254000745-project-member] [instance: 9e14bd1a-d6c2-4f4a-8919-27647ae5742b] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 623.968156] env[61839]: DEBUG nova.network.neutron [None req-35b58d39-5733-4319-be91-b1dc8063c680 tempest-FloatingIPsAssociationNegativeTestJSON-254000745 tempest-FloatingIPsAssociationNegativeTestJSON-254000745-project-member] [instance: 9e14bd1a-d6c2-4f4a-8919-27647ae5742b] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 623.998310] env[61839]: DEBUG nova.network.neutron [None req-35b58d39-5733-4319-be91-b1dc8063c680 tempest-FloatingIPsAssociationNegativeTestJSON-254000745 tempest-FloatingIPsAssociationNegativeTestJSON-254000745-project-member] [instance: 9e14bd1a-d6c2-4f4a-8919-27647ae5742b] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 624.068969] env[61839]: DEBUG oslo_vmware.api [None req-d5b699c0-0044-418d-93de-9c5330e52324 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] Task: {'id': task-1314276, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 624.296758] env[61839]: DEBUG nova.network.neutron [None req-db043505-be83-48ec-924b-c8d945a5cedf tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: 7d9a24e0-c265-4255-964f-54c971c02ded] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 624.315021] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b40c8d3-2221-47b2-abfd-040d7ed9481f {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.324128] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10f09099-8b3e-4530-b6ae-6e5222acb225 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.356854] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf95ad40-4ae0-4f6a-b774-96b5517a655b {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.364505] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59cd84a8-ec24-4632-8bd4-4817ad632d37 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.378313] env[61839]: DEBUG nova.compute.provider_tree [None req-758c0332-86e7-48d1-9a5c-4459947020cd tempest-AttachInterfacesV270Test-504327934 tempest-AttachInterfacesV270Test-504327934-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 624.453666] env[61839]: DEBUG nova.network.neutron [None req-db043505-be83-48ec-924b-c8d945a5cedf tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: 7d9a24e0-c265-4255-964f-54c971c02ded] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 624.499956] env[61839]: DEBUG nova.network.neutron [None req-35b58d39-5733-4319-be91-b1dc8063c680 tempest-FloatingIPsAssociationNegativeTestJSON-254000745 tempest-FloatingIPsAssociationNegativeTestJSON-254000745-project-member] [instance: 9e14bd1a-d6c2-4f4a-8919-27647ae5742b] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 624.570582] env[61839]: DEBUG oslo_vmware.api [None req-d5b699c0-0044-418d-93de-9c5330e52324 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] Task: {'id': task-1314276, 'name': ReconfigVM_Task, 'duration_secs': 0.740678} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 624.571013] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-d5b699c0-0044-418d-93de-9c5330e52324 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] [instance: d8b0b608-d8ca-45ce-a113-ee96b5ef1a9d] Reconfigured VM instance instance-00000019 to attach disk [datastore1] d8b0b608-d8ca-45ce-a113-ee96b5ef1a9d/d8b0b608-d8ca-45ce-a113-ee96b5ef1a9d.vmdk or device None with type sparse {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 624.571838] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-cb93c977-6a95-449f-b7f6-7290d8fb6176 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.579686] env[61839]: DEBUG oslo_vmware.api [None req-d5b699c0-0044-418d-93de-9c5330e52324 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] Waiting for the task: (returnval){ [ 624.579686] env[61839]: value = "task-1314277" [ 624.579686] env[61839]: _type = "Task" [ 624.579686] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 624.589611] env[61839]: DEBUG oslo_vmware.api [None req-d5b699c0-0044-418d-93de-9c5330e52324 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] Task: {'id': task-1314277, 'name': Rename_Task} progress is 5%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 624.883420] env[61839]: DEBUG nova.scheduler.client.report [None req-758c0332-86e7-48d1-9a5c-4459947020cd tempest-AttachInterfacesV270Test-504327934 tempest-AttachInterfacesV270Test-504327934-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 624.918447] env[61839]: DEBUG nova.compute.manager [req-f6d271d1-4469-4b43-bb52-e950456b5193 req-8e6774cd-8833-4eab-8e9e-a207ce52e64e service nova] [instance: 7d9a24e0-c265-4255-964f-54c971c02ded] Received event network-vif-deleted-3e7ac607-a43c-4448-814a-c9266b8fe7db {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 624.956591] env[61839]: DEBUG oslo_concurrency.lockutils [None req-db043505-be83-48ec-924b-c8d945a5cedf tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Releasing lock "refresh_cache-7d9a24e0-c265-4255-964f-54c971c02ded" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 624.957042] env[61839]: DEBUG nova.compute.manager [None req-db043505-be83-48ec-924b-c8d945a5cedf tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: 7d9a24e0-c265-4255-964f-54c971c02ded] Start destroying the instance on the hypervisor. 
{{(pid=61839) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 624.957238] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-db043505-be83-48ec-924b-c8d945a5cedf tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: 7d9a24e0-c265-4255-964f-54c971c02ded] Destroying instance {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 624.957540] env[61839]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c98b41d2-fe6b-46a0-af59-a12ab1fec45c {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.966603] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e54bb176-7f73-4e1d-82dd-3e58e140c4e0 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.990987] env[61839]: WARNING nova.virt.vmwareapi.vmops [None req-db043505-be83-48ec-924b-c8d945a5cedf tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: 7d9a24e0-c265-4255-964f-54c971c02ded] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 7d9a24e0-c265-4255-964f-54c971c02ded could not be found. [ 624.990987] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-db043505-be83-48ec-924b-c8d945a5cedf tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: 7d9a24e0-c265-4255-964f-54c971c02ded] Instance destroyed {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 624.990987] env[61839]: INFO nova.compute.manager [None req-db043505-be83-48ec-924b-c8d945a5cedf tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: 7d9a24e0-c265-4255-964f-54c971c02ded] Took 0.03 seconds to destroy the instance on the hypervisor. [ 624.991170] env[61839]: DEBUG oslo.service.loopingcall [None req-db043505-be83-48ec-924b-c8d945a5cedf tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 624.991274] env[61839]: DEBUG nova.compute.manager [-] [instance: 7d9a24e0-c265-4255-964f-54c971c02ded] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 624.991346] env[61839]: DEBUG nova.network.neutron [-] [instance: 7d9a24e0-c265-4255-964f-54c971c02ded] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 625.003177] env[61839]: INFO nova.compute.manager [None req-35b58d39-5733-4319-be91-b1dc8063c680 tempest-FloatingIPsAssociationNegativeTestJSON-254000745 tempest-FloatingIPsAssociationNegativeTestJSON-254000745-project-member] [instance: 9e14bd1a-d6c2-4f4a-8919-27647ae5742b] Took 1.03 seconds to deallocate network for instance. [ 625.016323] env[61839]: DEBUG nova.network.neutron [-] [instance: 7d9a24e0-c265-4255-964f-54c971c02ded] Instance cache missing network info. 
{{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 625.089751] env[61839]: DEBUG oslo_vmware.api [None req-d5b699c0-0044-418d-93de-9c5330e52324 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] Task: {'id': task-1314277, 'name': Rename_Task, 'duration_secs': 0.141363} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 625.090141] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-d5b699c0-0044-418d-93de-9c5330e52324 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] [instance: d8b0b608-d8ca-45ce-a113-ee96b5ef1a9d] Powering on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 625.090328] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-18dd9062-89a0-4dc0-a3af-768fe0edad21 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.096369] env[61839]: DEBUG oslo_vmware.api [None req-d5b699c0-0044-418d-93de-9c5330e52324 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] Waiting for the task: (returnval){ [ 625.096369] env[61839]: value = "task-1314278" [ 625.096369] env[61839]: _type = "Task" [ 625.096369] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 625.103590] env[61839]: DEBUG oslo_vmware.api [None req-d5b699c0-0044-418d-93de-9c5330e52324 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] Task: {'id': task-1314278, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 625.260701] env[61839]: DEBUG oslo_concurrency.lockutils [None req-d7ee033d-2803-45eb-9965-f6766e5fb5bc tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Acquiring lock "e81bf730-9cf6-4728-aae4-4962115f8b6f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 625.261107] env[61839]: DEBUG oslo_concurrency.lockutils [None req-d7ee033d-2803-45eb-9965-f6766e5fb5bc tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Lock "e81bf730-9cf6-4728-aae4-4962115f8b6f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 625.289688] env[61839]: DEBUG oslo_concurrency.lockutils [None req-d7ee033d-2803-45eb-9965-f6766e5fb5bc tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Acquiring lock "697222e0-07e5-4a3d-adbe-d5d815cf4756" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 625.289978] env[61839]: DEBUG oslo_concurrency.lockutils [None req-d7ee033d-2803-45eb-9965-f6766e5fb5bc tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Lock "697222e0-07e5-4a3d-adbe-d5d815cf4756" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 625.391047] env[61839]: DEBUG oslo_concurrency.lockutils [None req-758c0332-86e7-48d1-9a5c-4459947020cd tempest-AttachInterfacesV270Test-504327934 tempest-AttachInterfacesV270Test-504327934-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.575s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 625.391371] env[61839]: DEBUG nova.compute.manager [None req-758c0332-86e7-48d1-9a5c-4459947020cd tempest-AttachInterfacesV270Test-504327934 tempest-AttachInterfacesV270Test-504327934-project-member] [instance: 5a130776-5e5f-4eec-8574-08aa1f1ef97a] Start building networks asynchronously for instance. 
{{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}}
[ 625.393921] env[61839]: DEBUG oslo_concurrency.lockutils [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 19.413s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 625.520140] env[61839]: DEBUG nova.network.neutron [-] [instance: 7d9a24e0-c265-4255-964f-54c971c02ded] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 625.606430] env[61839]: DEBUG oslo_vmware.api [None req-d5b699c0-0044-418d-93de-9c5330e52324 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] Task: {'id': task-1314278, 'name': PowerOnVM_Task, 'duration_secs': 0.410847} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 625.606692] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-d5b699c0-0044-418d-93de-9c5330e52324 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] [instance: d8b0b608-d8ca-45ce-a113-ee96b5ef1a9d] Powered on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}}
[ 625.606925] env[61839]: INFO nova.compute.manager [None req-d5b699c0-0044-418d-93de-9c5330e52324 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] [instance: d8b0b608-d8ca-45ce-a113-ee96b5ef1a9d] Took 5.32 seconds to spawn the instance on the hypervisor.
[ 625.607157] env[61839]: DEBUG nova.compute.manager [None req-d5b699c0-0044-418d-93de-9c5330e52324 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] [instance: d8b0b608-d8ca-45ce-a113-ee96b5ef1a9d] Checking state {{(pid=61839) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}}
[ 625.607902] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59f8a16e-5774-4d05-8788-18beea899617 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 625.899726] env[61839]: DEBUG nova.compute.utils [None req-758c0332-86e7-48d1-9a5c-4459947020cd tempest-AttachInterfacesV270Test-504327934 tempest-AttachInterfacesV270Test-504327934-project-member] Using /dev/sd instead of None {{(pid=61839) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}}
[ 625.905306] env[61839]: DEBUG nova.compute.manager [None req-758c0332-86e7-48d1-9a5c-4459947020cd tempest-AttachInterfacesV270Test-504327934 tempest-AttachInterfacesV270Test-504327934-project-member] [instance: 5a130776-5e5f-4eec-8574-08aa1f1ef97a] Allocating IP information in the background. {{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}}
[ 625.905519] env[61839]: DEBUG nova.network.neutron [None req-758c0332-86e7-48d1-9a5c-4459947020cd tempest-AttachInterfacesV270Test-504327934 tempest-AttachInterfacesV270Test-504327934-project-member] [instance: 5a130776-5e5f-4eec-8574-08aa1f1ef97a] allocate_for_instance() {{(pid=61839) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}}
[ 625.970111] env[61839]: DEBUG nova.policy [None req-758c0332-86e7-48d1-9a5c-4459947020cd tempest-AttachInterfacesV270Test-504327934 tempest-AttachInterfacesV270Test-504327934-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2968b88be68041e18b61f3e172f6511c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8062058eaa6c4189ba9d73f9bf2ab428', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61839) authorize /opt/stack/nova/nova/policy.py:201}}
[ 626.023981] env[61839]: INFO nova.compute.manager [-] [instance: 7d9a24e0-c265-4255-964f-54c971c02ded] Took 1.03 seconds to deallocate network for instance.
[ 626.030355] env[61839]: DEBUG nova.compute.claims [None req-db043505-be83-48ec-924b-c8d945a5cedf tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: 7d9a24e0-c265-4255-964f-54c971c02ded] Aborting claim: {{(pid=61839) abort /opt/stack/nova/nova/compute/claims.py:85}}
[ 626.030737] env[61839]: DEBUG oslo_concurrency.lockutils [None req-db043505-be83-48ec-924b-c8d945a5cedf tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 626.044048] env[61839]: INFO nova.scheduler.client.report [None req-35b58d39-5733-4319-be91-b1dc8063c680 tempest-FloatingIPsAssociationNegativeTestJSON-254000745 tempest-FloatingIPsAssociationNegativeTestJSON-254000745-project-member] Deleted allocations for instance 9e14bd1a-d6c2-4f4a-8919-27647ae5742b
[ 626.122752] env[61839]: INFO nova.compute.manager [None req-d5b699c0-0044-418d-93de-9c5330e52324 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] [instance: d8b0b608-d8ca-45ce-a113-ee96b5ef1a9d] Took 27.32 seconds to build instance.
[ 626.335440] env[61839]: DEBUG nova.network.neutron [None req-758c0332-86e7-48d1-9a5c-4459947020cd tempest-AttachInterfacesV270Test-504327934 tempest-AttachInterfacesV270Test-504327934-project-member] [instance: 5a130776-5e5f-4eec-8574-08aa1f1ef97a] Successfully created port: e7dac565-024f-4a6b-8efc-d65497a1bbbf {{(pid=61839) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}}
[ 626.406668] env[61839]: DEBUG nova.compute.manager [None req-758c0332-86e7-48d1-9a5c-4459947020cd tempest-AttachInterfacesV270Test-504327934 tempest-AttachInterfacesV270Test-504327934-project-member] [instance: 5a130776-5e5f-4eec-8574-08aa1f1ef97a] Start building block device mappings for instance. {{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}}
[ 626.431640] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance d95b34bb-bf0f-4a43-a5ad-6ae7770b606c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 626.431812] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance 916834d8-4819-4167-8774-b0a665021ef8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 626.431941] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance 7fdd773e-3a96-4728-b162-0227a415bc96 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 626.432122] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance f8b36479-70a1-4f4e-84f4-e3baf9a56c45 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 626.432255] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance 86a1fc77-26d9-44c7-8f1f-771315769619 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 626.432374] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance d8b0b608-d8ca-45ce-a113-ee96b5ef1a9d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 626.432489] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance 7d9a24e0-c265-4255-964f-54c971c02ded actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 626.432602] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance 5a130776-5e5f-4eec-8574-08aa1f1ef97a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 626.552715] env[61839]: DEBUG oslo_concurrency.lockutils [None req-35b58d39-5733-4319-be91-b1dc8063c680 tempest-FloatingIPsAssociationNegativeTestJSON-254000745 tempest-FloatingIPsAssociationNegativeTestJSON-254000745-project-member] Lock "9e14bd1a-d6c2-4f4a-8919-27647ae5742b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 78.928s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 626.625562] env[61839]: DEBUG oslo_concurrency.lockutils [None req-d5b699c0-0044-418d-93de-9c5330e52324 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] Lock "d8b0b608-d8ca-45ce-a113-ee96b5ef1a9d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 74.027s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 626.936207] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance 328882b4-d1af-4036-b313-ecada7d53899 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 627.058546] env[61839]: DEBUG nova.compute.manager [None req-b7e16cf6-cab6-4bac-b016-9355722d13b1 tempest-ServerPasswordTestJSON-1977854233 tempest-ServerPasswordTestJSON-1977854233-project-member] [instance: 2670f16e-4c44-4b88-937e-9e491f599acb] Starting instance... {{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}}
[ 627.128141] env[61839]: DEBUG nova.compute.manager [None req-fcabcba5-2e8f-494e-bc5c-79e52812344e tempest-ServerActionsTestJSON-1845148809 tempest-ServerActionsTestJSON-1845148809-project-member] [instance: 2432a14e-ec45-452c-9592-de690dbc102e] Starting instance... {{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}}
[ 627.420803] env[61839]: DEBUG nova.compute.manager [None req-758c0332-86e7-48d1-9a5c-4459947020cd tempest-AttachInterfacesV270Test-504327934 tempest-AttachInterfacesV270Test-504327934-project-member] [instance: 5a130776-5e5f-4eec-8574-08aa1f1ef97a] Start spawning the instance on the hypervisor. {{(pid=61839) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}}
[ 627.443057] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance 2670f16e-4c44-4b88-937e-9e491f599acb has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 627.456282] env[61839]: DEBUG nova.virt.hardware [None req-758c0332-86e7-48d1-9a5c-4459947020cd tempest-AttachInterfacesV270Test-504327934 tempest-AttachInterfacesV270Test-504327934-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-18T16:51:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-18T16:50:57Z,direct_url=,disk_format='vmdk',id=e497cc62-282a-4a70-9770-22d80d8a1013,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a98e034276e44534a9feace637762da7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-18T16:50:58Z,virtual_size=,visibility=), allow threads: False {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}}
[ 627.456631] env[61839]: DEBUG nova.virt.hardware [None req-758c0332-86e7-48d1-9a5c-4459947020cd tempest-AttachInterfacesV270Test-504327934 tempest-AttachInterfacesV270Test-504327934-project-member] Flavor limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}}
[ 627.456698] env[61839]: DEBUG nova.virt.hardware [None req-758c0332-86e7-48d1-9a5c-4459947020cd tempest-AttachInterfacesV270Test-504327934 tempest-AttachInterfacesV270Test-504327934-project-member] Image limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 627.456835] env[61839]: DEBUG nova.virt.hardware [None req-758c0332-86e7-48d1-9a5c-4459947020cd tempest-AttachInterfacesV270Test-504327934 tempest-AttachInterfacesV270Test-504327934-project-member] Flavor pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}}
[ 627.456978] env[61839]: DEBUG nova.virt.hardware [None req-758c0332-86e7-48d1-9a5c-4459947020cd tempest-AttachInterfacesV270Test-504327934 tempest-AttachInterfacesV270Test-504327934-project-member] Image pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 627.458438] env[61839]: DEBUG nova.virt.hardware [None req-758c0332-86e7-48d1-9a5c-4459947020cd tempest-AttachInterfacesV270Test-504327934 tempest-AttachInterfacesV270Test-504327934-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}}
[ 627.459038] env[61839]: DEBUG nova.virt.hardware [None req-758c0332-86e7-48d1-9a5c-4459947020cd tempest-AttachInterfacesV270Test-504327934 tempest-AttachInterfacesV270Test-504327934-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}}
[ 627.459038] env[61839]: DEBUG nova.virt.hardware [None req-758c0332-86e7-48d1-9a5c-4459947020cd tempest-AttachInterfacesV270Test-504327934 tempest-AttachInterfacesV270Test-504327934-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}}
[ 627.459038] env[61839]: DEBUG nova.virt.hardware [None req-758c0332-86e7-48d1-9a5c-4459947020cd tempest-AttachInterfacesV270Test-504327934 tempest-AttachInterfacesV270Test-504327934-project-member] Got 1 possible topologies {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}}
[ 627.461345] env[61839]: DEBUG nova.virt.hardware [None req-758c0332-86e7-48d1-9a5c-4459947020cd tempest-AttachInterfacesV270Test-504327934 tempest-AttachInterfacesV270Test-504327934-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}}
[ 627.461611] env[61839]: DEBUG nova.virt.hardware [None req-758c0332-86e7-48d1-9a5c-4459947020cd tempest-AttachInterfacesV270Test-504327934 tempest-AttachInterfacesV270Test-504327934-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
[ 627.462555] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-732db53a-98f2-4879-b082-7a4bb58d71d8 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 627.475040] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91f08552-0204-4c42-ac1f-f9ec459ac532 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 627.581232] env[61839]: DEBUG oslo_concurrency.lockutils [None req-b7e16cf6-cab6-4bac-b016-9355722d13b1 tempest-ServerPasswordTestJSON-1977854233 tempest-ServerPasswordTestJSON-1977854233-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 627.648735] env[61839]: DEBUG oslo_concurrency.lockutils [None req-fcabcba5-2e8f-494e-bc5c-79e52812344e tempest-ServerActionsTestJSON-1845148809 tempest-ServerActionsTestJSON-1845148809-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 627.682169] env[61839]: INFO nova.compute.manager [None req-a7a0f4c7-63f7-438f-925b-3a0159c683d7 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] [instance: d8b0b608-d8ca-45ce-a113-ee96b5ef1a9d] Rebuilding instance
[ 627.725625] env[61839]: DEBUG nova.compute.manager [None req-a7a0f4c7-63f7-438f-925b-3a0159c683d7 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] [instance: d8b0b608-d8ca-45ce-a113-ee96b5ef1a9d] Checking state {{(pid=61839) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}}
[ 627.726814] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d75f889c-c6d9-4477-b6f5-1e54a625b6dc {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 627.946776] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance 2432a14e-ec45-452c-9592-de690dbc102e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 628.093329] env[61839]: DEBUG nova.compute.manager [req-ee78653d-b847-4fae-abeb-a11557a19365 req-510a2da7-ffbc-4bb8-a8d9-e22aa8997666 service nova] [instance: 5a130776-5e5f-4eec-8574-08aa1f1ef97a] Received event network-changed-e7dac565-024f-4a6b-8efc-d65497a1bbbf {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}}
[ 628.093537] env[61839]: DEBUG nova.compute.manager [req-ee78653d-b847-4fae-abeb-a11557a19365 req-510a2da7-ffbc-4bb8-a8d9-e22aa8997666 service nova] [instance: 5a130776-5e5f-4eec-8574-08aa1f1ef97a] Refreshing instance network info cache due to event network-changed-e7dac565-024f-4a6b-8efc-d65497a1bbbf. {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}}
[ 628.093750] env[61839]: DEBUG oslo_concurrency.lockutils [req-ee78653d-b847-4fae-abeb-a11557a19365 req-510a2da7-ffbc-4bb8-a8d9-e22aa8997666 service nova] Acquiring lock "refresh_cache-5a130776-5e5f-4eec-8574-08aa1f1ef97a" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 628.093892] env[61839]: DEBUG oslo_concurrency.lockutils [req-ee78653d-b847-4fae-abeb-a11557a19365 req-510a2da7-ffbc-4bb8-a8d9-e22aa8997666 service nova] Acquired lock "refresh_cache-5a130776-5e5f-4eec-8574-08aa1f1ef97a" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 628.094144] env[61839]: DEBUG nova.network.neutron [req-ee78653d-b847-4fae-abeb-a11557a19365 req-510a2da7-ffbc-4bb8-a8d9-e22aa8997666 service nova] [instance: 5a130776-5e5f-4eec-8574-08aa1f1ef97a] Refreshing network info cache for port e7dac565-024f-4a6b-8efc-d65497a1bbbf {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}}
[ 628.239529] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-a7a0f4c7-63f7-438f-925b-3a0159c683d7 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] [instance: d8b0b608-d8ca-45ce-a113-ee96b5ef1a9d] Powering off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}}
[ 628.239857] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c0315ff1-fae5-4643-9caf-a79a83efce7f {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 628.247644] env[61839]: DEBUG oslo_vmware.api [None req-a7a0f4c7-63f7-438f-925b-3a0159c683d7 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] Waiting for the task: (returnval){
[ 628.247644] env[61839]: value = "task-1314279"
[ 628.247644] env[61839]: _type = "Task"
[ 628.247644] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 628.256422] env[61839]: DEBUG oslo_vmware.api [None req-a7a0f4c7-63f7-438f-925b-3a0159c683d7 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] Task: {'id': task-1314279, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 628.324349] env[61839]: ERROR nova.compute.manager [None req-758c0332-86e7-48d1-9a5c-4459947020cd tempest-AttachInterfacesV270Test-504327934 tempest-AttachInterfacesV270Test-504327934-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port e7dac565-024f-4a6b-8efc-d65497a1bbbf, please check neutron logs for more information.
[ 628.324349] env[61839]: ERROR nova.compute.manager Traceback (most recent call last):
[ 628.324349] env[61839]: ERROR nova.compute.manager   File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async
[ 628.324349] env[61839]: ERROR nova.compute.manager     nwinfo = self.network_api.allocate_for_instance(
[ 628.324349] env[61839]: ERROR nova.compute.manager   File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance
[ 628.324349] env[61839]: ERROR nova.compute.manager     created_port_ids = self._update_ports_for_instance(
[ 628.324349] env[61839]: ERROR nova.compute.manager   File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance
[ 628.324349] env[61839]: ERROR nova.compute.manager     with excutils.save_and_reraise_exception():
[ 628.324349] env[61839]: ERROR nova.compute.manager   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 628.324349] env[61839]: ERROR nova.compute.manager     self.force_reraise()
[ 628.324349] env[61839]: ERROR nova.compute.manager   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 628.324349] env[61839]: ERROR nova.compute.manager     raise self.value
[ 628.324349] env[61839]: ERROR nova.compute.manager   File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance
[ 628.324349] env[61839]: ERROR nova.compute.manager     updated_port = self._update_port(
[ 628.324349] env[61839]: ERROR nova.compute.manager   File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port
[ 628.324349] env[61839]: ERROR nova.compute.manager     _ensure_no_port_binding_failure(port)
[ 628.324794] env[61839]: ERROR nova.compute.manager   File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure
[ 628.324794] env[61839]: ERROR nova.compute.manager     raise exception.PortBindingFailed(port_id=port['id'])
[ 628.324794] env[61839]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port e7dac565-024f-4a6b-8efc-d65497a1bbbf, please check neutron logs for more information.
[ 628.324794] env[61839]: ERROR nova.compute.manager
[ 628.324794] env[61839]: Traceback (most recent call last):
[ 628.324794] env[61839]:   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait
[ 628.324794] env[61839]:     listener.cb(fileno)
[ 628.324794] env[61839]:   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 628.324794] env[61839]:     result = function(*args, **kwargs)
[ 628.324794] env[61839]:   File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 628.324794] env[61839]:     return func(*args, **kwargs)
[ 628.324794] env[61839]:   File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async
[ 628.324794] env[61839]:     raise e
[ 628.324794] env[61839]:   File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async
[ 628.324794] env[61839]:     nwinfo = self.network_api.allocate_for_instance(
[ 628.324794] env[61839]:   File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance
[ 628.324794] env[61839]:     created_port_ids = self._update_ports_for_instance(
[ 628.324794] env[61839]:   File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance
[ 628.324794] env[61839]:     with excutils.save_and_reraise_exception():
[ 628.324794] env[61839]:   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 628.324794] env[61839]:     self.force_reraise()
[ 628.324794] env[61839]:   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 628.324794] env[61839]:     raise self.value
[ 628.324794] env[61839]:   File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance
[ 628.324794] env[61839]:     updated_port = self._update_port(
[ 628.324794] env[61839]:   File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port
[ 628.324794] env[61839]:     _ensure_no_port_binding_failure(port)
[ 628.324794] env[61839]:   File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure
[ 628.324794] env[61839]:     raise exception.PortBindingFailed(port_id=port['id'])
[ 628.325508] env[61839]: nova.exception.PortBindingFailed: Binding failed for port e7dac565-024f-4a6b-8efc-d65497a1bbbf, please check neutron logs for more information.
[ 628.325508] env[61839]: Removing descriptor: 17
[ 628.325508] env[61839]: ERROR nova.compute.manager [None req-758c0332-86e7-48d1-9a5c-4459947020cd tempest-AttachInterfacesV270Test-504327934 tempest-AttachInterfacesV270Test-504327934-project-member] [instance: 5a130776-5e5f-4eec-8574-08aa1f1ef97a] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port e7dac565-024f-4a6b-8efc-d65497a1bbbf, please check neutron logs for more information.
[ 628.325508] env[61839]: ERROR nova.compute.manager [instance: 5a130776-5e5f-4eec-8574-08aa1f1ef97a] Traceback (most recent call last):
[ 628.325508] env[61839]: ERROR nova.compute.manager [instance: 5a130776-5e5f-4eec-8574-08aa1f1ef97a]   File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources
[ 628.325508] env[61839]: ERROR nova.compute.manager [instance: 5a130776-5e5f-4eec-8574-08aa1f1ef97a]     yield resources
[ 628.325508] env[61839]: ERROR nova.compute.manager [instance: 5a130776-5e5f-4eec-8574-08aa1f1ef97a]   File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance
[ 628.325508] env[61839]: ERROR nova.compute.manager [instance: 5a130776-5e5f-4eec-8574-08aa1f1ef97a]     self.driver.spawn(context, instance, image_meta,
[ 628.325508] env[61839]: ERROR nova.compute.manager [instance: 5a130776-5e5f-4eec-8574-08aa1f1ef97a]   File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn
[ 628.325508] env[61839]: ERROR nova.compute.manager [instance: 5a130776-5e5f-4eec-8574-08aa1f1ef97a]     self._vmops.spawn(context, instance, image_meta, injected_files,
[ 628.325508] env[61839]: ERROR nova.compute.manager [instance: 5a130776-5e5f-4eec-8574-08aa1f1ef97a]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn
[ 628.325508] env[61839]: ERROR nova.compute.manager [instance: 5a130776-5e5f-4eec-8574-08aa1f1ef97a]     vm_ref = self.build_virtual_machine(instance,
[ 628.325835] env[61839]: ERROR nova.compute.manager [instance: 5a130776-5e5f-4eec-8574-08aa1f1ef97a]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine
[ 628.325835] env[61839]: ERROR nova.compute.manager [instance: 5a130776-5e5f-4eec-8574-08aa1f1ef97a]     vif_infos = vmwarevif.get_vif_info(self._session,
[ 628.325835] env[61839]: ERROR nova.compute.manager [instance: 5a130776-5e5f-4eec-8574-08aa1f1ef97a]   File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info
[ 628.325835] env[61839]: ERROR nova.compute.manager [instance: 5a130776-5e5f-4eec-8574-08aa1f1ef97a]     for vif in network_info:
[ 628.325835] env[61839]: ERROR nova.compute.manager [instance: 5a130776-5e5f-4eec-8574-08aa1f1ef97a]   File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__
[ 628.325835] env[61839]: ERROR nova.compute.manager [instance: 5a130776-5e5f-4eec-8574-08aa1f1ef97a]     return self._sync_wrapper(fn, *args, **kwargs)
[ 628.325835] env[61839]: ERROR nova.compute.manager [instance: 5a130776-5e5f-4eec-8574-08aa1f1ef97a]   File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper
[ 628.325835] env[61839]: ERROR nova.compute.manager [instance: 5a130776-5e5f-4eec-8574-08aa1f1ef97a]     self.wait()
[ 628.325835] env[61839]: ERROR nova.compute.manager [instance: 5a130776-5e5f-4eec-8574-08aa1f1ef97a]   File "/opt/stack/nova/nova/network/model.py", line 635, in wait
[ 628.325835] env[61839]: ERROR nova.compute.manager [instance: 5a130776-5e5f-4eec-8574-08aa1f1ef97a]     self[:] = self._gt.wait()
[ 628.325835] env[61839]: ERROR nova.compute.manager [instance: 5a130776-5e5f-4eec-8574-08aa1f1ef97a]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait
[ 628.325835] env[61839]: ERROR nova.compute.manager [instance: 5a130776-5e5f-4eec-8574-08aa1f1ef97a]     return self._exit_event.wait()
[ 628.325835] env[61839]: ERROR nova.compute.manager [instance: 5a130776-5e5f-4eec-8574-08aa1f1ef97a]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 628.326150] env[61839]: ERROR nova.compute.manager [instance: 5a130776-5e5f-4eec-8574-08aa1f1ef97a]     result = hub.switch()
[ 628.326150] env[61839]: ERROR nova.compute.manager [instance: 5a130776-5e5f-4eec-8574-08aa1f1ef97a]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 628.326150] env[61839]: ERROR nova.compute.manager [instance: 5a130776-5e5f-4eec-8574-08aa1f1ef97a]     return self.greenlet.switch()
[ 628.326150] env[61839]: ERROR nova.compute.manager [instance: 5a130776-5e5f-4eec-8574-08aa1f1ef97a]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 628.326150] env[61839]: ERROR nova.compute.manager [instance: 5a130776-5e5f-4eec-8574-08aa1f1ef97a]     result = function(*args, **kwargs)
[ 628.326150] env[61839]: ERROR nova.compute.manager [instance: 5a130776-5e5f-4eec-8574-08aa1f1ef97a]   File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 628.326150] env[61839]: ERROR nova.compute.manager [instance: 5a130776-5e5f-4eec-8574-08aa1f1ef97a]     return func(*args, **kwargs)
[ 628.326150] env[61839]: ERROR nova.compute.manager [instance: 5a130776-5e5f-4eec-8574-08aa1f1ef97a]   File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async
[ 628.326150] env[61839]: ERROR nova.compute.manager [instance: 5a130776-5e5f-4eec-8574-08aa1f1ef97a]     raise e
[ 628.326150] env[61839]: ERROR nova.compute.manager [instance: 5a130776-5e5f-4eec-8574-08aa1f1ef97a]   File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async
[ 628.326150] env[61839]: ERROR nova.compute.manager [instance: 5a130776-5e5f-4eec-8574-08aa1f1ef97a]     nwinfo = self.network_api.allocate_for_instance(
[ 628.326150] env[61839]: ERROR nova.compute.manager [instance: 5a130776-5e5f-4eec-8574-08aa1f1ef97a]   File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance
[ 628.326150] env[61839]: ERROR nova.compute.manager [instance: 5a130776-5e5f-4eec-8574-08aa1f1ef97a]     created_port_ids = self._update_ports_for_instance(
[ 628.326450] env[61839]: ERROR nova.compute.manager [instance: 5a130776-5e5f-4eec-8574-08aa1f1ef97a]   File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance
[ 628.326450] env[61839]: ERROR nova.compute.manager [instance: 5a130776-5e5f-4eec-8574-08aa1f1ef97a]     with excutils.save_and_reraise_exception():
[ 628.326450] env[61839]: ERROR nova.compute.manager [instance: 5a130776-5e5f-4eec-8574-08aa1f1ef97a]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 628.326450] env[61839]: ERROR nova.compute.manager [instance: 5a130776-5e5f-4eec-8574-08aa1f1ef97a]     self.force_reraise()
[ 628.326450] env[61839]: ERROR nova.compute.manager [instance: 5a130776-5e5f-4eec-8574-08aa1f1ef97a]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 628.326450] env[61839]: ERROR nova.compute.manager [instance: 5a130776-5e5f-4eec-8574-08aa1f1ef97a]     raise self.value
[ 628.326450] env[61839]: ERROR nova.compute.manager [instance: 5a130776-5e5f-4eec-8574-08aa1f1ef97a]   File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance
[ 628.326450] env[61839]: ERROR nova.compute.manager [instance: 5a130776-5e5f-4eec-8574-08aa1f1ef97a]     updated_port = self._update_port(
[ 628.326450] env[61839]: ERROR nova.compute.manager [instance: 5a130776-5e5f-4eec-8574-08aa1f1ef97a]   File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port
[ 628.326450] env[61839]: ERROR nova.compute.manager [instance: 5a130776-5e5f-4eec-8574-08aa1f1ef97a]     _ensure_no_port_binding_failure(port)
[ 628.326450] env[61839]: ERROR nova.compute.manager [instance: 5a130776-5e5f-4eec-8574-08aa1f1ef97a]   File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure
[ 628.326786] env[61839]: ERROR nova.compute.manager [instance: 5a130776-5e5f-4eec-8574-08aa1f1ef97a]     raise exception.PortBindingFailed(port_id=port['id'])
[ 628.326786] env[61839]: ERROR nova.compute.manager [instance: 5a130776-5e5f-4eec-8574-08aa1f1ef97a] nova.exception.PortBindingFailed: Binding failed for port e7dac565-024f-4a6b-8efc-d65497a1bbbf, please check neutron logs for more information.
[ 628.326786] env[61839]: ERROR nova.compute.manager [instance: 5a130776-5e5f-4eec-8574-08aa1f1ef97a]
[ 628.326786] env[61839]: INFO nova.compute.manager [None req-758c0332-86e7-48d1-9a5c-4459947020cd tempest-AttachInterfacesV270Test-504327934 tempest-AttachInterfacesV270Test-504327934-project-member] [instance: 5a130776-5e5f-4eec-8574-08aa1f1ef97a] Terminating instance
[ 628.331538] env[61839]: DEBUG oslo_concurrency.lockutils [None req-758c0332-86e7-48d1-9a5c-4459947020cd tempest-AttachInterfacesV270Test-504327934 tempest-AttachInterfacesV270Test-504327934-project-member] Acquiring lock "refresh_cache-5a130776-5e5f-4eec-8574-08aa1f1ef97a" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 628.453117] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 628.615207] env[61839]: DEBUG nova.network.neutron [req-ee78653d-b847-4fae-abeb-a11557a19365 req-510a2da7-ffbc-4bb8-a8d9-e22aa8997666 service nova] [instance: 5a130776-5e5f-4eec-8574-08aa1f1ef97a] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 628.760760] env[61839]: DEBUG oslo_vmware.api [None req-a7a0f4c7-63f7-438f-925b-3a0159c683d7 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] Task: {'id': task-1314279, 'name': PowerOffVM_Task, 'duration_secs': 0.115556} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 628.761132] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-a7a0f4c7-63f7-438f-925b-3a0159c683d7 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] [instance: d8b0b608-d8ca-45ce-a113-ee96b5ef1a9d] Powered off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}}
[ 628.761292] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-a7a0f4c7-63f7-438f-925b-3a0159c683d7 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] [instance: d8b0b608-d8ca-45ce-a113-ee96b5ef1a9d] Destroying instance {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}}
[ 628.762113] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16d58482-0213-4c89-a6ad-31aa5d2ec138 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 628.771255] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-a7a0f4c7-63f7-438f-925b-3a0159c683d7 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] [instance: d8b0b608-d8ca-45ce-a113-ee96b5ef1a9d] Unregistering the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}}
[ 628.772149] env[61839]: DEBUG nova.network.neutron [req-ee78653d-b847-4fae-abeb-a11557a19365 req-510a2da7-ffbc-4bb8-a8d9-e22aa8997666 service nova] [instance: 5a130776-5e5f-4eec-8574-08aa1f1ef97a] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 628.773330] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-df170082-b673-41e3-b8f3-c0fd59aa1864 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 628.800283] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-a7a0f4c7-63f7-438f-925b-3a0159c683d7 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] [instance: d8b0b608-d8ca-45ce-a113-ee96b5ef1a9d] Unregistered the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}}
[ 628.800283] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-a7a0f4c7-63f7-438f-925b-3a0159c683d7 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] [instance: d8b0b608-d8ca-45ce-a113-ee96b5ef1a9d] Deleting contents of the VM from datastore datastore1 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}}
[ 628.800283] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-a7a0f4c7-63f7-438f-925b-3a0159c683d7 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] Deleting the datastore file [datastore1] d8b0b608-d8ca-45ce-a113-ee96b5ef1a9d {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}}
[ 628.800283] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-efee4d1d-f764-453b-964b-99165727760b {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 628.807162] env[61839]: DEBUG oslo_vmware.api [None req-a7a0f4c7-63f7-438f-925b-3a0159c683d7 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] Waiting for the task: (returnval){
[ 628.807162] env[61839]: value = "task-1314281"
[ 628.807162] env[61839]: _type = "Task"
[ 628.807162] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 628.815421] env[61839]: DEBUG oslo_vmware.api [None req-a7a0f4c7-63f7-438f-925b-3a0159c683d7 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] Task: {'id': task-1314281, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 628.961515] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance 81ba4888-4b21-410f-ab86-a3068995836f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 629.276056] env[61839]: DEBUG oslo_concurrency.lockutils [req-ee78653d-b847-4fae-abeb-a11557a19365 req-510a2da7-ffbc-4bb8-a8d9-e22aa8997666 service nova] Releasing lock "refresh_cache-5a130776-5e5f-4eec-8574-08aa1f1ef97a" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 629.276383] env[61839]: DEBUG oslo_concurrency.lockutils [None req-758c0332-86e7-48d1-9a5c-4459947020cd tempest-AttachInterfacesV270Test-504327934 tempest-AttachInterfacesV270Test-504327934-project-member] Acquired lock "refresh_cache-5a130776-5e5f-4eec-8574-08aa1f1ef97a" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 629.276578] env[61839]: DEBUG nova.network.neutron [None req-758c0332-86e7-48d1-9a5c-4459947020cd tempest-AttachInterfacesV270Test-504327934 tempest-AttachInterfacesV270Test-504327934-project-member] [instance: 5a130776-5e5f-4eec-8574-08aa1f1ef97a] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}}
[ 629.317493] env[61839]: DEBUG oslo_vmware.api [None req-a7a0f4c7-63f7-438f-925b-3a0159c683d7 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] Task: {'id': task-1314281, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.102911} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 629.317739] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-a7a0f4c7-63f7-438f-925b-3a0159c683d7 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] Deleted the datastore file {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}}
[ 629.317921] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-a7a0f4c7-63f7-438f-925b-3a0159c683d7 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] [instance: d8b0b608-d8ca-45ce-a113-ee96b5ef1a9d] Deleted contents of the VM from datastore datastore1 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}}
[ 629.318111] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-a7a0f4c7-63f7-438f-925b-3a0159c683d7 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] [instance: d8b0b608-d8ca-45ce-a113-ee96b5ef1a9d] Instance destroyed {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}}
[ 629.463574] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance 2619b21e-084f-4003-af13-80382bfb1e2f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 629.794428] env[61839]: DEBUG nova.network.neutron [None req-758c0332-86e7-48d1-9a5c-4459947020cd tempest-AttachInterfacesV270Test-504327934 tempest-AttachInterfacesV270Test-504327934-project-member] [instance: 5a130776-5e5f-4eec-8574-08aa1f1ef97a] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 629.909560] env[61839]: DEBUG nova.network.neutron [None req-758c0332-86e7-48d1-9a5c-4459947020cd tempest-AttachInterfacesV270Test-504327934 tempest-AttachInterfacesV270Test-504327934-project-member] [instance: 5a130776-5e5f-4eec-8574-08aa1f1ef97a] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 629.968538] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance 0ab450ab-6416-464d-8140-a8c320abb69c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 630.218592] env[61839]: DEBUG nova.compute.manager [req-bcdcbdca-29e6-4683-9f15-e09bd70ae4f0 req-929f62be-42ff-4f71-bb3f-501b01542c81 service nova] [instance: 5a130776-5e5f-4eec-8574-08aa1f1ef97a] Received event network-vif-deleted-e7dac565-024f-4a6b-8efc-d65497a1bbbf {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}}
[ 630.346541] env[61839]: DEBUG nova.virt.hardware [None req-a7a0f4c7-63f7-438f-925b-3a0159c683d7 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-18T16:51:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-18T16:50:57Z,direct_url=,disk_format='vmdk',id=e497cc62-282a-4a70-9770-22d80d8a1013,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a98e034276e44534a9feace637762da7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-18T16:50:58Z,virtual_size=,visibility=), allow threads: False {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}}
[ 630.346779] env[61839]: DEBUG nova.virt.hardware [None req-a7a0f4c7-63f7-438f-925b-3a0159c683d7 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] Flavor limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}}
[ 630.346932] env[61839]: DEBUG nova.virt.hardware [None req-a7a0f4c7-63f7-438f-925b-3a0159c683d7 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] Image limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 630.347124] env[61839]: DEBUG nova.virt.hardware [None req-a7a0f4c7-63f7-438f-925b-3a0159c683d7 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] Flavor pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}}
[ 630.347275] env[61839]: DEBUG nova.virt.hardware [None req-a7a0f4c7-63f7-438f-925b-3a0159c683d7 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] Image pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 630.347418] env[61839]: DEBUG nova.virt.hardware [None req-a7a0f4c7-63f7-438f-925b-3a0159c683d7 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}}
[ 630.347620] env[61839]: DEBUG nova.virt.hardware [None req-a7a0f4c7-63f7-438f-925b-3a0159c683d7 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}}
[ 630.347776] env[61839]: DEBUG nova.virt.hardware [None req-a7a0f4c7-63f7-438f-925b-3a0159c683d7 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}}
[ 630.347939] env[61839]: DEBUG nova.virt.hardware [None req-a7a0f4c7-63f7-438f-925b-3a0159c683d7 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] Got 1 possible topologies {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}}
[ 630.348110] env[61839]: DEBUG nova.virt.hardware [None req-a7a0f4c7-63f7-438f-925b-3a0159c683d7 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}}
[ 630.348297] env[61839]: DEBUG nova.virt.hardware [None req-a7a0f4c7-63f7-438f-925b-3a0159c683d7 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
[ 630.349401] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-127cf16f-2651-43d3-a5d8-80343c6421fe {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 630.356983] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea79ec26-b6b0-45de-b77a-9569e97f62b4 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 630.369717] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-a7a0f4c7-63f7-438f-925b-3a0159c683d7 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] [instance: d8b0b608-d8ca-45ce-a113-ee96b5ef1a9d] Instance VIF info [] {{(pid=61839) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}}
[ 630.375387] env[61839]: DEBUG oslo.service.loopingcall [None req-a7a0f4c7-63f7-438f-925b-3a0159c683d7 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 630.375602] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d8b0b608-d8ca-45ce-a113-ee96b5ef1a9d] Creating VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}}
[ 630.375792] env[61839]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ce62d4ce-0a8c-4652-9eea-75ec343d40d6 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 630.392063] env[61839]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){
[ 630.392063] env[61839]: value = "task-1314282"
[ 630.392063] env[61839]: _type = "Task"
[ 630.392063] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 630.399191] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314282, 'name': CreateVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 630.411810] env[61839]: DEBUG oslo_concurrency.lockutils [None req-758c0332-86e7-48d1-9a5c-4459947020cd tempest-AttachInterfacesV270Test-504327934 tempest-AttachInterfacesV270Test-504327934-project-member] Releasing lock "refresh_cache-5a130776-5e5f-4eec-8574-08aa1f1ef97a" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 630.412249] env[61839]: DEBUG nova.compute.manager [None req-758c0332-86e7-48d1-9a5c-4459947020cd tempest-AttachInterfacesV270Test-504327934 tempest-AttachInterfacesV270Test-504327934-project-member] [instance: 5a130776-5e5f-4eec-8574-08aa1f1ef97a] Start destroying the instance on the hypervisor. {{(pid=61839) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}}
[ 630.412445] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-758c0332-86e7-48d1-9a5c-4459947020cd tempest-AttachInterfacesV270Test-504327934 tempest-AttachInterfacesV270Test-504327934-project-member] [instance: 5a130776-5e5f-4eec-8574-08aa1f1ef97a] Destroying instance {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}}
[ 630.412703] env[61839]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0b12bd1b-85cb-4da9-9bdc-e49ab8746e7e {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 630.421182] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4376b152-82c3-4c4f-a116-9296a8c27db5 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 630.442306] env[61839]: WARNING nova.virt.vmwareapi.vmops [None req-758c0332-86e7-48d1-9a5c-4459947020cd tempest-AttachInterfacesV270Test-504327934 tempest-AttachInterfacesV270Test-504327934-project-member] [instance: 5a130776-5e5f-4eec-8574-08aa1f1ef97a] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 5a130776-5e5f-4eec-8574-08aa1f1ef97a could not be found.
[ 630.442570] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-758c0332-86e7-48d1-9a5c-4459947020cd tempest-AttachInterfacesV270Test-504327934 tempest-AttachInterfacesV270Test-504327934-project-member] [instance: 5a130776-5e5f-4eec-8574-08aa1f1ef97a] Instance destroyed {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}}
[ 630.442710] env[61839]: INFO nova.compute.manager [None req-758c0332-86e7-48d1-9a5c-4459947020cd tempest-AttachInterfacesV270Test-504327934 tempest-AttachInterfacesV270Test-504327934-project-member] [instance: 5a130776-5e5f-4eec-8574-08aa1f1ef97a] Took 0.03 seconds to destroy the instance on the hypervisor.
[ 630.442951] env[61839]: DEBUG oslo.service.loopingcall [None req-758c0332-86e7-48d1-9a5c-4459947020cd tempest-AttachInterfacesV270Test-504327934 tempest-AttachInterfacesV270Test-504327934-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 630.443220] env[61839]: DEBUG nova.compute.manager [-] [instance: 5a130776-5e5f-4eec-8574-08aa1f1ef97a] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}}
[ 630.443333] env[61839]: DEBUG nova.network.neutron [-] [instance: 5a130776-5e5f-4eec-8574-08aa1f1ef97a] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}}
[ 630.458454] env[61839]: DEBUG nova.network.neutron [-] [instance: 5a130776-5e5f-4eec-8574-08aa1f1ef97a] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 630.463626] env[61839]: DEBUG oslo_concurrency.lockutils [None req-778e677c-c811-4fc9-adf1-a009f3699287 tempest-ServersAaction247Test-1570847378 tempest-ServersAaction247Test-1570847378-project-member] Acquiring lock "0bc0eefd-8a56-4cd6-a0b5-818cc437d917" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 630.463954] env[61839]: DEBUG oslo_concurrency.lockutils [None req-778e677c-c811-4fc9-adf1-a009f3699287 tempest-ServersAaction247Test-1570847378 tempest-ServersAaction247Test-1570847378-project-member] Lock "0bc0eefd-8a56-4cd6-a0b5-818cc437d917" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 630.473818] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance 3ea7af26-14b2-4371-a4f4-48afc190d4bc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 630.902844] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314282, 'name': CreateVM_Task, 'duration_secs': 0.251512} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 630.902844] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d8b0b608-d8ca-45ce-a113-ee96b5ef1a9d] Created VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}}
[ 630.902982] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a7a0f4c7-63f7-438f-925b-3a0159c683d7 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 630.903137] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a7a0f4c7-63f7-438f-925b-3a0159c683d7 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 630.903452] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a7a0f4c7-63f7-438f-925b-3a0159c683d7 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 630.903692] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ff4420e9-eae1-422b-92a2-1a77d8665701 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 630.908026] env[61839]: DEBUG oslo_vmware.api [None req-a7a0f4c7-63f7-438f-925b-3a0159c683d7 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] Waiting for the task: (returnval){
[ 630.908026] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52dd73f8-d569-da64-647d-8501254ae9ab"
[ 630.908026] env[61839]: _type = "Task"
[ 630.908026] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 630.915343] env[61839]: DEBUG oslo_vmware.api [None req-a7a0f4c7-63f7-438f-925b-3a0159c683d7 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52dd73f8-d569-da64-647d-8501254ae9ab, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 630.963973] env[61839]: DEBUG nova.network.neutron [-] [instance: 5a130776-5e5f-4eec-8574-08aa1f1ef97a] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 630.976288] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance ad32bc49-5e52-468a-9d93-390c8649dcae has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 631.417809] env[61839]: DEBUG oslo_vmware.api [None req-a7a0f4c7-63f7-438f-925b-3a0159c683d7 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52dd73f8-d569-da64-647d-8501254ae9ab, 'name': SearchDatastore_Task, 'duration_secs': 0.009897} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 631.418122] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a7a0f4c7-63f7-438f-925b-3a0159c683d7 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 631.418356] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-a7a0f4c7-63f7-438f-925b-3a0159c683d7 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] [instance: d8b0b608-d8ca-45ce-a113-ee96b5ef1a9d] Processing image e497cc62-282a-4a70-9770-22d80d8a1013 {{(pid=61839) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}}
[ 631.418587] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a7a0f4c7-63f7-438f-925b-3a0159c683d7 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 631.418729] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a7a0f4c7-63f7-438f-925b-3a0159c683d7 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 631.418901] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-a7a0f4c7-63f7-438f-925b-3a0159c683d7 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 631.419163] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f314119e-a19c-4a0a-b0f7-f4d59f2f690d {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 631.426203] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-a7a0f4c7-63f7-438f-925b-3a0159c683d7 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 631.426371] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-a7a0f4c7-63f7-438f-925b-3a0159c683d7 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61839) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}}
[ 631.427011] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-92baa564-0a58-44e4-b7e5-2e811642b4ae {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 631.431393] env[61839]: DEBUG oslo_vmware.api [None req-a7a0f4c7-63f7-438f-925b-3a0159c683d7 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] Waiting for the task: (returnval){
[ 631.431393] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52ecc324-b0ea-9a37-3e0d-86276e69ddec"
[ 631.431393] env[61839]: _type = "Task"
[ 631.431393] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 631.438230] env[61839]: DEBUG oslo_vmware.api [None req-a7a0f4c7-63f7-438f-925b-3a0159c683d7 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52ecc324-b0ea-9a37-3e0d-86276e69ddec, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 631.466718] env[61839]: INFO nova.compute.manager [-] [instance: 5a130776-5e5f-4eec-8574-08aa1f1ef97a] Took 1.02 seconds to deallocate network for instance.
[ 631.468870] env[61839]: DEBUG nova.compute.claims [None req-758c0332-86e7-48d1-9a5c-4459947020cd tempest-AttachInterfacesV270Test-504327934 tempest-AttachInterfacesV270Test-504327934-project-member] [instance: 5a130776-5e5f-4eec-8574-08aa1f1ef97a] Aborting claim: {{(pid=61839) abort /opt/stack/nova/nova/compute/claims.py:85}}
[ 631.469055] env[61839]: DEBUG oslo_concurrency.lockutils [None req-758c0332-86e7-48d1-9a5c-4459947020cd tempest-AttachInterfacesV270Test-504327934 tempest-AttachInterfacesV270Test-504327934-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 631.478763] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance 83270007-2cbd-49a5-b3a1-1ad58ea2a66c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 631.942366] env[61839]: DEBUG oslo_vmware.api [None req-a7a0f4c7-63f7-438f-925b-3a0159c683d7 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52ecc324-b0ea-9a37-3e0d-86276e69ddec, 'name': SearchDatastore_Task, 'duration_secs': 0.007539} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 631.943198] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4cd2cc6b-153d-4f82-a874-dfa299e478da {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 631.948631] env[61839]: DEBUG oslo_vmware.api [None req-a7a0f4c7-63f7-438f-925b-3a0159c683d7 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] Waiting for the task: (returnval){
[ 631.948631] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52b054ea-4e12-5bb8-1346-61bd552b0024"
[ 631.948631] env[61839]: _type = "Task"
[ 631.948631] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 631.956508] env[61839]: DEBUG oslo_vmware.api [None req-a7a0f4c7-63f7-438f-925b-3a0159c683d7 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52b054ea-4e12-5bb8-1346-61bd552b0024, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 631.981383] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance 5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 632.459197] env[61839]: DEBUG oslo_vmware.api [None req-a7a0f4c7-63f7-438f-925b-3a0159c683d7 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52b054ea-4e12-5bb8-1346-61bd552b0024, 'name': SearchDatastore_Task, 'duration_secs': 0.008804} completed successfully.
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 632.459576] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a7a0f4c7-63f7-438f-925b-3a0159c683d7 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 632.459905] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-a7a0f4c7-63f7-438f-925b-3a0159c683d7 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk to [datastore1] d8b0b608-d8ca-45ce-a113-ee96b5ef1a9d/d8b0b608-d8ca-45ce-a113-ee96b5ef1a9d.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 632.460223] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8845b9c7-9ca0-4a4f-813d-d9040672cf63 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.466944] env[61839]: DEBUG oslo_vmware.api [None req-a7a0f4c7-63f7-438f-925b-3a0159c683d7 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] Waiting for the task: (returnval){ [ 632.466944] env[61839]: value = "task-1314283" [ 632.466944] env[61839]: _type = "Task" [ 632.466944] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 632.473953] env[61839]: DEBUG oslo_vmware.api [None req-a7a0f4c7-63f7-438f-925b-3a0159c683d7 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] Task: {'id': task-1314283, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 632.486755] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance 97560b6e-0f50-4cc0-b620-305c82938390 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 632.976590] env[61839]: DEBUG oslo_vmware.api [None req-a7a0f4c7-63f7-438f-925b-3a0159c683d7 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] Task: {'id': task-1314283, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.453898} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 632.976864] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-a7a0f4c7-63f7-438f-925b-3a0159c683d7 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk to [datastore1] d8b0b608-d8ca-45ce-a113-ee96b5ef1a9d/d8b0b608-d8ca-45ce-a113-ee96b5ef1a9d.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 632.977091] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-a7a0f4c7-63f7-438f-925b-3a0159c683d7 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] [instance: d8b0b608-d8ca-45ce-a113-ee96b5ef1a9d] Extending root virtual disk to 1048576 {{(pid=61839) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 632.977342] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f64f6970-f4ab-4817-b97b-7c23c8ccda88 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.983162] env[61839]: DEBUG oslo_vmware.api [None req-a7a0f4c7-63f7-438f-925b-3a0159c683d7 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] Waiting for the task: (returnval){ [ 632.983162] env[61839]: value = "task-1314284" [ 632.983162] env[61839]: _type = "Task" [ 632.983162] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 632.990793] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance d608405b-20d9-42ab-97e3-e129f9c1448b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 632.992144] env[61839]: DEBUG oslo_vmware.api [None req-a7a0f4c7-63f7-438f-925b-3a0159c683d7 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] Task: {'id': task-1314284, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 633.494145] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance eca07795-319e-401d-8f05-41a29bab2689 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 633.505893] env[61839]: DEBUG oslo_vmware.api [None req-a7a0f4c7-63f7-438f-925b-3a0159c683d7 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] Task: {'id': task-1314284, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.052153} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 633.508775] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-a7a0f4c7-63f7-438f-925b-3a0159c683d7 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] [instance: d8b0b608-d8ca-45ce-a113-ee96b5ef1a9d] Extended root virtual disk {{(pid=61839) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 633.509883] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77a29a30-7d53-4287-a1d0-08fac58637db {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.546122] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-a7a0f4c7-63f7-438f-925b-3a0159c683d7 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] [instance: d8b0b608-d8ca-45ce-a113-ee96b5ef1a9d] Reconfiguring VM instance instance-00000019 to attach disk [datastore1] d8b0b608-d8ca-45ce-a113-ee96b5ef1a9d/d8b0b608-d8ca-45ce-a113-ee96b5ef1a9d.vmdk or device None with type sparse {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 633.546756] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-34cba8d9-38c2-4e23-9bc7-82201764d2a0 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.571741] env[61839]: DEBUG oslo_vmware.api [None req-a7a0f4c7-63f7-438f-925b-3a0159c683d7 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] Waiting for the task: (returnval){ [ 633.571741] env[61839]: value = "task-1314285" [ 633.571741] env[61839]: _type = "Task" [ 633.571741] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 633.579823] env[61839]: DEBUG oslo_vmware.api [None req-a7a0f4c7-63f7-438f-925b-3a0159c683d7 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] Task: {'id': task-1314285, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 633.997141] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 634.081084] env[61839]: DEBUG oslo_vmware.api [None req-a7a0f4c7-63f7-438f-925b-3a0159c683d7 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] Task: {'id': task-1314285, 'name': ReconfigVM_Task, 'duration_secs': 0.240917} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 634.081374] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-a7a0f4c7-63f7-438f-925b-3a0159c683d7 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] [instance: d8b0b608-d8ca-45ce-a113-ee96b5ef1a9d] Reconfigured VM instance instance-00000019 to attach disk [datastore1] d8b0b608-d8ca-45ce-a113-ee96b5ef1a9d/d8b0b608-d8ca-45ce-a113-ee96b5ef1a9d.vmdk or device None with type sparse {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 634.081979] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-203e7bcd-9dbd-4123-925b-a99c7535a485 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.088523] env[61839]: DEBUG oslo_vmware.api [None req-a7a0f4c7-63f7-438f-925b-3a0159c683d7 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] Waiting for the task: (returnval){ [ 634.088523] env[61839]: value = "task-1314286" [ 634.088523] env[61839]: _type = "Task" [ 634.088523] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 634.096442] env[61839]: DEBUG oslo_vmware.api [None req-a7a0f4c7-63f7-438f-925b-3a0159c683d7 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] Task: {'id': task-1314286, 'name': Rename_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 634.500687] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance 4b7ef74e-4018-4c6e-b540-d65c986d1ff2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 634.598303] env[61839]: DEBUG oslo_vmware.api [None req-a7a0f4c7-63f7-438f-925b-3a0159c683d7 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] Task: {'id': task-1314286, 'name': Rename_Task, 'duration_secs': 0.32312} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 634.598536] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-a7a0f4c7-63f7-438f-925b-3a0159c683d7 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] [instance: d8b0b608-d8ca-45ce-a113-ee96b5ef1a9d] Powering on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 634.598764] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d2d434dd-06e3-4b27-bc57-7f86d1b043f5 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.604507] env[61839]: DEBUG oslo_vmware.api [None req-a7a0f4c7-63f7-438f-925b-3a0159c683d7 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] Waiting for the task: (returnval){ [ 634.604507] env[61839]: value = "task-1314287" [ 634.604507] env[61839]: _type = "Task" [ 634.604507] env[61839]: } to complete. 
{{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 634.614898] env[61839]: DEBUG oslo_vmware.api [None req-a7a0f4c7-63f7-438f-925b-3a0159c683d7 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] Task: {'id': task-1314287, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 635.003695] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance 56993a6d-de55-4648-9fd9-31d06a57f300 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 635.115646] env[61839]: DEBUG oslo_vmware.api [None req-a7a0f4c7-63f7-438f-925b-3a0159c683d7 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] Task: {'id': task-1314287, 'name': PowerOnVM_Task, 'duration_secs': 0.393917} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 635.115646] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-a7a0f4c7-63f7-438f-925b-3a0159c683d7 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] [instance: d8b0b608-d8ca-45ce-a113-ee96b5ef1a9d] Powered on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 635.115646] env[61839]: DEBUG nova.compute.manager [None req-a7a0f4c7-63f7-438f-925b-3a0159c683d7 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] [instance: d8b0b608-d8ca-45ce-a113-ee96b5ef1a9d] Checking state {{(pid=61839) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 635.116142] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d8ec111-e16c-4856-b7b7-707f21a20289 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.506488] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance 50140f35-6282-41dc-a66c-f041f33769d7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 635.631159] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a7a0f4c7-63f7-438f-925b-3a0159c683d7 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 636.009472] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance a89e30e6-b727-440f-a1e8-9c86d19c796d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 636.512931] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance 337b31e7-a6c9-4f35-9936-62cff06fe2a1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 636.742117] env[61839]: DEBUG oslo_concurrency.lockutils [None req-59f3a73e-ced5-4077-b911-de1be522459b tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] Acquiring lock "d8b0b608-d8ca-45ce-a113-ee96b5ef1a9d" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 636.742492] env[61839]: DEBUG oslo_concurrency.lockutils [None req-59f3a73e-ced5-4077-b911-de1be522459b tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] Lock "d8b0b608-d8ca-45ce-a113-ee96b5ef1a9d" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 636.742724] env[61839]: DEBUG oslo_concurrency.lockutils [None req-59f3a73e-ced5-4077-b911-de1be522459b tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] Acquiring lock "d8b0b608-d8ca-45ce-a113-ee96b5ef1a9d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 636.742911] env[61839]: DEBUG oslo_concurrency.lockutils [None req-59f3a73e-ced5-4077-b911-de1be522459b tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] Lock "d8b0b608-d8ca-45ce-a113-ee96b5ef1a9d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 636.743102] env[61839]: DEBUG oslo_concurrency.lockutils [None req-59f3a73e-ced5-4077-b911-de1be522459b tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] Lock "d8b0b608-d8ca-45ce-a113-ee96b5ef1a9d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 636.746335] env[61839]: INFO nova.compute.manager [None req-59f3a73e-ced5-4077-b911-de1be522459b tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] [instance: d8b0b608-d8ca-45ce-a113-ee96b5ef1a9d] Terminating instance [ 636.748485] env[61839]: DEBUG oslo_concurrency.lockutils [None req-59f3a73e-ced5-4077-b911-de1be522459b tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] Acquiring lock "refresh_cache-d8b0b608-d8ca-45ce-a113-ee96b5ef1a9d" {{(pid=61839) lock
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 636.748718] env[61839]: DEBUG oslo_concurrency.lockutils [None req-59f3a73e-ced5-4077-b911-de1be522459b tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] Acquired lock "refresh_cache-d8b0b608-d8ca-45ce-a113-ee96b5ef1a9d" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 636.748981] env[61839]: DEBUG nova.network.neutron [None req-59f3a73e-ced5-4077-b911-de1be522459b tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] [instance: d8b0b608-d8ca-45ce-a113-ee96b5ef1a9d] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 637.016222] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance 59ea60d5-7296-480c-ac03-ec0a7c021300 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 637.273337] env[61839]: DEBUG nova.network.neutron [None req-59f3a73e-ced5-4077-b911-de1be522459b tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] [instance: d8b0b608-d8ca-45ce-a113-ee96b5ef1a9d] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 637.340605] env[61839]: DEBUG nova.network.neutron [None req-59f3a73e-ced5-4077-b911-de1be522459b tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] [instance: d8b0b608-d8ca-45ce-a113-ee96b5ef1a9d] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 637.519411] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance 619ec15b-463a-4daa-bffe-7d7a6022b962 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 637.843396] env[61839]: DEBUG oslo_concurrency.lockutils [None req-59f3a73e-ced5-4077-b911-de1be522459b tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] Releasing lock "refresh_cache-d8b0b608-d8ca-45ce-a113-ee96b5ef1a9d" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 637.843843] env[61839]: DEBUG nova.compute.manager [None req-59f3a73e-ced5-4077-b911-de1be522459b tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] [instance: d8b0b608-d8ca-45ce-a113-ee96b5ef1a9d] Start destroying the instance on the hypervisor. 
{{(pid=61839) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 637.844048] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-59f3a73e-ced5-4077-b911-de1be522459b tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] [instance: d8b0b608-d8ca-45ce-a113-ee96b5ef1a9d] Destroying instance {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 637.844933] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8cfe0d6-3093-4342-bf96-fe759b6293f8 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.853026] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-59f3a73e-ced5-4077-b911-de1be522459b tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] [instance: d8b0b608-d8ca-45ce-a113-ee96b5ef1a9d] Powering off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 637.853026] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ac2b38f8-4aa3-480f-9e72-ae44224c2ee3 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.859574] env[61839]: DEBUG oslo_vmware.api [None req-59f3a73e-ced5-4077-b911-de1be522459b tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] Waiting for the task: (returnval){ [ 637.859574] env[61839]: value = "task-1314288" [ 637.859574] env[61839]: _type = "Task" [ 637.859574] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 637.866857] env[61839]: DEBUG oslo_vmware.api [None req-59f3a73e-ced5-4077-b911-de1be522459b tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] Task: {'id': task-1314288, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 638.022571] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance 821b784d-dc69-4c54-bccf-76693c34e19d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 638.369528] env[61839]: DEBUG oslo_vmware.api [None req-59f3a73e-ced5-4077-b911-de1be522459b tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] Task: {'id': task-1314288, 'name': PowerOffVM_Task, 'duration_secs': 0.121614} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 638.369830] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-59f3a73e-ced5-4077-b911-de1be522459b tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] [instance: d8b0b608-d8ca-45ce-a113-ee96b5ef1a9d] Powered off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 638.369997] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-59f3a73e-ced5-4077-b911-de1be522459b tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] [instance: d8b0b608-d8ca-45ce-a113-ee96b5ef1a9d] Unregistering the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 638.370288] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3e5d6a6b-ede0-4908-a71f-a17a5065a64b {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.398682] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-59f3a73e-ced5-4077-b911-de1be522459b tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] [instance: d8b0b608-d8ca-45ce-a113-ee96b5ef1a9d] Unregistered the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 638.398771] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-59f3a73e-ced5-4077-b911-de1be522459b tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] [instance: d8b0b608-d8ca-45ce-a113-ee96b5ef1a9d] Deleting contents of the VM from datastore datastore1 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 638.398897] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-59f3a73e-ced5-4077-b911-de1be522459b tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] Deleting the datastore file [datastore1] d8b0b608-d8ca-45ce-a113-ee96b5ef1a9d {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 638.399184] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c1ee21ec-cc8b-4d08-9d6c-a6211b41a1cc {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.405335] env[61839]: DEBUG oslo_vmware.api [None req-59f3a73e-ced5-4077-b911-de1be522459b tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] Waiting for the task: (returnval){ [ 638.405335] env[61839]: value = "task-1314290" [ 638.405335] env[61839]: _type = "Task" [ 638.405335] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 638.413698] env[61839]: DEBUG oslo_vmware.api [None req-59f3a73e-ced5-4077-b911-de1be522459b tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] Task: {'id': task-1314290, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 638.525968] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance e81bf730-9cf6-4728-aae4-4962115f8b6f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 638.915024] env[61839]: DEBUG oslo_vmware.api [None req-59f3a73e-ced5-4077-b911-de1be522459b tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] Task: {'id': task-1314290, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.087036} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 638.915024] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-59f3a73e-ced5-4077-b911-de1be522459b tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] Deleted the datastore file {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 638.915189] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-59f3a73e-ced5-4077-b911-de1be522459b tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] [instance: d8b0b608-d8ca-45ce-a113-ee96b5ef1a9d] Deleted contents of the VM from datastore datastore1 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 638.915366] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-59f3a73e-ced5-4077-b911-de1be522459b tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] [instance: d8b0b608-d8ca-45ce-a113-ee96b5ef1a9d] Instance destroyed {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 638.915552] env[61839]: INFO nova.compute.manager [None req-59f3a73e-ced5-4077-b911-de1be522459b tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] [instance: d8b0b608-d8ca-45ce-a113-ee96b5ef1a9d] Took 1.07 seconds to destroy the instance on the hypervisor. [ 638.915790] env[61839]: DEBUG oslo.service.loopingcall [None req-59f3a73e-ced5-4077-b911-de1be522459b tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 638.915974] env[61839]: DEBUG nova.compute.manager [-] [instance: d8b0b608-d8ca-45ce-a113-ee96b5ef1a9d] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 638.916081] env[61839]: DEBUG nova.network.neutron [-] [instance: d8b0b608-d8ca-45ce-a113-ee96b5ef1a9d] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 638.929710] env[61839]: DEBUG nova.network.neutron [-] [instance: d8b0b608-d8ca-45ce-a113-ee96b5ef1a9d] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 639.029706] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance 697222e0-07e5-4a3d-adbe-d5d815cf4756 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}.
{{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 639.030148] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Total usable vcpus: 48, total allocated vcpus: 8 {{(pid=61839) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 639.030381] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2112MB phys_disk=200GB used_disk=8GB total_vcpus=48 used_vcpus=8 pci_stats=[] {{(pid=61839) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 639.385604] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5366e7d-426c-46b9-b37f-7252fefba50d {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.394030] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7d77e0e-1fed-4fd3-bd26-c842ee16b2f3 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.425608] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0f36395-ca5d-41e3-91dc-727367843d22 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.431968] env[61839]: DEBUG nova.network.neutron [-] [instance: d8b0b608-d8ca-45ce-a113-ee96b5ef1a9d] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 639.434131] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3c80286-8608-4808-8e1d-944a8d07b8b5 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.447677] env[61839]: DEBUG nova.compute.provider_tree [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 639.939706] env[61839]: INFO nova.compute.manager [-] [instance: d8b0b608-d8ca-45ce-a113-ee96b5ef1a9d] Took 1.02 seconds to deallocate network for instance. 
[ 639.951193] env[61839]: DEBUG nova.scheduler.client.report [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 640.457577] env[61839]: DEBUG oslo_concurrency.lockutils [None req-59f3a73e-ced5-4077-b911-de1be522459b tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 640.460398] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61839) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 640.460616] env[61839]: DEBUG oslo_concurrency.lockutils [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 15.067s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 640.460914] env[61839]: DEBUG oslo_concurrency.lockutils [None req-96726cf8-093a-469e-ae4c-d851d34fa48d tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 31.280s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 640.952452] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 640.952781] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Starting heal instance info cache {{(pid=61839) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 641.336304] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fef5187-4b53-4772-a0d3-a52a06a69d9e {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.344123] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-307e6bb9-d501-4633-a8ca-b63b01cb10dd {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.376774] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcd520e8-2af9-4a13-8941-bacf264fe023 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.384721] env[61839]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41356ba2-8dc5-443d-8a0e-2bf8f824861c {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.397980] env[61839]: DEBUG nova.compute.provider_tree [None req-96726cf8-093a-469e-ae4c-d851d34fa48d tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 641.457058] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Didn't find any instances for network info cache update. {{(pid=61839) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10014}} [ 641.457058] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 641.457195] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 641.457311] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 641.457442] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61839) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 641.902793] env[61839]: DEBUG nova.scheduler.client.report [None req-96726cf8-093a-469e-ae4c-d851d34fa48d tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 642.408016] env[61839]: DEBUG oslo_concurrency.lockutils [None req-96726cf8-093a-469e-ae4c-d851d34fa48d tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.947s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 642.408696] env[61839]: ERROR nova.compute.manager [None req-96726cf8-093a-469e-ae4c-d851d34fa48d tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] [instance: d95b34bb-bf0f-4a43-a5ad-6ae7770b606c] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port c9495401-a074-473f-9341-2ef8ba8ae699, please check neutron logs for more information. [ 642.408696] env[61839]: ERROR nova.compute.manager [instance: d95b34bb-bf0f-4a43-a5ad-6ae7770b606c] Traceback (most recent call last): [ 642.408696] env[61839]: ERROR nova.compute.manager [instance: d95b34bb-bf0f-4a43-a5ad-6ae7770b606c] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 642.408696] env[61839]: ERROR nova.compute.manager [instance: d95b34bb-bf0f-4a43-a5ad-6ae7770b606c] self.driver.spawn(context, instance, image_meta, [ 642.408696] env[61839]: ERROR nova.compute.manager [instance: d95b34bb-bf0f-4a43-a5ad-6ae7770b606c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 642.408696] env[61839]: ERROR nova.compute.manager [instance: d95b34bb-bf0f-4a43-a5ad-6ae7770b606c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 642.408696] env[61839]: ERROR nova.compute.manager [instance: d95b34bb-bf0f-4a43-a5ad-6ae7770b606c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 642.408696] env[61839]: ERROR nova.compute.manager [instance: d95b34bb-bf0f-4a43-a5ad-6ae7770b606c] vm_ref = self.build_virtual_machine(instance, [ 642.408696] env[61839]: ERROR nova.compute.manager [instance: d95b34bb-bf0f-4a43-a5ad-6ae7770b606c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 642.408696] env[61839]: ERROR nova.compute.manager [instance: d95b34bb-bf0f-4a43-a5ad-6ae7770b606c] vif_infos = vmwarevif.get_vif_info(self._session, [ 642.408696] env[61839]: ERROR nova.compute.manager [instance: d95b34bb-bf0f-4a43-a5ad-6ae7770b606c] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 642.409053] env[61839]: ERROR nova.compute.manager [instance: d95b34bb-bf0f-4a43-a5ad-6ae7770b606c] for vif in network_info: [ 642.409053] env[61839]: ERROR 
nova.compute.manager [instance: d95b34bb-bf0f-4a43-a5ad-6ae7770b606c] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 642.409053] env[61839]: ERROR nova.compute.manager [instance: d95b34bb-bf0f-4a43-a5ad-6ae7770b606c] return self._sync_wrapper(fn, *args, **kwargs) [ 642.409053] env[61839]: ERROR nova.compute.manager [instance: d95b34bb-bf0f-4a43-a5ad-6ae7770b606c] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 642.409053] env[61839]: ERROR nova.compute.manager [instance: d95b34bb-bf0f-4a43-a5ad-6ae7770b606c] self.wait() [ 642.409053] env[61839]: ERROR nova.compute.manager [instance: d95b34bb-bf0f-4a43-a5ad-6ae7770b606c] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 642.409053] env[61839]: ERROR nova.compute.manager [instance: d95b34bb-bf0f-4a43-a5ad-6ae7770b606c] self[:] = self._gt.wait() [ 642.409053] env[61839]: ERROR nova.compute.manager [instance: d95b34bb-bf0f-4a43-a5ad-6ae7770b606c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 642.409053] env[61839]: ERROR nova.compute.manager [instance: d95b34bb-bf0f-4a43-a5ad-6ae7770b606c] return self._exit_event.wait() [ 642.409053] env[61839]: ERROR nova.compute.manager [instance: d95b34bb-bf0f-4a43-a5ad-6ae7770b606c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 642.409053] env[61839]: ERROR nova.compute.manager [instance: d95b34bb-bf0f-4a43-a5ad-6ae7770b606c] result = hub.switch() [ 642.409053] env[61839]: ERROR nova.compute.manager [instance: d95b34bb-bf0f-4a43-a5ad-6ae7770b606c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 642.409053] env[61839]: ERROR nova.compute.manager [instance: d95b34bb-bf0f-4a43-a5ad-6ae7770b606c] return self.greenlet.switch() [ 642.409415] env[61839]: ERROR nova.compute.manager [instance: d95b34bb-bf0f-4a43-a5ad-6ae7770b606c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 642.409415] env[61839]: ERROR nova.compute.manager [instance: d95b34bb-bf0f-4a43-a5ad-6ae7770b606c] result = function(*args, **kwargs) [ 642.409415] env[61839]: ERROR nova.compute.manager [instance: d95b34bb-bf0f-4a43-a5ad-6ae7770b606c] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 642.409415] env[61839]: ERROR nova.compute.manager [instance: d95b34bb-bf0f-4a43-a5ad-6ae7770b606c] return func(*args, **kwargs) [ 642.409415] env[61839]: ERROR nova.compute.manager [instance: d95b34bb-bf0f-4a43-a5ad-6ae7770b606c] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 642.409415] env[61839]: ERROR nova.compute.manager [instance: d95b34bb-bf0f-4a43-a5ad-6ae7770b606c] raise e [ 642.409415] env[61839]: ERROR nova.compute.manager [instance: d95b34bb-bf0f-4a43-a5ad-6ae7770b606c] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 642.409415] env[61839]: ERROR nova.compute.manager [instance: d95b34bb-bf0f-4a43-a5ad-6ae7770b606c] nwinfo = self.network_api.allocate_for_instance( [ 642.409415] env[61839]: ERROR nova.compute.manager [instance: d95b34bb-bf0f-4a43-a5ad-6ae7770b606c] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 642.409415] env[61839]: ERROR nova.compute.manager [instance: d95b34bb-bf0f-4a43-a5ad-6ae7770b606c] created_port_ids = self._update_ports_for_instance( [ 642.409415] env[61839]: ERROR nova.compute.manager [instance: 
d95b34bb-bf0f-4a43-a5ad-6ae7770b606c] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 642.409415] env[61839]: ERROR nova.compute.manager [instance: d95b34bb-bf0f-4a43-a5ad-6ae7770b606c] with excutils.save_and_reraise_exception(): [ 642.409415] env[61839]: ERROR nova.compute.manager [instance: d95b34bb-bf0f-4a43-a5ad-6ae7770b606c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 642.409807] env[61839]: ERROR nova.compute.manager [instance: d95b34bb-bf0f-4a43-a5ad-6ae7770b606c] self.force_reraise() [ 642.409807] env[61839]: ERROR nova.compute.manager [instance: d95b34bb-bf0f-4a43-a5ad-6ae7770b606c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 642.409807] env[61839]: ERROR nova.compute.manager [instance: d95b34bb-bf0f-4a43-a5ad-6ae7770b606c] raise self.value [ 642.409807] env[61839]: ERROR nova.compute.manager [instance: d95b34bb-bf0f-4a43-a5ad-6ae7770b606c] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 642.409807] env[61839]: ERROR nova.compute.manager [instance: d95b34bb-bf0f-4a43-a5ad-6ae7770b606c] updated_port = self._update_port( [ 642.409807] env[61839]: ERROR nova.compute.manager [instance: d95b34bb-bf0f-4a43-a5ad-6ae7770b606c] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 642.409807] env[61839]: ERROR nova.compute.manager [instance: d95b34bb-bf0f-4a43-a5ad-6ae7770b606c] _ensure_no_port_binding_failure(port) [ 642.409807] env[61839]: ERROR nova.compute.manager [instance: d95b34bb-bf0f-4a43-a5ad-6ae7770b606c] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 642.409807] env[61839]: ERROR nova.compute.manager [instance: d95b34bb-bf0f-4a43-a5ad-6ae7770b606c] raise exception.PortBindingFailed(port_id=port['id']) [ 642.409807] env[61839]: ERROR nova.compute.manager [instance: d95b34bb-bf0f-4a43-a5ad-6ae7770b606c] nova.exception.PortBindingFailed: Binding failed for port c9495401-a074-473f-9341-2ef8ba8ae699, please check neutron logs for more information. [ 642.409807] env[61839]: ERROR nova.compute.manager [instance: d95b34bb-bf0f-4a43-a5ad-6ae7770b606c] [ 642.410178] env[61839]: DEBUG nova.compute.utils [None req-96726cf8-093a-469e-ae4c-d851d34fa48d tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] [instance: d95b34bb-bf0f-4a43-a5ad-6ae7770b606c] Binding failed for port c9495401-a074-473f-9341-2ef8ba8ae699, please check neutron logs for more information. 
{{(pid=61839) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 642.411048] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3f01d48c-4eb1-4941-a48e-1320be5c5916 tempest-FloatingIPsAssociationTestJSON-751321575 tempest-FloatingIPsAssociationTestJSON-751321575-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 31.222s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 642.413568] env[61839]: DEBUG nova.compute.manager [None req-96726cf8-093a-469e-ae4c-d851d34fa48d tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] [instance: d95b34bb-bf0f-4a43-a5ad-6ae7770b606c] Build of instance d95b34bb-bf0f-4a43-a5ad-6ae7770b606c was re-scheduled: Binding failed for port c9495401-a074-473f-9341-2ef8ba8ae699, please check neutron logs for more information. {{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 642.414053] env[61839]: DEBUG nova.compute.manager [None req-96726cf8-093a-469e-ae4c-d851d34fa48d tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] [instance: d95b34bb-bf0f-4a43-a5ad-6ae7770b606c] Unplugging VIFs for instance {{(pid=61839) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 642.414294] env[61839]: DEBUG oslo_concurrency.lockutils [None req-96726cf8-093a-469e-ae4c-d851d34fa48d tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] Acquiring lock "refresh_cache-d95b34bb-bf0f-4a43-a5ad-6ae7770b606c" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 642.414443] env[61839]: DEBUG oslo_concurrency.lockutils [None req-96726cf8-093a-469e-ae4c-d851d34fa48d tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] Acquired lock "refresh_cache-d95b34bb-bf0f-4a43-a5ad-6ae7770b606c" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 642.414643] env[61839]: DEBUG nova.network.neutron [None req-96726cf8-093a-469e-ae4c-d851d34fa48d tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] [instance: d95b34bb-bf0f-4a43-a5ad-6ae7770b606c] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 642.944164] env[61839]: DEBUG nova.network.neutron [None req-96726cf8-093a-469e-ae4c-d851d34fa48d tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] [instance: d95b34bb-bf0f-4a43-a5ad-6ae7770b606c] Instance cache missing network info. 
{{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 643.036253] env[61839]: DEBUG nova.network.neutron [None req-96726cf8-093a-469e-ae4c-d851d34fa48d tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] [instance: d95b34bb-bf0f-4a43-a5ad-6ae7770b606c] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 643.279474] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-630e4d49-335c-40d3-b40e-c29a6829a930 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.286383] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16c6fd1d-a5ec-435c-b5ab-589f48777db9 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.317108] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0145ba5b-175b-4750-aea1-cd6ad477292c {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.325340] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2124c65-8dfa-4672-874b-18bfd3a39b16 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.339704] env[61839]: DEBUG nova.compute.provider_tree [None req-3f01d48c-4eb1-4941-a48e-1320be5c5916 tempest-FloatingIPsAssociationTestJSON-751321575 tempest-FloatingIPsAssociationTestJSON-751321575-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 643.539528] env[61839]: DEBUG oslo_concurrency.lockutils [None req-96726cf8-093a-469e-ae4c-d851d34fa48d tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] Releasing lock "refresh_cache-d95b34bb-bf0f-4a43-a5ad-6ae7770b606c" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 643.539822] env[61839]: DEBUG nova.compute.manager [None req-96726cf8-093a-469e-ae4c-d851d34fa48d tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged. 
{{(pid=61839) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 643.539952] env[61839]: DEBUG nova.compute.manager [None req-96726cf8-093a-469e-ae4c-d851d34fa48d tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] [instance: d95b34bb-bf0f-4a43-a5ad-6ae7770b606c] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 643.540136] env[61839]: DEBUG nova.network.neutron [None req-96726cf8-093a-469e-ae4c-d851d34fa48d tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] [instance: d95b34bb-bf0f-4a43-a5ad-6ae7770b606c] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 643.556716] env[61839]: DEBUG nova.network.neutron [None req-96726cf8-093a-469e-ae4c-d851d34fa48d tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] [instance: d95b34bb-bf0f-4a43-a5ad-6ae7770b606c] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 643.842669] env[61839]: DEBUG nova.scheduler.client.report [None req-3f01d48c-4eb1-4941-a48e-1320be5c5916 tempest-FloatingIPsAssociationTestJSON-751321575 tempest-FloatingIPsAssociationTestJSON-751321575-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 644.059067] env[61839]: DEBUG nova.network.neutron [None req-96726cf8-093a-469e-ae4c-d851d34fa48d tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] [instance: d95b34bb-bf0f-4a43-a5ad-6ae7770b606c] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 644.349044] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3f01d48c-4eb1-4941-a48e-1320be5c5916 tempest-FloatingIPsAssociationTestJSON-751321575 tempest-FloatingIPsAssociationTestJSON-751321575-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.937s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 644.349044] env[61839]: ERROR nova.compute.manager [None req-3f01d48c-4eb1-4941-a48e-1320be5c5916 tempest-FloatingIPsAssociationTestJSON-751321575 tempest-FloatingIPsAssociationTestJSON-751321575-project-member] [instance: 916834d8-4819-4167-8774-b0a665021ef8] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 245851ea-1302-48e0-a961-6cf5b78f0b43, please check neutron logs for more information. 
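
Every failed build in this section takes the same path: Neutron accepts the port update, but the port it returns carries a failed binding, so _ensure_no_port_binding_failure() (nova/network/neutron.py:294 in the tracebacks) raises nova.exception.PortBindingFailed. The exception unwinds through the _allocate_network_async greenthread, the compute_resources claim is aborted, and the build is re-scheduled, which is the sequence of lock, deallocation, and placement records that follows each traceback. Below is a minimal, self-contained Python sketch of that check, reconstructed from the tracebacks; it is not Nova's actual source, and the 'binding_failed' sentinel for binding:vif_type is an assumption about how Neutron marks a failed binding:

# Illustrative sketch only; names mirror the tracebacks above, but this is
# not Nova's code. Assumption: Neutron reports a failed binding by setting
# the port's binding:vif_type attribute to the sentinel 'binding_failed'.

class PortBindingFailed(Exception):
    """Reproduces the message format seen in the ERROR records above."""

    def __init__(self, port_id):
        super().__init__(
            "Binding failed for port %s, please check neutron logs "
            "for more information." % port_id)

VIF_TYPE_BINDING_FAILED = 'binding_failed'  # assumed Neutron sentinel value

def _ensure_no_port_binding_failure(port):
    # Fail fast when a port returned by Neutron carries a failed binding.
    if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
        raise PortBindingFailed(port_id=port['id'])

if __name__ == '__main__':
    # Example using a port ID taken from this log.
    port = {'id': '245851ea-1302-48e0-a961-6cf5b78f0b43',
            'binding:vif_type': VIF_TYPE_BINDING_FAILED}
    try:
        _ensure_no_port_binding_failure(port)
    except PortBindingFailed as exc:
        print(exc)  # matches the "Binding failed for port ..." text above

Once this exception escapes _build_and_run_instance, the compute manager logs the traceback, releases its resource claim, and re-schedules the instance, as the records below show.
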
[ 644.349044] env[61839]: ERROR nova.compute.manager [instance: 916834d8-4819-4167-8774-b0a665021ef8] Traceback (most recent call last): [ 644.349044] env[61839]: ERROR nova.compute.manager [instance: 916834d8-4819-4167-8774-b0a665021ef8] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 644.349044] env[61839]: ERROR nova.compute.manager [instance: 916834d8-4819-4167-8774-b0a665021ef8] self.driver.spawn(context, instance, image_meta, [ 644.349044] env[61839]: ERROR nova.compute.manager [instance: 916834d8-4819-4167-8774-b0a665021ef8] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 644.349044] env[61839]: ERROR nova.compute.manager [instance: 916834d8-4819-4167-8774-b0a665021ef8] self._vmops.spawn(context, instance, image_meta, injected_files, [ 644.349044] env[61839]: ERROR nova.compute.manager [instance: 916834d8-4819-4167-8774-b0a665021ef8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 644.349044] env[61839]: ERROR nova.compute.manager [instance: 916834d8-4819-4167-8774-b0a665021ef8] vm_ref = self.build_virtual_machine(instance, [ 644.349554] env[61839]: ERROR nova.compute.manager [instance: 916834d8-4819-4167-8774-b0a665021ef8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 644.349554] env[61839]: ERROR nova.compute.manager [instance: 916834d8-4819-4167-8774-b0a665021ef8] vif_infos = vmwarevif.get_vif_info(self._session, [ 644.349554] env[61839]: ERROR nova.compute.manager [instance: 916834d8-4819-4167-8774-b0a665021ef8] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 644.349554] env[61839]: ERROR nova.compute.manager [instance: 916834d8-4819-4167-8774-b0a665021ef8] for vif in network_info: [ 644.349554] env[61839]: ERROR nova.compute.manager [instance: 916834d8-4819-4167-8774-b0a665021ef8] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 644.349554] env[61839]: ERROR nova.compute.manager [instance: 916834d8-4819-4167-8774-b0a665021ef8] return self._sync_wrapper(fn, *args, **kwargs) [ 644.349554] env[61839]: ERROR nova.compute.manager [instance: 916834d8-4819-4167-8774-b0a665021ef8] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 644.349554] env[61839]: ERROR nova.compute.manager [instance: 916834d8-4819-4167-8774-b0a665021ef8] self.wait() [ 644.349554] env[61839]: ERROR nova.compute.manager [instance: 916834d8-4819-4167-8774-b0a665021ef8] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 644.349554] env[61839]: ERROR nova.compute.manager [instance: 916834d8-4819-4167-8774-b0a665021ef8] self[:] = self._gt.wait() [ 644.349554] env[61839]: ERROR nova.compute.manager [instance: 916834d8-4819-4167-8774-b0a665021ef8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 644.349554] env[61839]: ERROR nova.compute.manager [instance: 916834d8-4819-4167-8774-b0a665021ef8] return self._exit_event.wait() [ 644.349554] env[61839]: ERROR nova.compute.manager [instance: 916834d8-4819-4167-8774-b0a665021ef8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 644.349862] env[61839]: ERROR nova.compute.manager [instance: 916834d8-4819-4167-8774-b0a665021ef8] result = hub.switch() [ 644.349862] env[61839]: ERROR nova.compute.manager [instance: 916834d8-4819-4167-8774-b0a665021ef8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
644.349862] env[61839]: ERROR nova.compute.manager [instance: 916834d8-4819-4167-8774-b0a665021ef8] return self.greenlet.switch() [ 644.349862] env[61839]: ERROR nova.compute.manager [instance: 916834d8-4819-4167-8774-b0a665021ef8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 644.349862] env[61839]: ERROR nova.compute.manager [instance: 916834d8-4819-4167-8774-b0a665021ef8] result = function(*args, **kwargs) [ 644.349862] env[61839]: ERROR nova.compute.manager [instance: 916834d8-4819-4167-8774-b0a665021ef8] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 644.349862] env[61839]: ERROR nova.compute.manager [instance: 916834d8-4819-4167-8774-b0a665021ef8] return func(*args, **kwargs) [ 644.349862] env[61839]: ERROR nova.compute.manager [instance: 916834d8-4819-4167-8774-b0a665021ef8] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 644.349862] env[61839]: ERROR nova.compute.manager [instance: 916834d8-4819-4167-8774-b0a665021ef8] raise e [ 644.349862] env[61839]: ERROR nova.compute.manager [instance: 916834d8-4819-4167-8774-b0a665021ef8] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 644.349862] env[61839]: ERROR nova.compute.manager [instance: 916834d8-4819-4167-8774-b0a665021ef8] nwinfo = self.network_api.allocate_for_instance( [ 644.349862] env[61839]: ERROR nova.compute.manager [instance: 916834d8-4819-4167-8774-b0a665021ef8] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 644.349862] env[61839]: ERROR nova.compute.manager [instance: 916834d8-4819-4167-8774-b0a665021ef8] created_port_ids = self._update_ports_for_instance( [ 644.350252] env[61839]: ERROR nova.compute.manager [instance: 916834d8-4819-4167-8774-b0a665021ef8] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 644.350252] env[61839]: ERROR nova.compute.manager [instance: 916834d8-4819-4167-8774-b0a665021ef8] with excutils.save_and_reraise_exception(): [ 644.350252] env[61839]: ERROR nova.compute.manager [instance: 916834d8-4819-4167-8774-b0a665021ef8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 644.350252] env[61839]: ERROR nova.compute.manager [instance: 916834d8-4819-4167-8774-b0a665021ef8] self.force_reraise() [ 644.350252] env[61839]: ERROR nova.compute.manager [instance: 916834d8-4819-4167-8774-b0a665021ef8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 644.350252] env[61839]: ERROR nova.compute.manager [instance: 916834d8-4819-4167-8774-b0a665021ef8] raise self.value [ 644.350252] env[61839]: ERROR nova.compute.manager [instance: 916834d8-4819-4167-8774-b0a665021ef8] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 644.350252] env[61839]: ERROR nova.compute.manager [instance: 916834d8-4819-4167-8774-b0a665021ef8] updated_port = self._update_port( [ 644.350252] env[61839]: ERROR nova.compute.manager [instance: 916834d8-4819-4167-8774-b0a665021ef8] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 644.350252] env[61839]: ERROR nova.compute.manager [instance: 916834d8-4819-4167-8774-b0a665021ef8] _ensure_no_port_binding_failure(port) [ 644.350252] env[61839]: ERROR nova.compute.manager [instance: 916834d8-4819-4167-8774-b0a665021ef8] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 644.350252] env[61839]: ERROR nova.compute.manager [instance: 916834d8-4819-4167-8774-b0a665021ef8] raise exception.PortBindingFailed(port_id=port['id']) [ 644.350588] env[61839]: ERROR nova.compute.manager [instance: 916834d8-4819-4167-8774-b0a665021ef8] nova.exception.PortBindingFailed: Binding failed for port 245851ea-1302-48e0-a961-6cf5b78f0b43, please check neutron logs for more information. [ 644.350588] env[61839]: ERROR nova.compute.manager [instance: 916834d8-4819-4167-8774-b0a665021ef8] [ 644.350588] env[61839]: DEBUG nova.compute.utils [None req-3f01d48c-4eb1-4941-a48e-1320be5c5916 tempest-FloatingIPsAssociationTestJSON-751321575 tempest-FloatingIPsAssociationTestJSON-751321575-project-member] [instance: 916834d8-4819-4167-8774-b0a665021ef8] Binding failed for port 245851ea-1302-48e0-a961-6cf5b78f0b43, please check neutron logs for more information. {{(pid=61839) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 644.350588] env[61839]: DEBUG oslo_concurrency.lockutils [None req-eb9c6f05-d841-4e49-8f1a-c00c24c2a91c tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 30.824s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 644.352932] env[61839]: DEBUG nova.compute.manager [None req-3f01d48c-4eb1-4941-a48e-1320be5c5916 tempest-FloatingIPsAssociationTestJSON-751321575 tempest-FloatingIPsAssociationTestJSON-751321575-project-member] [instance: 916834d8-4819-4167-8774-b0a665021ef8] Build of instance 916834d8-4819-4167-8774-b0a665021ef8 was re-scheduled: Binding failed for port 245851ea-1302-48e0-a961-6cf5b78f0b43, please check neutron logs for more information. 
{{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 644.353327] env[61839]: DEBUG nova.compute.manager [None req-3f01d48c-4eb1-4941-a48e-1320be5c5916 tempest-FloatingIPsAssociationTestJSON-751321575 tempest-FloatingIPsAssociationTestJSON-751321575-project-member] [instance: 916834d8-4819-4167-8774-b0a665021ef8] Unplugging VIFs for instance {{(pid=61839) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 644.353568] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3f01d48c-4eb1-4941-a48e-1320be5c5916 tempest-FloatingIPsAssociationTestJSON-751321575 tempest-FloatingIPsAssociationTestJSON-751321575-project-member] Acquiring lock "refresh_cache-916834d8-4819-4167-8774-b0a665021ef8" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 644.353715] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3f01d48c-4eb1-4941-a48e-1320be5c5916 tempest-FloatingIPsAssociationTestJSON-751321575 tempest-FloatingIPsAssociationTestJSON-751321575-project-member] Acquired lock "refresh_cache-916834d8-4819-4167-8774-b0a665021ef8" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 644.353876] env[61839]: DEBUG nova.network.neutron [None req-3f01d48c-4eb1-4941-a48e-1320be5c5916 tempest-FloatingIPsAssociationTestJSON-751321575 tempest-FloatingIPsAssociationTestJSON-751321575-project-member] [instance: 916834d8-4819-4167-8774-b0a665021ef8] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 644.561495] env[61839]: INFO nova.compute.manager [None req-96726cf8-093a-469e-ae4c-d851d34fa48d tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] [instance: d95b34bb-bf0f-4a43-a5ad-6ae7770b606c] Took 1.02 seconds to deallocate network for instance. [ 644.874368] env[61839]: DEBUG nova.network.neutron [None req-3f01d48c-4eb1-4941-a48e-1320be5c5916 tempest-FloatingIPsAssociationTestJSON-751321575 tempest-FloatingIPsAssociationTestJSON-751321575-project-member] [instance: 916834d8-4819-4167-8774-b0a665021ef8] Instance cache missing network info. 
{{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 644.993338] env[61839]: DEBUG nova.network.neutron [None req-3f01d48c-4eb1-4941-a48e-1320be5c5916 tempest-FloatingIPsAssociationTestJSON-751321575 tempest-FloatingIPsAssociationTestJSON-751321575-project-member] [instance: 916834d8-4819-4167-8774-b0a665021ef8] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 645.303335] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05884ff9-794d-47ce-8b9f-3b07ca5819ae {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.311056] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3dcdede-b0d6-42d4-af42-b469c11538f1 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.342659] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d6dda5b-673e-464d-a21a-38aa5e3b3dbf {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.350424] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-154beaa2-8077-408c-9d99-d32a655faf91 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.365626] env[61839]: DEBUG nova.compute.provider_tree [None req-eb9c6f05-d841-4e49-8f1a-c00c24c2a91c tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 645.495649] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3f01d48c-4eb1-4941-a48e-1320be5c5916 tempest-FloatingIPsAssociationTestJSON-751321575 tempest-FloatingIPsAssociationTestJSON-751321575-project-member] Releasing lock "refresh_cache-916834d8-4819-4167-8774-b0a665021ef8" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 645.495897] env[61839]: DEBUG nova.compute.manager [None req-3f01d48c-4eb1-4941-a48e-1320be5c5916 tempest-FloatingIPsAssociationTestJSON-751321575 tempest-FloatingIPsAssociationTestJSON-751321575-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged. 
{{(pid=61839) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 645.496107] env[61839]: DEBUG nova.compute.manager [None req-3f01d48c-4eb1-4941-a48e-1320be5c5916 tempest-FloatingIPsAssociationTestJSON-751321575 tempest-FloatingIPsAssociationTestJSON-751321575-project-member] [instance: 916834d8-4819-4167-8774-b0a665021ef8] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 645.496284] env[61839]: DEBUG nova.network.neutron [None req-3f01d48c-4eb1-4941-a48e-1320be5c5916 tempest-FloatingIPsAssociationTestJSON-751321575 tempest-FloatingIPsAssociationTestJSON-751321575-project-member] [instance: 916834d8-4819-4167-8774-b0a665021ef8] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 645.518708] env[61839]: DEBUG nova.network.neutron [None req-3f01d48c-4eb1-4941-a48e-1320be5c5916 tempest-FloatingIPsAssociationTestJSON-751321575 tempest-FloatingIPsAssociationTestJSON-751321575-project-member] [instance: 916834d8-4819-4167-8774-b0a665021ef8] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 645.589521] env[61839]: INFO nova.scheduler.client.report [None req-96726cf8-093a-469e-ae4c-d851d34fa48d tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] Deleted allocations for instance d95b34bb-bf0f-4a43-a5ad-6ae7770b606c [ 645.869144] env[61839]: DEBUG nova.scheduler.client.report [None req-eb9c6f05-d841-4e49-8f1a-c00c24c2a91c tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 646.020800] env[61839]: DEBUG nova.network.neutron [None req-3f01d48c-4eb1-4941-a48e-1320be5c5916 tempest-FloatingIPsAssociationTestJSON-751321575 tempest-FloatingIPsAssociationTestJSON-751321575-project-member] [instance: 916834d8-4819-4167-8774-b0a665021ef8] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 646.096944] env[61839]: DEBUG oslo_concurrency.lockutils [None req-96726cf8-093a-469e-ae4c-d851d34fa48d tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] Lock "d95b34bb-bf0f-4a43-a5ad-6ae7770b606c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 98.126s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 646.373523] env[61839]: DEBUG oslo_concurrency.lockutils [None req-eb9c6f05-d841-4e49-8f1a-c00c24c2a91c tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.023s {{(pid=61839) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 646.374129] env[61839]: ERROR nova.compute.manager [None req-eb9c6f05-d841-4e49-8f1a-c00c24c2a91c tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] [instance: 7fdd773e-3a96-4728-b162-0227a415bc96] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port daf6e1bb-8088-4f75-aea6-a7512ec93136, please check neutron logs for more information. [ 646.374129] env[61839]: ERROR nova.compute.manager [instance: 7fdd773e-3a96-4728-b162-0227a415bc96] Traceback (most recent call last): [ 646.374129] env[61839]: ERROR nova.compute.manager [instance: 7fdd773e-3a96-4728-b162-0227a415bc96] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 646.374129] env[61839]: ERROR nova.compute.manager [instance: 7fdd773e-3a96-4728-b162-0227a415bc96] self.driver.spawn(context, instance, image_meta, [ 646.374129] env[61839]: ERROR nova.compute.manager [instance: 7fdd773e-3a96-4728-b162-0227a415bc96] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 646.374129] env[61839]: ERROR nova.compute.manager [instance: 7fdd773e-3a96-4728-b162-0227a415bc96] self._vmops.spawn(context, instance, image_meta, injected_files, [ 646.374129] env[61839]: ERROR nova.compute.manager [instance: 7fdd773e-3a96-4728-b162-0227a415bc96] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 646.374129] env[61839]: ERROR nova.compute.manager [instance: 7fdd773e-3a96-4728-b162-0227a415bc96] vm_ref = self.build_virtual_machine(instance, [ 646.374129] env[61839]: ERROR nova.compute.manager [instance: 7fdd773e-3a96-4728-b162-0227a415bc96] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 646.374129] env[61839]: ERROR nova.compute.manager [instance: 7fdd773e-3a96-4728-b162-0227a415bc96] vif_infos = vmwarevif.get_vif_info(self._session, [ 646.374129] env[61839]: ERROR nova.compute.manager [instance: 7fdd773e-3a96-4728-b162-0227a415bc96] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 646.374476] env[61839]: ERROR nova.compute.manager [instance: 7fdd773e-3a96-4728-b162-0227a415bc96] for vif in network_info: [ 646.374476] env[61839]: ERROR nova.compute.manager [instance: 7fdd773e-3a96-4728-b162-0227a415bc96] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 646.374476] env[61839]: ERROR nova.compute.manager [instance: 7fdd773e-3a96-4728-b162-0227a415bc96] return self._sync_wrapper(fn, *args, **kwargs) [ 646.374476] env[61839]: ERROR nova.compute.manager [instance: 7fdd773e-3a96-4728-b162-0227a415bc96] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 646.374476] env[61839]: ERROR nova.compute.manager [instance: 7fdd773e-3a96-4728-b162-0227a415bc96] self.wait() [ 646.374476] env[61839]: ERROR nova.compute.manager [instance: 7fdd773e-3a96-4728-b162-0227a415bc96] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 646.374476] env[61839]: ERROR nova.compute.manager [instance: 7fdd773e-3a96-4728-b162-0227a415bc96] self[:] = self._gt.wait() [ 646.374476] env[61839]: ERROR nova.compute.manager [instance: 7fdd773e-3a96-4728-b162-0227a415bc96] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 646.374476] env[61839]: ERROR nova.compute.manager [instance: 7fdd773e-3a96-4728-b162-0227a415bc96] return self._exit_event.wait() [ 646.374476] 
env[61839]: ERROR nova.compute.manager [instance: 7fdd773e-3a96-4728-b162-0227a415bc96] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 646.374476] env[61839]: ERROR nova.compute.manager [instance: 7fdd773e-3a96-4728-b162-0227a415bc96] result = hub.switch() [ 646.374476] env[61839]: ERROR nova.compute.manager [instance: 7fdd773e-3a96-4728-b162-0227a415bc96] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 646.374476] env[61839]: ERROR nova.compute.manager [instance: 7fdd773e-3a96-4728-b162-0227a415bc96] return self.greenlet.switch() [ 646.374841] env[61839]: ERROR nova.compute.manager [instance: 7fdd773e-3a96-4728-b162-0227a415bc96] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 646.374841] env[61839]: ERROR nova.compute.manager [instance: 7fdd773e-3a96-4728-b162-0227a415bc96] result = function(*args, **kwargs) [ 646.374841] env[61839]: ERROR nova.compute.manager [instance: 7fdd773e-3a96-4728-b162-0227a415bc96] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 646.374841] env[61839]: ERROR nova.compute.manager [instance: 7fdd773e-3a96-4728-b162-0227a415bc96] return func(*args, **kwargs) [ 646.374841] env[61839]: ERROR nova.compute.manager [instance: 7fdd773e-3a96-4728-b162-0227a415bc96] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 646.374841] env[61839]: ERROR nova.compute.manager [instance: 7fdd773e-3a96-4728-b162-0227a415bc96] raise e [ 646.374841] env[61839]: ERROR nova.compute.manager [instance: 7fdd773e-3a96-4728-b162-0227a415bc96] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 646.374841] env[61839]: ERROR nova.compute.manager [instance: 7fdd773e-3a96-4728-b162-0227a415bc96] nwinfo = self.network_api.allocate_for_instance( [ 646.374841] env[61839]: ERROR nova.compute.manager [instance: 7fdd773e-3a96-4728-b162-0227a415bc96] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 646.374841] env[61839]: ERROR nova.compute.manager [instance: 7fdd773e-3a96-4728-b162-0227a415bc96] created_port_ids = self._update_ports_for_instance( [ 646.374841] env[61839]: ERROR nova.compute.manager [instance: 7fdd773e-3a96-4728-b162-0227a415bc96] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 646.374841] env[61839]: ERROR nova.compute.manager [instance: 7fdd773e-3a96-4728-b162-0227a415bc96] with excutils.save_and_reraise_exception(): [ 646.374841] env[61839]: ERROR nova.compute.manager [instance: 7fdd773e-3a96-4728-b162-0227a415bc96] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 646.375220] env[61839]: ERROR nova.compute.manager [instance: 7fdd773e-3a96-4728-b162-0227a415bc96] self.force_reraise() [ 646.375220] env[61839]: ERROR nova.compute.manager [instance: 7fdd773e-3a96-4728-b162-0227a415bc96] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 646.375220] env[61839]: ERROR nova.compute.manager [instance: 7fdd773e-3a96-4728-b162-0227a415bc96] raise self.value [ 646.375220] env[61839]: ERROR nova.compute.manager [instance: 7fdd773e-3a96-4728-b162-0227a415bc96] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 646.375220] env[61839]: ERROR nova.compute.manager [instance: 7fdd773e-3a96-4728-b162-0227a415bc96] updated_port 
= self._update_port( [ 646.375220] env[61839]: ERROR nova.compute.manager [instance: 7fdd773e-3a96-4728-b162-0227a415bc96] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 646.375220] env[61839]: ERROR nova.compute.manager [instance: 7fdd773e-3a96-4728-b162-0227a415bc96] _ensure_no_port_binding_failure(port) [ 646.375220] env[61839]: ERROR nova.compute.manager [instance: 7fdd773e-3a96-4728-b162-0227a415bc96] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 646.375220] env[61839]: ERROR nova.compute.manager [instance: 7fdd773e-3a96-4728-b162-0227a415bc96] raise exception.PortBindingFailed(port_id=port['id']) [ 646.375220] env[61839]: ERROR nova.compute.manager [instance: 7fdd773e-3a96-4728-b162-0227a415bc96] nova.exception.PortBindingFailed: Binding failed for port daf6e1bb-8088-4f75-aea6-a7512ec93136, please check neutron logs for more information. [ 646.375220] env[61839]: ERROR nova.compute.manager [instance: 7fdd773e-3a96-4728-b162-0227a415bc96] [ 646.375531] env[61839]: DEBUG nova.compute.utils [None req-eb9c6f05-d841-4e49-8f1a-c00c24c2a91c tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] [instance: 7fdd773e-3a96-4728-b162-0227a415bc96] Binding failed for port daf6e1bb-8088-4f75-aea6-a7512ec93136, please check neutron logs for more information. {{(pid=61839) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 646.376120] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3986eaa2-d0de-4161-82e0-803ae69da9c7 tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 30.796s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 646.379094] env[61839]: DEBUG nova.compute.manager [None req-eb9c6f05-d841-4e49-8f1a-c00c24c2a91c tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] [instance: 7fdd773e-3a96-4728-b162-0227a415bc96] Build of instance 7fdd773e-3a96-4728-b162-0227a415bc96 was re-scheduled: Binding failed for port daf6e1bb-8088-4f75-aea6-a7512ec93136, please check neutron logs for more information. 
{{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 646.379508] env[61839]: DEBUG nova.compute.manager [None req-eb9c6f05-d841-4e49-8f1a-c00c24c2a91c tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] [instance: 7fdd773e-3a96-4728-b162-0227a415bc96] Unplugging VIFs for instance {{(pid=61839) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 646.379734] env[61839]: DEBUG oslo_concurrency.lockutils [None req-eb9c6f05-d841-4e49-8f1a-c00c24c2a91c tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] Acquiring lock "refresh_cache-7fdd773e-3a96-4728-b162-0227a415bc96" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 646.379876] env[61839]: DEBUG oslo_concurrency.lockutils [None req-eb9c6f05-d841-4e49-8f1a-c00c24c2a91c tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] Acquired lock "refresh_cache-7fdd773e-3a96-4728-b162-0227a415bc96" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 646.380163] env[61839]: DEBUG nova.network.neutron [None req-eb9c6f05-d841-4e49-8f1a-c00c24c2a91c tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] [instance: 7fdd773e-3a96-4728-b162-0227a415bc96] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 646.489954] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 646.490207] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 646.523087] env[61839]: INFO nova.compute.manager [None req-3f01d48c-4eb1-4941-a48e-1320be5c5916 tempest-FloatingIPsAssociationTestJSON-751321575 tempest-FloatingIPsAssociationTestJSON-751321575-project-member] [instance: 916834d8-4819-4167-8774-b0a665021ef8] Took 1.03 seconds to deallocate network for instance. [ 646.601033] env[61839]: DEBUG nova.compute.manager [None req-dd05ef5d-2024-4ee1-8efc-49a773076423 tempest-ServerAddressesTestJSON-1080876972 tempest-ServerAddressesTestJSON-1080876972-project-member] [instance: df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e] Starting instance... {{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 646.902238] env[61839]: DEBUG nova.network.neutron [None req-eb9c6f05-d841-4e49-8f1a-c00c24c2a91c tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] [instance: 7fdd773e-3a96-4728-b162-0227a415bc96] Instance cache missing network info. 
{{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 646.972190] env[61839]: DEBUG nova.network.neutron [None req-eb9c6f05-d841-4e49-8f1a-c00c24c2a91c tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] [instance: 7fdd773e-3a96-4728-b162-0227a415bc96] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 647.117683] env[61839]: DEBUG oslo_concurrency.lockutils [None req-dd05ef5d-2024-4ee1-8efc-49a773076423 tempest-ServerAddressesTestJSON-1080876972 tempest-ServerAddressesTestJSON-1080876972-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 647.253957] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bdae12a-24f4-4cf7-a2b1-acc3fb9950a6 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.261573] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06366b55-43cc-40fc-9ee2-46b1382f7d52 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.291975] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11e1bd37-a8c2-44d4-b2ac-fe4cccf63ecb {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.299269] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c73bd900-8263-40e1-a89c-dedbc0694aca {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.311885] env[61839]: DEBUG nova.compute.provider_tree [None req-3986eaa2-d0de-4161-82e0-803ae69da9c7 tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 647.474365] env[61839]: DEBUG oslo_concurrency.lockutils [None req-eb9c6f05-d841-4e49-8f1a-c00c24c2a91c tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] Releasing lock "refresh_cache-7fdd773e-3a96-4728-b162-0227a415bc96" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 647.474624] env[61839]: DEBUG nova.compute.manager [None req-eb9c6f05-d841-4e49-8f1a-c00c24c2a91c tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged. 
{{(pid=61839) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 647.474825] env[61839]: DEBUG nova.compute.manager [None req-eb9c6f05-d841-4e49-8f1a-c00c24c2a91c tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] [instance: 7fdd773e-3a96-4728-b162-0227a415bc96] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 647.474990] env[61839]: DEBUG nova.network.neutron [None req-eb9c6f05-d841-4e49-8f1a-c00c24c2a91c tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] [instance: 7fdd773e-3a96-4728-b162-0227a415bc96] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 647.484380] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 647.488922] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 647.489077] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Starting heal instance info cache {{(pid=61839) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 647.489198] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Rebuilding the list of instances to heal {{(pid=61839) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 647.499393] env[61839]: DEBUG nova.network.neutron [None req-eb9c6f05-d841-4e49-8f1a-c00c24c2a91c tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] [instance: 7fdd773e-3a96-4728-b162-0227a415bc96] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 647.550327] env[61839]: INFO nova.scheduler.client.report [None req-3f01d48c-4eb1-4941-a48e-1320be5c5916 tempest-FloatingIPsAssociationTestJSON-751321575 tempest-FloatingIPsAssociationTestJSON-751321575-project-member] Deleted allocations for instance 916834d8-4819-4167-8774-b0a665021ef8 [ 647.817019] env[61839]: DEBUG nova.scheduler.client.report [None req-3986eaa2-d0de-4161-82e0-803ae69da9c7 tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 647.993345] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] [instance: 86a1fc77-26d9-44c7-8f1f-771315769619] Skipping network cache update for instance because it is Building. 
{{(pid=61839) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 647.993537] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] [instance: 7d9a24e0-c265-4255-964f-54c971c02ded] Skipping network cache update for instance because it is Building. {{(pid=61839) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 647.993647] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] [instance: 5a130776-5e5f-4eec-8574-08aa1f1ef97a] Skipping network cache update for instance because it is Building. {{(pid=61839) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 648.001412] env[61839]: DEBUG nova.network.neutron [None req-eb9c6f05-d841-4e49-8f1a-c00c24c2a91c tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] [instance: 7fdd773e-3a96-4728-b162-0227a415bc96] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 648.009010] env[61839]: DEBUG oslo_concurrency.lockutils [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Acquiring lock "refresh_cache-d8b0b608-d8ca-45ce-a113-ee96b5ef1a9d" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 648.009156] env[61839]: DEBUG oslo_concurrency.lockutils [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Acquired lock "refresh_cache-d8b0b608-d8ca-45ce-a113-ee96b5ef1a9d" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 648.009303] env[61839]: DEBUG nova.network.neutron [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] [instance: d8b0b608-d8ca-45ce-a113-ee96b5ef1a9d] Forcefully refreshing network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 648.009460] env[61839]: DEBUG nova.objects.instance [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Lazy-loading 'info_cache' on Instance uuid d8b0b608-d8ca-45ce-a113-ee96b5ef1a9d {{(pid=61839) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 648.062428] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3f01d48c-4eb1-4941-a48e-1320be5c5916 tempest-FloatingIPsAssociationTestJSON-751321575 tempest-FloatingIPsAssociationTestJSON-751321575-project-member] Lock "916834d8-4819-4167-8774-b0a665021ef8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 99.428s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 648.321321] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3986eaa2-d0de-4161-82e0-803ae69da9c7 tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.945s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 648.321836] env[61839]: ERROR nova.compute.manager [None req-3986eaa2-d0de-4161-82e0-803ae69da9c7 tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] [instance: f8b36479-70a1-4f4e-84f4-e3baf9a56c45] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 
6885c1b8-b5bc-4e67-963a-1bc48d70db6b, please check neutron logs for more information. [ 648.321836] env[61839]: ERROR nova.compute.manager [instance: f8b36479-70a1-4f4e-84f4-e3baf9a56c45] Traceback (most recent call last): [ 648.321836] env[61839]: ERROR nova.compute.manager [instance: f8b36479-70a1-4f4e-84f4-e3baf9a56c45] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 648.321836] env[61839]: ERROR nova.compute.manager [instance: f8b36479-70a1-4f4e-84f4-e3baf9a56c45] self.driver.spawn(context, instance, image_meta, [ 648.321836] env[61839]: ERROR nova.compute.manager [instance: f8b36479-70a1-4f4e-84f4-e3baf9a56c45] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 648.321836] env[61839]: ERROR nova.compute.manager [instance: f8b36479-70a1-4f4e-84f4-e3baf9a56c45] self._vmops.spawn(context, instance, image_meta, injected_files, [ 648.321836] env[61839]: ERROR nova.compute.manager [instance: f8b36479-70a1-4f4e-84f4-e3baf9a56c45] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 648.321836] env[61839]: ERROR nova.compute.manager [instance: f8b36479-70a1-4f4e-84f4-e3baf9a56c45] vm_ref = self.build_virtual_machine(instance, [ 648.321836] env[61839]: ERROR nova.compute.manager [instance: f8b36479-70a1-4f4e-84f4-e3baf9a56c45] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 648.321836] env[61839]: ERROR nova.compute.manager [instance: f8b36479-70a1-4f4e-84f4-e3baf9a56c45] vif_infos = vmwarevif.get_vif_info(self._session, [ 648.321836] env[61839]: ERROR nova.compute.manager [instance: f8b36479-70a1-4f4e-84f4-e3baf9a56c45] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 648.322158] env[61839]: ERROR nova.compute.manager [instance: f8b36479-70a1-4f4e-84f4-e3baf9a56c45] for vif in network_info: [ 648.322158] env[61839]: ERROR nova.compute.manager [instance: f8b36479-70a1-4f4e-84f4-e3baf9a56c45] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 648.322158] env[61839]: ERROR nova.compute.manager [instance: f8b36479-70a1-4f4e-84f4-e3baf9a56c45] return self._sync_wrapper(fn, *args, **kwargs) [ 648.322158] env[61839]: ERROR nova.compute.manager [instance: f8b36479-70a1-4f4e-84f4-e3baf9a56c45] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 648.322158] env[61839]: ERROR nova.compute.manager [instance: f8b36479-70a1-4f4e-84f4-e3baf9a56c45] self.wait() [ 648.322158] env[61839]: ERROR nova.compute.manager [instance: f8b36479-70a1-4f4e-84f4-e3baf9a56c45] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 648.322158] env[61839]: ERROR nova.compute.manager [instance: f8b36479-70a1-4f4e-84f4-e3baf9a56c45] self[:] = self._gt.wait() [ 648.322158] env[61839]: ERROR nova.compute.manager [instance: f8b36479-70a1-4f4e-84f4-e3baf9a56c45] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 648.322158] env[61839]: ERROR nova.compute.manager [instance: f8b36479-70a1-4f4e-84f4-e3baf9a56c45] return self._exit_event.wait() [ 648.322158] env[61839]: ERROR nova.compute.manager [instance: f8b36479-70a1-4f4e-84f4-e3baf9a56c45] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 648.322158] env[61839]: ERROR nova.compute.manager [instance: f8b36479-70a1-4f4e-84f4-e3baf9a56c45] result = hub.switch() [ 648.322158] env[61839]: ERROR nova.compute.manager [instance: f8b36479-70a1-4f4e-84f4-e3baf9a56c45] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 648.322158] env[61839]: ERROR nova.compute.manager [instance: f8b36479-70a1-4f4e-84f4-e3baf9a56c45] return self.greenlet.switch() [ 648.322576] env[61839]: ERROR nova.compute.manager [instance: f8b36479-70a1-4f4e-84f4-e3baf9a56c45] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 648.322576] env[61839]: ERROR nova.compute.manager [instance: f8b36479-70a1-4f4e-84f4-e3baf9a56c45] result = function(*args, **kwargs) [ 648.322576] env[61839]: ERROR nova.compute.manager [instance: f8b36479-70a1-4f4e-84f4-e3baf9a56c45] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 648.322576] env[61839]: ERROR nova.compute.manager [instance: f8b36479-70a1-4f4e-84f4-e3baf9a56c45] return func(*args, **kwargs) [ 648.322576] env[61839]: ERROR nova.compute.manager [instance: f8b36479-70a1-4f4e-84f4-e3baf9a56c45] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 648.322576] env[61839]: ERROR nova.compute.manager [instance: f8b36479-70a1-4f4e-84f4-e3baf9a56c45] raise e [ 648.322576] env[61839]: ERROR nova.compute.manager [instance: f8b36479-70a1-4f4e-84f4-e3baf9a56c45] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 648.322576] env[61839]: ERROR nova.compute.manager [instance: f8b36479-70a1-4f4e-84f4-e3baf9a56c45] nwinfo = self.network_api.allocate_for_instance( [ 648.322576] env[61839]: ERROR nova.compute.manager [instance: f8b36479-70a1-4f4e-84f4-e3baf9a56c45] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 648.322576] env[61839]: ERROR nova.compute.manager [instance: f8b36479-70a1-4f4e-84f4-e3baf9a56c45] created_port_ids = self._update_ports_for_instance( [ 648.322576] env[61839]: ERROR nova.compute.manager [instance: f8b36479-70a1-4f4e-84f4-e3baf9a56c45] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 648.322576] env[61839]: ERROR nova.compute.manager [instance: f8b36479-70a1-4f4e-84f4-e3baf9a56c45] with excutils.save_and_reraise_exception(): [ 648.322576] env[61839]: ERROR nova.compute.manager [instance: f8b36479-70a1-4f4e-84f4-e3baf9a56c45] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 648.322887] env[61839]: ERROR nova.compute.manager [instance: f8b36479-70a1-4f4e-84f4-e3baf9a56c45] self.force_reraise() [ 648.322887] env[61839]: ERROR nova.compute.manager [instance: f8b36479-70a1-4f4e-84f4-e3baf9a56c45] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 648.322887] env[61839]: ERROR nova.compute.manager [instance: f8b36479-70a1-4f4e-84f4-e3baf9a56c45] raise self.value [ 648.322887] env[61839]: ERROR nova.compute.manager [instance: f8b36479-70a1-4f4e-84f4-e3baf9a56c45] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 648.322887] env[61839]: ERROR nova.compute.manager [instance: f8b36479-70a1-4f4e-84f4-e3baf9a56c45] updated_port = self._update_port( [ 648.322887] env[61839]: ERROR nova.compute.manager [instance: f8b36479-70a1-4f4e-84f4-e3baf9a56c45] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 648.322887] env[61839]: ERROR nova.compute.manager [instance: f8b36479-70a1-4f4e-84f4-e3baf9a56c45] _ensure_no_port_binding_failure(port) [ 648.322887] env[61839]: ERROR nova.compute.manager [instance: 
f8b36479-70a1-4f4e-84f4-e3baf9a56c45] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 648.322887] env[61839]: ERROR nova.compute.manager [instance: f8b36479-70a1-4f4e-84f4-e3baf9a56c45] raise exception.PortBindingFailed(port_id=port['id']) [ 648.322887] env[61839]: ERROR nova.compute.manager [instance: f8b36479-70a1-4f4e-84f4-e3baf9a56c45] nova.exception.PortBindingFailed: Binding failed for port 6885c1b8-b5bc-4e67-963a-1bc48d70db6b, please check neutron logs for more information. [ 648.322887] env[61839]: ERROR nova.compute.manager [instance: f8b36479-70a1-4f4e-84f4-e3baf9a56c45] [ 648.323168] env[61839]: DEBUG nova.compute.utils [None req-3986eaa2-d0de-4161-82e0-803ae69da9c7 tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] [instance: f8b36479-70a1-4f4e-84f4-e3baf9a56c45] Binding failed for port 6885c1b8-b5bc-4e67-963a-1bc48d70db6b, please check neutron logs for more information. {{(pid=61839) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 648.323874] env[61839]: DEBUG oslo_concurrency.lockutils [None req-f03d1837-14e7-4fc0-adb1-ca8100ce1425 tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 28.915s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 648.328141] env[61839]: DEBUG nova.compute.manager [None req-3986eaa2-d0de-4161-82e0-803ae69da9c7 tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] [instance: f8b36479-70a1-4f4e-84f4-e3baf9a56c45] Build of instance f8b36479-70a1-4f4e-84f4-e3baf9a56c45 was re-scheduled: Binding failed for port 6885c1b8-b5bc-4e67-963a-1bc48d70db6b, please check neutron logs for more information. 
{{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 648.328141] env[61839]: DEBUG nova.compute.manager [None req-3986eaa2-d0de-4161-82e0-803ae69da9c7 tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] [instance: f8b36479-70a1-4f4e-84f4-e3baf9a56c45] Unplugging VIFs for instance {{(pid=61839) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 648.328141] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3986eaa2-d0de-4161-82e0-803ae69da9c7 tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] Acquiring lock "refresh_cache-f8b36479-70a1-4f4e-84f4-e3baf9a56c45" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 648.328141] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3986eaa2-d0de-4161-82e0-803ae69da9c7 tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] Acquired lock "refresh_cache-f8b36479-70a1-4f4e-84f4-e3baf9a56c45" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 648.328378] env[61839]: DEBUG nova.network.neutron [None req-3986eaa2-d0de-4161-82e0-803ae69da9c7 tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] [instance: f8b36479-70a1-4f4e-84f4-e3baf9a56c45] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 648.506731] env[61839]: INFO nova.compute.manager [None req-eb9c6f05-d841-4e49-8f1a-c00c24c2a91c tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] [instance: 7fdd773e-3a96-4728-b162-0227a415bc96] Took 1.03 seconds to deallocate network for instance. [ 648.565656] env[61839]: DEBUG nova.compute.manager [None req-06f607c6-5c8c-4086-84f6-74587be86126 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] [instance: 81ba4888-4b21-410f-ab86-a3068995836f] Starting instance... {{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 648.860533] env[61839]: DEBUG nova.network.neutron [None req-3986eaa2-d0de-4161-82e0-803ae69da9c7 tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] [instance: f8b36479-70a1-4f4e-84f4-e3baf9a56c45] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 648.981818] env[61839]: DEBUG nova.network.neutron [None req-3986eaa2-d0de-4161-82e0-803ae69da9c7 tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] [instance: f8b36479-70a1-4f4e-84f4-e3baf9a56c45] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 649.038548] env[61839]: DEBUG nova.network.neutron [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] [instance: d8b0b608-d8ca-45ce-a113-ee96b5ef1a9d] Instance cache missing network info. 
{{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 649.088575] env[61839]: DEBUG oslo_concurrency.lockutils [None req-06f607c6-5c8c-4086-84f6-74587be86126 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 649.265592] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3345afa9-f2e9-430a-a842-1461f2e8b2b7 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.277305] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6ded1de-876c-421d-8cb7-01f279c43612 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.313038] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf08cfed-c5c5-427c-9fbe-4a37908aa853 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.321023] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19703ebe-64e8-4d29-8e1f-2a285a2d8d7a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.337133] env[61839]: DEBUG nova.compute.provider_tree [None req-f03d1837-14e7-4fc0-adb1-ca8100ce1425 tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 649.487851] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3986eaa2-d0de-4161-82e0-803ae69da9c7 tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] Releasing lock "refresh_cache-f8b36479-70a1-4f4e-84f4-e3baf9a56c45" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 649.488369] env[61839]: DEBUG nova.compute.manager [None req-3986eaa2-d0de-4161-82e0-803ae69da9c7 tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged.
{{(pid=61839) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 649.488703] env[61839]: DEBUG nova.compute.manager [None req-3986eaa2-d0de-4161-82e0-803ae69da9c7 tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] [instance: f8b36479-70a1-4f4e-84f4-e3baf9a56c45] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 649.490801] env[61839]: DEBUG nova.network.neutron [None req-3986eaa2-d0de-4161-82e0-803ae69da9c7 tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] [instance: f8b36479-70a1-4f4e-84f4-e3baf9a56c45] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 649.517160] env[61839]: DEBUG nova.network.neutron [None req-3986eaa2-d0de-4161-82e0-803ae69da9c7 tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] [instance: f8b36479-70a1-4f4e-84f4-e3baf9a56c45] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 649.548075] env[61839]: INFO nova.scheduler.client.report [None req-eb9c6f05-d841-4e49-8f1a-c00c24c2a91c tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] Deleted allocations for instance 7fdd773e-3a96-4728-b162-0227a415bc96 [ 649.627530] env[61839]: DEBUG nova.network.neutron [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] [instance: d8b0b608-d8ca-45ce-a113-ee96b5ef1a9d] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 649.840819] env[61839]: DEBUG nova.scheduler.client.report [None req-f03d1837-14e7-4fc0-adb1-ca8100ce1425 tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 650.020511] env[61839]: DEBUG nova.network.neutron [None req-3986eaa2-d0de-4161-82e0-803ae69da9c7 tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] [instance: f8b36479-70a1-4f4e-84f4-e3baf9a56c45] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 650.054028] env[61839]: DEBUG oslo_concurrency.lockutils [None req-eb9c6f05-d841-4e49-8f1a-c00c24c2a91c tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] Lock "7fdd773e-3a96-4728-b162-0227a415bc96" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 100.866s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 650.132731] env[61839]: DEBUG oslo_concurrency.lockutils [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] 
Releasing lock "refresh_cache-d8b0b608-d8ca-45ce-a113-ee96b5ef1a9d" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 650.132966] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] [instance: d8b0b608-d8ca-45ce-a113-ee96b5ef1a9d] Updated the network info_cache for instance {{(pid=61839) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9999}} [ 650.133543] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 650.133726] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 650.133875] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 650.134035] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 650.134176] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61839) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 650.134325] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 650.346110] env[61839]: DEBUG oslo_concurrency.lockutils [None req-f03d1837-14e7-4fc0-adb1-ca8100ce1425 tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.022s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 650.346609] env[61839]: ERROR nova.compute.manager [None req-f03d1837-14e7-4fc0-adb1-ca8100ce1425 tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] [instance: 86a1fc77-26d9-44c7-8f1f-771315769619] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 96fcea9f-31d0-41ce-8278-a9e04715c1d5, please check neutron logs for more information. 
[ 650.346609] env[61839]: ERROR nova.compute.manager [instance: 86a1fc77-26d9-44c7-8f1f-771315769619] Traceback (most recent call last): [ 650.346609] env[61839]: ERROR nova.compute.manager [instance: 86a1fc77-26d9-44c7-8f1f-771315769619] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 650.346609] env[61839]: ERROR nova.compute.manager [instance: 86a1fc77-26d9-44c7-8f1f-771315769619] self.driver.spawn(context, instance, image_meta, [ 650.346609] env[61839]: ERROR nova.compute.manager [instance: 86a1fc77-26d9-44c7-8f1f-771315769619] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 650.346609] env[61839]: ERROR nova.compute.manager [instance: 86a1fc77-26d9-44c7-8f1f-771315769619] self._vmops.spawn(context, instance, image_meta, injected_files, [ 650.346609] env[61839]: ERROR nova.compute.manager [instance: 86a1fc77-26d9-44c7-8f1f-771315769619] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 650.346609] env[61839]: ERROR nova.compute.manager [instance: 86a1fc77-26d9-44c7-8f1f-771315769619] vm_ref = self.build_virtual_machine(instance, [ 650.346609] env[61839]: ERROR nova.compute.manager [instance: 86a1fc77-26d9-44c7-8f1f-771315769619] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 650.346609] env[61839]: ERROR nova.compute.manager [instance: 86a1fc77-26d9-44c7-8f1f-771315769619] vif_infos = vmwarevif.get_vif_info(self._session, [ 650.346609] env[61839]: ERROR nova.compute.manager [instance: 86a1fc77-26d9-44c7-8f1f-771315769619] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 650.347270] env[61839]: ERROR nova.compute.manager [instance: 86a1fc77-26d9-44c7-8f1f-771315769619] for vif in network_info: [ 650.347270] env[61839]: ERROR nova.compute.manager [instance: 86a1fc77-26d9-44c7-8f1f-771315769619] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 650.347270] env[61839]: ERROR nova.compute.manager [instance: 86a1fc77-26d9-44c7-8f1f-771315769619] return self._sync_wrapper(fn, *args, **kwargs) [ 650.347270] env[61839]: ERROR nova.compute.manager [instance: 86a1fc77-26d9-44c7-8f1f-771315769619] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 650.347270] env[61839]: ERROR nova.compute.manager [instance: 86a1fc77-26d9-44c7-8f1f-771315769619] self.wait() [ 650.347270] env[61839]: ERROR nova.compute.manager [instance: 86a1fc77-26d9-44c7-8f1f-771315769619] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 650.347270] env[61839]: ERROR nova.compute.manager [instance: 86a1fc77-26d9-44c7-8f1f-771315769619] self[:] = self._gt.wait() [ 650.347270] env[61839]: ERROR nova.compute.manager [instance: 86a1fc77-26d9-44c7-8f1f-771315769619] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 650.347270] env[61839]: ERROR nova.compute.manager [instance: 86a1fc77-26d9-44c7-8f1f-771315769619] return self._exit_event.wait() [ 650.347270] env[61839]: ERROR nova.compute.manager [instance: 86a1fc77-26d9-44c7-8f1f-771315769619] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 650.347270] env[61839]: ERROR nova.compute.manager [instance: 86a1fc77-26d9-44c7-8f1f-771315769619] result = hub.switch() [ 650.347270] env[61839]: ERROR nova.compute.manager [instance: 86a1fc77-26d9-44c7-8f1f-771315769619] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
650.347270] env[61839]: ERROR nova.compute.manager [instance: 86a1fc77-26d9-44c7-8f1f-771315769619] return self.greenlet.switch() [ 650.347721] env[61839]: ERROR nova.compute.manager [instance: 86a1fc77-26d9-44c7-8f1f-771315769619] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 650.347721] env[61839]: ERROR nova.compute.manager [instance: 86a1fc77-26d9-44c7-8f1f-771315769619] result = function(*args, **kwargs) [ 650.347721] env[61839]: ERROR nova.compute.manager [instance: 86a1fc77-26d9-44c7-8f1f-771315769619] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 650.347721] env[61839]: ERROR nova.compute.manager [instance: 86a1fc77-26d9-44c7-8f1f-771315769619] return func(*args, **kwargs) [ 650.347721] env[61839]: ERROR nova.compute.manager [instance: 86a1fc77-26d9-44c7-8f1f-771315769619] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 650.347721] env[61839]: ERROR nova.compute.manager [instance: 86a1fc77-26d9-44c7-8f1f-771315769619] raise e [ 650.347721] env[61839]: ERROR nova.compute.manager [instance: 86a1fc77-26d9-44c7-8f1f-771315769619] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 650.347721] env[61839]: ERROR nova.compute.manager [instance: 86a1fc77-26d9-44c7-8f1f-771315769619] nwinfo = self.network_api.allocate_for_instance( [ 650.347721] env[61839]: ERROR nova.compute.manager [instance: 86a1fc77-26d9-44c7-8f1f-771315769619] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 650.347721] env[61839]: ERROR nova.compute.manager [instance: 86a1fc77-26d9-44c7-8f1f-771315769619] created_port_ids = self._update_ports_for_instance( [ 650.347721] env[61839]: ERROR nova.compute.manager [instance: 86a1fc77-26d9-44c7-8f1f-771315769619] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 650.347721] env[61839]: ERROR nova.compute.manager [instance: 86a1fc77-26d9-44c7-8f1f-771315769619] with excutils.save_and_reraise_exception(): [ 650.347721] env[61839]: ERROR nova.compute.manager [instance: 86a1fc77-26d9-44c7-8f1f-771315769619] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 650.348040] env[61839]: ERROR nova.compute.manager [instance: 86a1fc77-26d9-44c7-8f1f-771315769619] self.force_reraise() [ 650.348040] env[61839]: ERROR nova.compute.manager [instance: 86a1fc77-26d9-44c7-8f1f-771315769619] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 650.348040] env[61839]: ERROR nova.compute.manager [instance: 86a1fc77-26d9-44c7-8f1f-771315769619] raise self.value [ 650.348040] env[61839]: ERROR nova.compute.manager [instance: 86a1fc77-26d9-44c7-8f1f-771315769619] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 650.348040] env[61839]: ERROR nova.compute.manager [instance: 86a1fc77-26d9-44c7-8f1f-771315769619] updated_port = self._update_port( [ 650.348040] env[61839]: ERROR nova.compute.manager [instance: 86a1fc77-26d9-44c7-8f1f-771315769619] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 650.348040] env[61839]: ERROR nova.compute.manager [instance: 86a1fc77-26d9-44c7-8f1f-771315769619] _ensure_no_port_binding_failure(port) [ 650.348040] env[61839]: ERROR nova.compute.manager [instance: 86a1fc77-26d9-44c7-8f1f-771315769619] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 650.348040] env[61839]: ERROR nova.compute.manager [instance: 86a1fc77-26d9-44c7-8f1f-771315769619] raise exception.PortBindingFailed(port_id=port['id']) [ 650.348040] env[61839]: ERROR nova.compute.manager [instance: 86a1fc77-26d9-44c7-8f1f-771315769619] nova.exception.PortBindingFailed: Binding failed for port 96fcea9f-31d0-41ce-8278-a9e04715c1d5, please check neutron logs for more information. [ 650.348040] env[61839]: ERROR nova.compute.manager [instance: 86a1fc77-26d9-44c7-8f1f-771315769619] [ 650.348331] env[61839]: DEBUG nova.compute.utils [None req-f03d1837-14e7-4fc0-adb1-ca8100ce1425 tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] [instance: 86a1fc77-26d9-44c7-8f1f-771315769619] Binding failed for port 96fcea9f-31d0-41ce-8278-a9e04715c1d5, please check neutron logs for more information. {{(pid=61839) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 650.348594] env[61839]: DEBUG oslo_concurrency.lockutils [None req-76059687-6f91-475a-bfaf-52787dc520ba tempest-AttachInterfacesUnderV243Test-1125158786 tempest-AttachInterfacesUnderV243Test-1125158786-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.837s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 650.350963] env[61839]: INFO nova.compute.claims [None req-76059687-6f91-475a-bfaf-52787dc520ba tempest-AttachInterfacesUnderV243Test-1125158786 tempest-AttachInterfacesUnderV243Test-1125158786-project-member] [instance: 328882b4-d1af-4036-b313-ecada7d53899] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 650.354132] env[61839]: DEBUG nova.compute.manager [None req-f03d1837-14e7-4fc0-adb1-ca8100ce1425 tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] [instance: 86a1fc77-26d9-44c7-8f1f-771315769619] Build of instance 86a1fc77-26d9-44c7-8f1f-771315769619 was re-scheduled: Binding failed for port 96fcea9f-31d0-41ce-8278-a9e04715c1d5, please check neutron logs for more information. 
{{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 650.354672] env[61839]: DEBUG nova.compute.manager [None req-f03d1837-14e7-4fc0-adb1-ca8100ce1425 tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] [instance: 86a1fc77-26d9-44c7-8f1f-771315769619] Unplugging VIFs for instance {{(pid=61839) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 650.354997] env[61839]: DEBUG oslo_concurrency.lockutils [None req-f03d1837-14e7-4fc0-adb1-ca8100ce1425 tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] Acquiring lock "refresh_cache-86a1fc77-26d9-44c7-8f1f-771315769619" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 650.355139] env[61839]: DEBUG oslo_concurrency.lockutils [None req-f03d1837-14e7-4fc0-adb1-ca8100ce1425 tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] Acquired lock "refresh_cache-86a1fc77-26d9-44c7-8f1f-771315769619" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 650.355350] env[61839]: DEBUG nova.network.neutron [None req-f03d1837-14e7-4fc0-adb1-ca8100ce1425 tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] [instance: 86a1fc77-26d9-44c7-8f1f-771315769619] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 650.523739] env[61839]: INFO nova.compute.manager [None req-3986eaa2-d0de-4161-82e0-803ae69da9c7 tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] [instance: f8b36479-70a1-4f4e-84f4-e3baf9a56c45] Took 1.03 seconds to deallocate network for instance. [ 650.558442] env[61839]: DEBUG nova.compute.manager [None req-9e174940-155b-4c43-946d-348c70405c68 tempest-ServersTestJSON-188267051 tempest-ServersTestJSON-188267051-project-member] [instance: 2619b21e-084f-4003-af13-80382bfb1e2f] Starting instance... {{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 650.636981] env[61839]: DEBUG oslo_concurrency.lockutils [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 650.880242] env[61839]: DEBUG nova.network.neutron [None req-f03d1837-14e7-4fc0-adb1-ca8100ce1425 tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] [instance: 86a1fc77-26d9-44c7-8f1f-771315769619] Instance cache missing network info. 
{{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 651.048860] env[61839]: DEBUG nova.network.neutron [None req-f03d1837-14e7-4fc0-adb1-ca8100ce1425 tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] [instance: 86a1fc77-26d9-44c7-8f1f-771315769619] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 651.091820] env[61839]: DEBUG oslo_concurrency.lockutils [None req-9e174940-155b-4c43-946d-348c70405c68 tempest-ServersTestJSON-188267051 tempest-ServersTestJSON-188267051-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 651.550243] env[61839]: INFO nova.scheduler.client.report [None req-3986eaa2-d0de-4161-82e0-803ae69da9c7 tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] Deleted allocations for instance f8b36479-70a1-4f4e-84f4-e3baf9a56c45 [ 651.555818] env[61839]: DEBUG oslo_concurrency.lockutils [None req-f03d1837-14e7-4fc0-adb1-ca8100ce1425 tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] Releasing lock "refresh_cache-86a1fc77-26d9-44c7-8f1f-771315769619" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 651.555974] env[61839]: DEBUG nova.compute.manager [None req-f03d1837-14e7-4fc0-adb1-ca8100ce1425 tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged. {{(pid=61839) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 651.556163] env[61839]: DEBUG nova.compute.manager [None req-f03d1837-14e7-4fc0-adb1-ca8100ce1425 tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] [instance: 86a1fc77-26d9-44c7-8f1f-771315769619] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 651.556327] env[61839]: DEBUG nova.network.neutron [None req-f03d1837-14e7-4fc0-adb1-ca8100ce1425 tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] [instance: 86a1fc77-26d9-44c7-8f1f-771315769619] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 651.589511] env[61839]: DEBUG nova.network.neutron [None req-f03d1837-14e7-4fc0-adb1-ca8100ce1425 tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] [instance: 86a1fc77-26d9-44c7-8f1f-771315769619] Instance cache missing network info.
{{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 651.749630] env[61839]: DEBUG oslo_concurrency.lockutils [None req-130eacd9-cc23-48e6-bf9f-acda8817b91f tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] Acquiring lock "687e6bf8-cf2a-4a9f-ad87-b42c4d0ced0a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 651.749716] env[61839]: DEBUG oslo_concurrency.lockutils [None req-130eacd9-cc23-48e6-bf9f-acda8817b91f tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] Lock "687e6bf8-cf2a-4a9f-ad87-b42c4d0ced0a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 651.802061] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9672b6c-800d-4457-956d-f1dcbcaa4904 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.810088] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4dbc030-8fb5-4c41-9146-411eb4235a70 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.838713] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7c4e19a-83e3-4d69-bc32-f0382a6a3014 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.845751] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5eb6e707-24a8-45fb-ac3b-b73276d28b69 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.858291] env[61839]: DEBUG nova.compute.provider_tree [None req-76059687-6f91-475a-bfaf-52787dc520ba tempest-AttachInterfacesUnderV243Test-1125158786 tempest-AttachInterfacesUnderV243Test-1125158786-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 652.062758] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3986eaa2-d0de-4161-82e0-803ae69da9c7 tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] Lock "f8b36479-70a1-4f4e-84f4-e3baf9a56c45" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 101.956s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 652.091351] env[61839]: DEBUG nova.network.neutron [None req-f03d1837-14e7-4fc0-adb1-ca8100ce1425 tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] [instance: 86a1fc77-26d9-44c7-8f1f-771315769619] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 652.361187] env[61839]: DEBUG nova.scheduler.client.report [None req-76059687-6f91-475a-bfaf-52787dc520ba 
tempest-AttachInterfacesUnderV243Test-1125158786 tempest-AttachInterfacesUnderV243Test-1125158786-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 652.566214] env[61839]: DEBUG nova.compute.manager [None req-7cd3e466-5165-49d9-934b-74ecfbe9a0b9 tempest-ServersV294TestFqdnHostnames-1206786293 tempest-ServersV294TestFqdnHostnames-1206786293-project-member] [instance: 0ab450ab-6416-464d-8140-a8c320abb69c] Starting instance... {{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 652.599678] env[61839]: INFO nova.compute.manager [None req-f03d1837-14e7-4fc0-adb1-ca8100ce1425 tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] [instance: 86a1fc77-26d9-44c7-8f1f-771315769619] Took 1.04 seconds to deallocate network for instance. [ 652.872921] env[61839]: DEBUG oslo_concurrency.lockutils [None req-76059687-6f91-475a-bfaf-52787dc520ba tempest-AttachInterfacesUnderV243Test-1125158786 tempest-AttachInterfacesUnderV243Test-1125158786-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.524s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 652.873521] env[61839]: DEBUG nova.compute.manager [None req-76059687-6f91-475a-bfaf-52787dc520ba tempest-AttachInterfacesUnderV243Test-1125158786 tempest-AttachInterfacesUnderV243Test-1125158786-project-member] [instance: 328882b4-d1af-4036-b313-ecada7d53899] Start building networks asynchronously for instance. 
{{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 652.880020] env[61839]: DEBUG oslo_concurrency.lockutils [None req-db043505-be83-48ec-924b-c8d945a5cedf tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 26.846s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 653.089010] env[61839]: DEBUG oslo_concurrency.lockutils [None req-7cd3e466-5165-49d9-934b-74ecfbe9a0b9 tempest-ServersV294TestFqdnHostnames-1206786293 tempest-ServersV294TestFqdnHostnames-1206786293-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 653.385202] env[61839]: DEBUG nova.compute.utils [None req-76059687-6f91-475a-bfaf-52787dc520ba tempest-AttachInterfacesUnderV243Test-1125158786 tempest-AttachInterfacesUnderV243Test-1125158786-project-member] Using /dev/sd instead of None {{(pid=61839) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 653.386761] env[61839]: DEBUG nova.compute.manager [None req-76059687-6f91-475a-bfaf-52787dc520ba tempest-AttachInterfacesUnderV243Test-1125158786 tempest-AttachInterfacesUnderV243Test-1125158786-project-member] [instance: 328882b4-d1af-4036-b313-ecada7d53899] Allocating IP information in the background. {{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 653.386940] env[61839]: DEBUG nova.network.neutron [None req-76059687-6f91-475a-bfaf-52787dc520ba tempest-AttachInterfacesUnderV243Test-1125158786 tempest-AttachInterfacesUnderV243Test-1125158786-project-member] [instance: 328882b4-d1af-4036-b313-ecada7d53899] allocate_for_instance() {{(pid=61839) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 653.465890] env[61839]: DEBUG nova.policy [None req-76059687-6f91-475a-bfaf-52787dc520ba tempest-AttachInterfacesUnderV243Test-1125158786 tempest-AttachInterfacesUnderV243Test-1125158786-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4d1ec7eb5f5142ce9ee61bbb5b54cd36', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9037e5252645477eaf8cd8529ab5b63b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61839) authorize /opt/stack/nova/nova/policy.py:201}} [ 653.639487] env[61839]: INFO nova.scheduler.client.report [None req-f03d1837-14e7-4fc0-adb1-ca8100ce1425 tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] Deleted allocations for instance 86a1fc77-26d9-44c7-8f1f-771315769619 [ 653.890636] env[61839]: DEBUG nova.compute.manager [None req-76059687-6f91-475a-bfaf-52787dc520ba tempest-AttachInterfacesUnderV243Test-1125158786 tempest-AttachInterfacesUnderV243Test-1125158786-project-member] [instance: 328882b4-d1af-4036-b313-ecada7d53899] Start building block device mappings for instance. 
{{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 653.900575] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b65a0be-04ac-44e0-8643-7982985fb165 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.908337] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72e38cc1-17e4-4feb-9e1f-56181f0c3efe {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.940601] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f1e454d-0d2c-4b61-b4da-df4ecf755a43 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.949114] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-252b8b4c-1df1-47aa-bd06-ff87b2340902 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.963198] env[61839]: DEBUG nova.compute.provider_tree [None req-db043505-be83-48ec-924b-c8d945a5cedf tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 654.150252] env[61839]: DEBUG oslo_concurrency.lockutils [None req-f03d1837-14e7-4fc0-adb1-ca8100ce1425 tempest-ListServerFiltersTestJSON-1176161750 tempest-ListServerFiltersTestJSON-1176161750-project-member] Lock "86a1fc77-26d9-44c7-8f1f-771315769619" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 101.960s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 654.178648] env[61839]: DEBUG nova.network.neutron [None req-76059687-6f91-475a-bfaf-52787dc520ba tempest-AttachInterfacesUnderV243Test-1125158786 tempest-AttachInterfacesUnderV243Test-1125158786-project-member] [instance: 328882b4-d1af-4036-b313-ecada7d53899] Successfully created port: 6ebbd250-cd9f-40d5-a18c-b3210da6abe9 {{(pid=61839) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 654.465999] env[61839]: DEBUG nova.scheduler.client.report [None req-db043505-be83-48ec-924b-c8d945a5cedf tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 654.653866] env[61839]: DEBUG nova.compute.manager [None req-66411a82-44a4-4e09-b7e5-387750763c23 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 3ea7af26-14b2-4371-a4f4-48afc190d4bc] Starting instance... 
{{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 654.903960] env[61839]: DEBUG nova.compute.manager [None req-76059687-6f91-475a-bfaf-52787dc520ba tempest-AttachInterfacesUnderV243Test-1125158786 tempest-AttachInterfacesUnderV243Test-1125158786-project-member] [instance: 328882b4-d1af-4036-b313-ecada7d53899] Start spawning the instance on the hypervisor. {{(pid=61839) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 654.927855] env[61839]: DEBUG nova.virt.hardware [None req-76059687-6f91-475a-bfaf-52787dc520ba tempest-AttachInterfacesUnderV243Test-1125158786 tempest-AttachInterfacesUnderV243Test-1125158786-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-18T16:51:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-18T16:50:57Z,direct_url=,disk_format='vmdk',id=e497cc62-282a-4a70-9770-22d80d8a1013,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a98e034276e44534a9feace637762da7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-18T16:50:58Z,virtual_size=,visibility=), allow threads: False {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 654.927855] env[61839]: DEBUG nova.virt.hardware [None req-76059687-6f91-475a-bfaf-52787dc520ba tempest-AttachInterfacesUnderV243Test-1125158786 tempest-AttachInterfacesUnderV243Test-1125158786-project-member] Flavor limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 654.927855] env[61839]: DEBUG nova.virt.hardware [None req-76059687-6f91-475a-bfaf-52787dc520ba tempest-AttachInterfacesUnderV243Test-1125158786 tempest-AttachInterfacesUnderV243Test-1125158786-project-member] Image limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 654.928032] env[61839]: DEBUG nova.virt.hardware [None req-76059687-6f91-475a-bfaf-52787dc520ba tempest-AttachInterfacesUnderV243Test-1125158786 tempest-AttachInterfacesUnderV243Test-1125158786-project-member] Flavor pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 654.928032] env[61839]: DEBUG nova.virt.hardware [None req-76059687-6f91-475a-bfaf-52787dc520ba tempest-AttachInterfacesUnderV243Test-1125158786 tempest-AttachInterfacesUnderV243Test-1125158786-project-member] Image pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 654.928394] env[61839]: DEBUG nova.virt.hardware [None req-76059687-6f91-475a-bfaf-52787dc520ba tempest-AttachInterfacesUnderV243Test-1125158786 tempest-AttachInterfacesUnderV243Test-1125158786-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 654.929245] env[61839]: DEBUG nova.virt.hardware [None req-76059687-6f91-475a-bfaf-52787dc520ba tempest-AttachInterfacesUnderV243Test-1125158786 tempest-AttachInterfacesUnderV243Test-1125158786-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 654.929563] env[61839]: DEBUG nova.virt.hardware [None req-76059687-6f91-475a-bfaf-52787dc520ba tempest-AttachInterfacesUnderV243Test-1125158786 tempest-AttachInterfacesUnderV243Test-1125158786-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 654.929894] env[61839]: DEBUG nova.virt.hardware [None req-76059687-6f91-475a-bfaf-52787dc520ba tempest-AttachInterfacesUnderV243Test-1125158786 tempest-AttachInterfacesUnderV243Test-1125158786-project-member] Got 1 possible topologies {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 654.930302] env[61839]: DEBUG nova.virt.hardware [None req-76059687-6f91-475a-bfaf-52787dc520ba tempest-AttachInterfacesUnderV243Test-1125158786 tempest-AttachInterfacesUnderV243Test-1125158786-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 654.930626] env[61839]: DEBUG nova.virt.hardware [None req-76059687-6f91-475a-bfaf-52787dc520ba tempest-AttachInterfacesUnderV243Test-1125158786 tempest-AttachInterfacesUnderV243Test-1125158786-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 654.931960] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2dfc71e6-d1f7-4ab4-bc8d-4535c7c943e9 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.942495] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4260e6a-b99f-4327-b72d-57cf225f68bb {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.971535] env[61839]: DEBUG oslo_concurrency.lockutils [None req-db043505-be83-48ec-924b-c8d945a5cedf tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.095s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 654.972566] env[61839]: ERROR nova.compute.manager [None req-db043505-be83-48ec-924b-c8d945a5cedf tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: 7d9a24e0-c265-4255-964f-54c971c02ded] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 3e7ac607-a43c-4448-814a-c9266b8fe7db, please check neutron logs for more information. 
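Annotation: the "Inventory has not changed for provider ... based on inventory data" records above show the exact placement inventory payload this node keeps reporting; the values below are copied verbatim from those records, while the surrounding snippet only illustrates the data shape and is not the scheduler report-client code. The traceback for the failure reported just above continues in the records below.

    # Provider cef329e6-1ccd-42a8-bbc4-109a06d1c908, as reported in this log:
    inventory = {
        'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16,
                 'step_size': 1, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1,
                      'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0},
        'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135,
                    'step_size': 1, 'allocation_ratio': 1.0},
    }
    # Claimable capacity is (total - reserved) * allocation_ratio,
    # e.g. VCPU: (48 - 0) * 4.0 = 192 claimable vCPUs.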
[ 654.972566] env[61839]: ERROR nova.compute.manager [instance: 7d9a24e0-c265-4255-964f-54c971c02ded] Traceback (most recent call last): [ 654.972566] env[61839]: ERROR nova.compute.manager [instance: 7d9a24e0-c265-4255-964f-54c971c02ded] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 654.972566] env[61839]: ERROR nova.compute.manager [instance: 7d9a24e0-c265-4255-964f-54c971c02ded] self.driver.spawn(context, instance, image_meta, [ 654.972566] env[61839]: ERROR nova.compute.manager [instance: 7d9a24e0-c265-4255-964f-54c971c02ded] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 654.972566] env[61839]: ERROR nova.compute.manager [instance: 7d9a24e0-c265-4255-964f-54c971c02ded] self._vmops.spawn(context, instance, image_meta, injected_files, [ 654.972566] env[61839]: ERROR nova.compute.manager [instance: 7d9a24e0-c265-4255-964f-54c971c02ded] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 654.972566] env[61839]: ERROR nova.compute.manager [instance: 7d9a24e0-c265-4255-964f-54c971c02ded] vm_ref = self.build_virtual_machine(instance, [ 654.972566] env[61839]: ERROR nova.compute.manager [instance: 7d9a24e0-c265-4255-964f-54c971c02ded] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 654.972566] env[61839]: ERROR nova.compute.manager [instance: 7d9a24e0-c265-4255-964f-54c971c02ded] vif_infos = vmwarevif.get_vif_info(self._session, [ 654.972566] env[61839]: ERROR nova.compute.manager [instance: 7d9a24e0-c265-4255-964f-54c971c02ded] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 654.972896] env[61839]: ERROR nova.compute.manager [instance: 7d9a24e0-c265-4255-964f-54c971c02ded] for vif in network_info: [ 654.972896] env[61839]: ERROR nova.compute.manager [instance: 7d9a24e0-c265-4255-964f-54c971c02ded] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 654.972896] env[61839]: ERROR nova.compute.manager [instance: 7d9a24e0-c265-4255-964f-54c971c02ded] return self._sync_wrapper(fn, *args, **kwargs) [ 654.972896] env[61839]: ERROR nova.compute.manager [instance: 7d9a24e0-c265-4255-964f-54c971c02ded] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 654.972896] env[61839]: ERROR nova.compute.manager [instance: 7d9a24e0-c265-4255-964f-54c971c02ded] self.wait() [ 654.972896] env[61839]: ERROR nova.compute.manager [instance: 7d9a24e0-c265-4255-964f-54c971c02ded] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 654.972896] env[61839]: ERROR nova.compute.manager [instance: 7d9a24e0-c265-4255-964f-54c971c02ded] self[:] = self._gt.wait() [ 654.972896] env[61839]: ERROR nova.compute.manager [instance: 7d9a24e0-c265-4255-964f-54c971c02ded] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 654.972896] env[61839]: ERROR nova.compute.manager [instance: 7d9a24e0-c265-4255-964f-54c971c02ded] return self._exit_event.wait() [ 654.972896] env[61839]: ERROR nova.compute.manager [instance: 7d9a24e0-c265-4255-964f-54c971c02ded] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 654.972896] env[61839]: ERROR nova.compute.manager [instance: 7d9a24e0-c265-4255-964f-54c971c02ded] result = hub.switch() [ 654.972896] env[61839]: ERROR nova.compute.manager [instance: 7d9a24e0-c265-4255-964f-54c971c02ded] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
654.972896] env[61839]: ERROR nova.compute.manager [instance: 7d9a24e0-c265-4255-964f-54c971c02ded] return self.greenlet.switch() [ 654.973238] env[61839]: ERROR nova.compute.manager [instance: 7d9a24e0-c265-4255-964f-54c971c02ded] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 654.973238] env[61839]: ERROR nova.compute.manager [instance: 7d9a24e0-c265-4255-964f-54c971c02ded] result = function(*args, **kwargs) [ 654.973238] env[61839]: ERROR nova.compute.manager [instance: 7d9a24e0-c265-4255-964f-54c971c02ded] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 654.973238] env[61839]: ERROR nova.compute.manager [instance: 7d9a24e0-c265-4255-964f-54c971c02ded] return func(*args, **kwargs) [ 654.973238] env[61839]: ERROR nova.compute.manager [instance: 7d9a24e0-c265-4255-964f-54c971c02ded] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 654.973238] env[61839]: ERROR nova.compute.manager [instance: 7d9a24e0-c265-4255-964f-54c971c02ded] raise e [ 654.973238] env[61839]: ERROR nova.compute.manager [instance: 7d9a24e0-c265-4255-964f-54c971c02ded] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 654.973238] env[61839]: ERROR nova.compute.manager [instance: 7d9a24e0-c265-4255-964f-54c971c02ded] nwinfo = self.network_api.allocate_for_instance( [ 654.973238] env[61839]: ERROR nova.compute.manager [instance: 7d9a24e0-c265-4255-964f-54c971c02ded] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 654.973238] env[61839]: ERROR nova.compute.manager [instance: 7d9a24e0-c265-4255-964f-54c971c02ded] created_port_ids = self._update_ports_for_instance( [ 654.973238] env[61839]: ERROR nova.compute.manager [instance: 7d9a24e0-c265-4255-964f-54c971c02ded] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 654.973238] env[61839]: ERROR nova.compute.manager [instance: 7d9a24e0-c265-4255-964f-54c971c02ded] with excutils.save_and_reraise_exception(): [ 654.973238] env[61839]: ERROR nova.compute.manager [instance: 7d9a24e0-c265-4255-964f-54c971c02ded] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 654.973583] env[61839]: ERROR nova.compute.manager [instance: 7d9a24e0-c265-4255-964f-54c971c02ded] self.force_reraise() [ 654.973583] env[61839]: ERROR nova.compute.manager [instance: 7d9a24e0-c265-4255-964f-54c971c02ded] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 654.973583] env[61839]: ERROR nova.compute.manager [instance: 7d9a24e0-c265-4255-964f-54c971c02ded] raise self.value [ 654.973583] env[61839]: ERROR nova.compute.manager [instance: 7d9a24e0-c265-4255-964f-54c971c02ded] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 654.973583] env[61839]: ERROR nova.compute.manager [instance: 7d9a24e0-c265-4255-964f-54c971c02ded] updated_port = self._update_port( [ 654.973583] env[61839]: ERROR nova.compute.manager [instance: 7d9a24e0-c265-4255-964f-54c971c02ded] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 654.973583] env[61839]: ERROR nova.compute.manager [instance: 7d9a24e0-c265-4255-964f-54c971c02ded] _ensure_no_port_binding_failure(port) [ 654.973583] env[61839]: ERROR nova.compute.manager [instance: 7d9a24e0-c265-4255-964f-54c971c02ded] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 654.973583] env[61839]: ERROR nova.compute.manager [instance: 7d9a24e0-c265-4255-964f-54c971c02ded] raise exception.PortBindingFailed(port_id=port['id']) [ 654.973583] env[61839]: ERROR nova.compute.manager [instance: 7d9a24e0-c265-4255-964f-54c971c02ded] nova.exception.PortBindingFailed: Binding failed for port 3e7ac607-a43c-4448-814a-c9266b8fe7db, please check neutron logs for more information. [ 654.973583] env[61839]: ERROR nova.compute.manager [instance: 7d9a24e0-c265-4255-964f-54c971c02ded] [ 654.974213] env[61839]: DEBUG nova.compute.utils [None req-db043505-be83-48ec-924b-c8d945a5cedf tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: 7d9a24e0-c265-4255-964f-54c971c02ded] Binding failed for port 3e7ac607-a43c-4448-814a-c9266b8fe7db, please check neutron logs for more information. {{(pid=61839) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 654.975714] env[61839]: DEBUG oslo_concurrency.lockutils [None req-b7e16cf6-cab6-4bac-b016-9355722d13b1 tempest-ServerPasswordTestJSON-1977854233 tempest-ServerPasswordTestJSON-1977854233-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.395s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 654.983279] env[61839]: INFO nova.compute.claims [None req-b7e16cf6-cab6-4bac-b016-9355722d13b1 tempest-ServerPasswordTestJSON-1977854233 tempest-ServerPasswordTestJSON-1977854233-project-member] [instance: 2670f16e-4c44-4b88-937e-9e491f599acb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 654.983279] env[61839]: DEBUG nova.compute.manager [None req-db043505-be83-48ec-924b-c8d945a5cedf tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: 7d9a24e0-c265-4255-964f-54c971c02ded] Build of instance 7d9a24e0-c265-4255-964f-54c971c02ded was re-scheduled: Binding failed for port 3e7ac607-a43c-4448-814a-c9266b8fe7db, please check neutron logs for more information. 
{{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 654.983279] env[61839]: DEBUG nova.compute.manager [None req-db043505-be83-48ec-924b-c8d945a5cedf tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: 7d9a24e0-c265-4255-964f-54c971c02ded] Unplugging VIFs for instance {{(pid=61839) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 654.983279] env[61839]: DEBUG oslo_concurrency.lockutils [None req-db043505-be83-48ec-924b-c8d945a5cedf tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Acquiring lock "refresh_cache-7d9a24e0-c265-4255-964f-54c971c02ded" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 654.983279] env[61839]: DEBUG oslo_concurrency.lockutils [None req-db043505-be83-48ec-924b-c8d945a5cedf tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Acquired lock "refresh_cache-7d9a24e0-c265-4255-964f-54c971c02ded" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 654.983593] env[61839]: DEBUG nova.network.neutron [None req-db043505-be83-48ec-924b-c8d945a5cedf tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: 7d9a24e0-c265-4255-964f-54c971c02ded] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 655.179615] env[61839]: DEBUG oslo_concurrency.lockutils [None req-66411a82-44a4-4e09-b7e5-387750763c23 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 655.510770] env[61839]: DEBUG nova.network.neutron [None req-db043505-be83-48ec-924b-c8d945a5cedf tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: 7d9a24e0-c265-4255-964f-54c971c02ded] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 655.721353] env[61839]: DEBUG nova.network.neutron [None req-db043505-be83-48ec-924b-c8d945a5cedf tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: 7d9a24e0-c265-4255-964f-54c971c02ded] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 655.928775] env[61839]: ERROR nova.compute.manager [None req-76059687-6f91-475a-bfaf-52787dc520ba tempest-AttachInterfacesUnderV243Test-1125158786 tempest-AttachInterfacesUnderV243Test-1125158786-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 6ebbd250-cd9f-40d5-a18c-b3210da6abe9, please check neutron logs for more information. 
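Annotation: every failure in this section carries the same message text, produced by the PortBindingFailed exception raised in the tracebacks; the traceback for the "failed network setup" error just above follows in the records below. A self-contained sketch of such an exception, assuming Nova's msg_fmt-style keyword formatting convention (only the message text and the port_id keyword are confirmed by this log):

    class PortBindingFailed(Exception):
        # Message text verbatim from this log; the %(port_id)s formatting
        # style is an assumption modeled on Nova's exception conventions.
        msg_fmt = ('Binding failed for port %(port_id)s, '
                   'please check neutron logs for more information.')

        def __init__(self, **kwargs):
            super().__init__(self.msg_fmt % kwargs)

    # raise PortBindingFailed(port_id=port['id']) reproduces the message above.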
[ 655.928775] env[61839]: ERROR nova.compute.manager Traceback (most recent call last): [ 655.928775] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 655.928775] env[61839]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 655.928775] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 655.928775] env[61839]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 655.928775] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 655.928775] env[61839]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 655.928775] env[61839]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 655.928775] env[61839]: ERROR nova.compute.manager self.force_reraise() [ 655.928775] env[61839]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 655.928775] env[61839]: ERROR nova.compute.manager raise self.value [ 655.928775] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 655.928775] env[61839]: ERROR nova.compute.manager updated_port = self._update_port( [ 655.928775] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 655.928775] env[61839]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 655.929803] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 655.929803] env[61839]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 655.929803] env[61839]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 6ebbd250-cd9f-40d5-a18c-b3210da6abe9, please check neutron logs for more information. 
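Annotation: the frames above always bottom out in nova/network/neutron.py line 294, _ensure_no_port_binding_failure; the raw, unprefixed dump of the same traceback follows in the records below. Reconstructed from those frames, the helper amounts to the check sketched here; the function name, module, and raise statement come straight from the traceback, while the 'binding_failed' vif_type test is an assumption about the body:

    def _ensure_no_port_binding_failure(port):
        # Neutron marks a port whose binding could not be completed by
        # setting binding:vif_type to 'binding_failed'; Nova converts that
        # into the PortBindingFailed sketched earlier (body assumed).
        if port.get('binding:vif_type') == 'binding_failed':
            raise PortBindingFailed(port_id=port['id'])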
[ 655.929803] env[61839]: ERROR nova.compute.manager [ 655.929803] env[61839]: Traceback (most recent call last): [ 655.929803] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 655.929803] env[61839]: listener.cb(fileno) [ 655.929803] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 655.929803] env[61839]: result = function(*args, **kwargs) [ 655.929803] env[61839]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 655.929803] env[61839]: return func(*args, **kwargs) [ 655.929803] env[61839]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 655.929803] env[61839]: raise e [ 655.929803] env[61839]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 655.929803] env[61839]: nwinfo = self.network_api.allocate_for_instance( [ 655.929803] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 655.929803] env[61839]: created_port_ids = self._update_ports_for_instance( [ 655.929803] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 655.929803] env[61839]: with excutils.save_and_reraise_exception(): [ 655.929803] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 655.929803] env[61839]: self.force_reraise() [ 655.929803] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 655.929803] env[61839]: raise self.value [ 655.929803] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 655.929803] env[61839]: updated_port = self._update_port( [ 655.929803] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 655.929803] env[61839]: _ensure_no_port_binding_failure(port) [ 655.929803] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 655.929803] env[61839]: raise exception.PortBindingFailed(port_id=port['id']) [ 655.930667] env[61839]: nova.exception.PortBindingFailed: Binding failed for port 6ebbd250-cd9f-40d5-a18c-b3210da6abe9, please check neutron logs for more information. [ 655.930667] env[61839]: Removing descriptor: 17 [ 655.930667] env[61839]: ERROR nova.compute.manager [None req-76059687-6f91-475a-bfaf-52787dc520ba tempest-AttachInterfacesUnderV243Test-1125158786 tempest-AttachInterfacesUnderV243Test-1125158786-project-member] [instance: 328882b4-d1af-4036-b313-ecada7d53899] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 6ebbd250-cd9f-40d5-a18c-b3210da6abe9, please check neutron logs for more information. 
[ 655.930667] env[61839]: ERROR nova.compute.manager [instance: 328882b4-d1af-4036-b313-ecada7d53899] Traceback (most recent call last): [ 655.930667] env[61839]: ERROR nova.compute.manager [instance: 328882b4-d1af-4036-b313-ecada7d53899] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 655.930667] env[61839]: ERROR nova.compute.manager [instance: 328882b4-d1af-4036-b313-ecada7d53899] yield resources [ 655.930667] env[61839]: ERROR nova.compute.manager [instance: 328882b4-d1af-4036-b313-ecada7d53899] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 655.930667] env[61839]: ERROR nova.compute.manager [instance: 328882b4-d1af-4036-b313-ecada7d53899] self.driver.spawn(context, instance, image_meta, [ 655.930667] env[61839]: ERROR nova.compute.manager [instance: 328882b4-d1af-4036-b313-ecada7d53899] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 655.930667] env[61839]: ERROR nova.compute.manager [instance: 328882b4-d1af-4036-b313-ecada7d53899] self._vmops.spawn(context, instance, image_meta, injected_files, [ 655.930667] env[61839]: ERROR nova.compute.manager [instance: 328882b4-d1af-4036-b313-ecada7d53899] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 655.930667] env[61839]: ERROR nova.compute.manager [instance: 328882b4-d1af-4036-b313-ecada7d53899] vm_ref = self.build_virtual_machine(instance, [ 655.931035] env[61839]: ERROR nova.compute.manager [instance: 328882b4-d1af-4036-b313-ecada7d53899] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 655.931035] env[61839]: ERROR nova.compute.manager [instance: 328882b4-d1af-4036-b313-ecada7d53899] vif_infos = vmwarevif.get_vif_info(self._session, [ 655.931035] env[61839]: ERROR nova.compute.manager [instance: 328882b4-d1af-4036-b313-ecada7d53899] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 655.931035] env[61839]: ERROR nova.compute.manager [instance: 328882b4-d1af-4036-b313-ecada7d53899] for vif in network_info: [ 655.931035] env[61839]: ERROR nova.compute.manager [instance: 328882b4-d1af-4036-b313-ecada7d53899] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 655.931035] env[61839]: ERROR nova.compute.manager [instance: 328882b4-d1af-4036-b313-ecada7d53899] return self._sync_wrapper(fn, *args, **kwargs) [ 655.931035] env[61839]: ERROR nova.compute.manager [instance: 328882b4-d1af-4036-b313-ecada7d53899] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 655.931035] env[61839]: ERROR nova.compute.manager [instance: 328882b4-d1af-4036-b313-ecada7d53899] self.wait() [ 655.931035] env[61839]: ERROR nova.compute.manager [instance: 328882b4-d1af-4036-b313-ecada7d53899] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 655.931035] env[61839]: ERROR nova.compute.manager [instance: 328882b4-d1af-4036-b313-ecada7d53899] self[:] = self._gt.wait() [ 655.931035] env[61839]: ERROR nova.compute.manager [instance: 328882b4-d1af-4036-b313-ecada7d53899] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 655.931035] env[61839]: ERROR nova.compute.manager [instance: 328882b4-d1af-4036-b313-ecada7d53899] return self._exit_event.wait() [ 655.931035] env[61839]: ERROR nova.compute.manager [instance: 328882b4-d1af-4036-b313-ecada7d53899] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 655.931452] env[61839]: ERROR 
nova.compute.manager [instance: 328882b4-d1af-4036-b313-ecada7d53899] result = hub.switch() [ 655.931452] env[61839]: ERROR nova.compute.manager [instance: 328882b4-d1af-4036-b313-ecada7d53899] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 655.931452] env[61839]: ERROR nova.compute.manager [instance: 328882b4-d1af-4036-b313-ecada7d53899] return self.greenlet.switch() [ 655.931452] env[61839]: ERROR nova.compute.manager [instance: 328882b4-d1af-4036-b313-ecada7d53899] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 655.931452] env[61839]: ERROR nova.compute.manager [instance: 328882b4-d1af-4036-b313-ecada7d53899] result = function(*args, **kwargs) [ 655.931452] env[61839]: ERROR nova.compute.manager [instance: 328882b4-d1af-4036-b313-ecada7d53899] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 655.931452] env[61839]: ERROR nova.compute.manager [instance: 328882b4-d1af-4036-b313-ecada7d53899] return func(*args, **kwargs) [ 655.931452] env[61839]: ERROR nova.compute.manager [instance: 328882b4-d1af-4036-b313-ecada7d53899] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 655.931452] env[61839]: ERROR nova.compute.manager [instance: 328882b4-d1af-4036-b313-ecada7d53899] raise e [ 655.931452] env[61839]: ERROR nova.compute.manager [instance: 328882b4-d1af-4036-b313-ecada7d53899] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 655.931452] env[61839]: ERROR nova.compute.manager [instance: 328882b4-d1af-4036-b313-ecada7d53899] nwinfo = self.network_api.allocate_for_instance( [ 655.931452] env[61839]: ERROR nova.compute.manager [instance: 328882b4-d1af-4036-b313-ecada7d53899] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 655.931452] env[61839]: ERROR nova.compute.manager [instance: 328882b4-d1af-4036-b313-ecada7d53899] created_port_ids = self._update_ports_for_instance( [ 655.932195] env[61839]: ERROR nova.compute.manager [instance: 328882b4-d1af-4036-b313-ecada7d53899] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 655.932195] env[61839]: ERROR nova.compute.manager [instance: 328882b4-d1af-4036-b313-ecada7d53899] with excutils.save_and_reraise_exception(): [ 655.932195] env[61839]: ERROR nova.compute.manager [instance: 328882b4-d1af-4036-b313-ecada7d53899] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 655.932195] env[61839]: ERROR nova.compute.manager [instance: 328882b4-d1af-4036-b313-ecada7d53899] self.force_reraise() [ 655.932195] env[61839]: ERROR nova.compute.manager [instance: 328882b4-d1af-4036-b313-ecada7d53899] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 655.932195] env[61839]: ERROR nova.compute.manager [instance: 328882b4-d1af-4036-b313-ecada7d53899] raise self.value [ 655.932195] env[61839]: ERROR nova.compute.manager [instance: 328882b4-d1af-4036-b313-ecada7d53899] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 655.932195] env[61839]: ERROR nova.compute.manager [instance: 328882b4-d1af-4036-b313-ecada7d53899] updated_port = self._update_port( [ 655.932195] env[61839]: ERROR nova.compute.manager [instance: 328882b4-d1af-4036-b313-ecada7d53899] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 655.932195] 
env[61839]: ERROR nova.compute.manager [instance: 328882b4-d1af-4036-b313-ecada7d53899] _ensure_no_port_binding_failure(port) [ 655.932195] env[61839]: ERROR nova.compute.manager [instance: 328882b4-d1af-4036-b313-ecada7d53899] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 655.932195] env[61839]: ERROR nova.compute.manager [instance: 328882b4-d1af-4036-b313-ecada7d53899] raise exception.PortBindingFailed(port_id=port['id']) [ 655.932820] env[61839]: ERROR nova.compute.manager [instance: 328882b4-d1af-4036-b313-ecada7d53899] nova.exception.PortBindingFailed: Binding failed for port 6ebbd250-cd9f-40d5-a18c-b3210da6abe9, please check neutron logs for more information. [ 655.932820] env[61839]: ERROR nova.compute.manager [instance: 328882b4-d1af-4036-b313-ecada7d53899] [ 655.932820] env[61839]: INFO nova.compute.manager [None req-76059687-6f91-475a-bfaf-52787dc520ba tempest-AttachInterfacesUnderV243Test-1125158786 tempest-AttachInterfacesUnderV243Test-1125158786-project-member] [instance: 328882b4-d1af-4036-b313-ecada7d53899] Terminating instance [ 655.932820] env[61839]: DEBUG oslo_concurrency.lockutils [None req-76059687-6f91-475a-bfaf-52787dc520ba tempest-AttachInterfacesUnderV243Test-1125158786 tempest-AttachInterfacesUnderV243Test-1125158786-project-member] Acquiring lock "refresh_cache-328882b4-d1af-4036-b313-ecada7d53899" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 655.932820] env[61839]: DEBUG oslo_concurrency.lockutils [None req-76059687-6f91-475a-bfaf-52787dc520ba tempest-AttachInterfacesUnderV243Test-1125158786 tempest-AttachInterfacesUnderV243Test-1125158786-project-member] Acquired lock "refresh_cache-328882b4-d1af-4036-b313-ecada7d53899" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 655.932820] env[61839]: DEBUG nova.network.neutron [None req-76059687-6f91-475a-bfaf-52787dc520ba tempest-AttachInterfacesUnderV243Test-1125158786 tempest-AttachInterfacesUnderV243Test-1125158786-project-member] [instance: 328882b4-d1af-4036-b313-ecada7d53899] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 655.949293] env[61839]: DEBUG nova.compute.manager [req-903fc736-3672-4a99-a7c7-148f7480c5b5 req-e3c49b9e-036e-4ed7-afe8-33be4cbf8b96 service nova] [instance: 328882b4-d1af-4036-b313-ecada7d53899] Received event network-changed-6ebbd250-cd9f-40d5-a18c-b3210da6abe9 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 655.949483] env[61839]: DEBUG nova.compute.manager [req-903fc736-3672-4a99-a7c7-148f7480c5b5 req-e3c49b9e-036e-4ed7-afe8-33be4cbf8b96 service nova] [instance: 328882b4-d1af-4036-b313-ecada7d53899] Refreshing instance network info cache due to event network-changed-6ebbd250-cd9f-40d5-a18c-b3210da6abe9. 
{{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 655.949668] env[61839]: DEBUG oslo_concurrency.lockutils [req-903fc736-3672-4a99-a7c7-148f7480c5b5 req-e3c49b9e-036e-4ed7-afe8-33be4cbf8b96 service nova] Acquiring lock "refresh_cache-328882b4-d1af-4036-b313-ecada7d53899" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 656.233242] env[61839]: DEBUG oslo_concurrency.lockutils [None req-db043505-be83-48ec-924b-c8d945a5cedf tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Releasing lock "refresh_cache-7d9a24e0-c265-4255-964f-54c971c02ded" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 656.233513] env[61839]: DEBUG nova.compute.manager [None req-db043505-be83-48ec-924b-c8d945a5cedf tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged. {{(pid=61839) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 656.233659] env[61839]: DEBUG nova.compute.manager [None req-db043505-be83-48ec-924b-c8d945a5cedf tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: 7d9a24e0-c265-4255-964f-54c971c02ded] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 656.233827] env[61839]: DEBUG nova.network.neutron [None req-db043505-be83-48ec-924b-c8d945a5cedf tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: 7d9a24e0-c265-4255-964f-54c971c02ded] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 656.256592] env[61839]: DEBUG nova.network.neutron [None req-db043505-be83-48ec-924b-c8d945a5cedf tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: 7d9a24e0-c265-4255-964f-54c971c02ded] Instance cache missing network info.
{{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 656.401057] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62d02808-be08-4d5f-bc2c-dbec4a5b5d03 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.409048] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c794860-fb32-4644-bbe4-0a3a4e462100 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.442694] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2da1212d-cfc1-4d1c-b3bf-811bdb6b1746 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.449981] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41089ec3-0f6c-4a9f-99c3-d2a3d86beee6 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.462608] env[61839]: DEBUG nova.compute.provider_tree [None req-b7e16cf6-cab6-4bac-b016-9355722d13b1 tempest-ServerPasswordTestJSON-1977854233 tempest-ServerPasswordTestJSON-1977854233-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 656.464319] env[61839]: DEBUG nova.network.neutron [None req-76059687-6f91-475a-bfaf-52787dc520ba tempest-AttachInterfacesUnderV243Test-1125158786 tempest-AttachInterfacesUnderV243Test-1125158786-project-member] [instance: 328882b4-d1af-4036-b313-ecada7d53899] Instance cache missing network info. 
{{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 656.601306] env[61839]: DEBUG nova.network.neutron [None req-76059687-6f91-475a-bfaf-52787dc520ba tempest-AttachInterfacesUnderV243Test-1125158786 tempest-AttachInterfacesUnderV243Test-1125158786-project-member] [instance: 328882b4-d1af-4036-b313-ecada7d53899] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 656.760183] env[61839]: DEBUG nova.network.neutron [None req-db043505-be83-48ec-924b-c8d945a5cedf tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: 7d9a24e0-c265-4255-964f-54c971c02ded] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 656.968507] env[61839]: DEBUG nova.scheduler.client.report [None req-b7e16cf6-cab6-4bac-b016-9355722d13b1 tempest-ServerPasswordTestJSON-1977854233 tempest-ServerPasswordTestJSON-1977854233-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 657.109459] env[61839]: DEBUG oslo_concurrency.lockutils [None req-76059687-6f91-475a-bfaf-52787dc520ba tempest-AttachInterfacesUnderV243Test-1125158786 tempest-AttachInterfacesUnderV243Test-1125158786-project-member] Releasing lock "refresh_cache-328882b4-d1af-4036-b313-ecada7d53899" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 657.111925] env[61839]: DEBUG nova.compute.manager [None req-76059687-6f91-475a-bfaf-52787dc520ba tempest-AttachInterfacesUnderV243Test-1125158786 tempest-AttachInterfacesUnderV243Test-1125158786-project-member] [instance: 328882b4-d1af-4036-b313-ecada7d53899] Start destroying the instance on the hypervisor. 
{{(pid=61839) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 657.112312] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-76059687-6f91-475a-bfaf-52787dc520ba tempest-AttachInterfacesUnderV243Test-1125158786 tempest-AttachInterfacesUnderV243Test-1125158786-project-member] [instance: 328882b4-d1af-4036-b313-ecada7d53899] Destroying instance {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 657.112724] env[61839]: DEBUG oslo_concurrency.lockutils [req-903fc736-3672-4a99-a7c7-148f7480c5b5 req-e3c49b9e-036e-4ed7-afe8-33be4cbf8b96 service nova] Acquired lock "refresh_cache-328882b4-d1af-4036-b313-ecada7d53899" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 657.112914] env[61839]: DEBUG nova.network.neutron [req-903fc736-3672-4a99-a7c7-148f7480c5b5 req-e3c49b9e-036e-4ed7-afe8-33be4cbf8b96 service nova] [instance: 328882b4-d1af-4036-b313-ecada7d53899] Refreshing network info cache for port 6ebbd250-cd9f-40d5-a18c-b3210da6abe9 {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 657.114091] env[61839]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b6546638-9ea7-4e25-ae4a-9eeee32a5202 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.124059] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee47d122-e02a-4376-a83b-ac357759b7b5 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.148820] env[61839]: WARNING nova.virt.vmwareapi.vmops [None req-76059687-6f91-475a-bfaf-52787dc520ba tempest-AttachInterfacesUnderV243Test-1125158786 tempest-AttachInterfacesUnderV243Test-1125158786-project-member] [instance: 328882b4-d1af-4036-b313-ecada7d53899] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 328882b4-d1af-4036-b313-ecada7d53899 could not be found. [ 657.149128] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-76059687-6f91-475a-bfaf-52787dc520ba tempest-AttachInterfacesUnderV243Test-1125158786 tempest-AttachInterfacesUnderV243Test-1125158786-project-member] [instance: 328882b4-d1af-4036-b313-ecada7d53899] Instance destroyed {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 657.149358] env[61839]: INFO nova.compute.manager [None req-76059687-6f91-475a-bfaf-52787dc520ba tempest-AttachInterfacesUnderV243Test-1125158786 tempest-AttachInterfacesUnderV243Test-1125158786-project-member] [instance: 328882b4-d1af-4036-b313-ecada7d53899] Took 0.04 seconds to destroy the instance on the hypervisor. [ 657.149651] env[61839]: DEBUG oslo.service.loopingcall [None req-76059687-6f91-475a-bfaf-52787dc520ba tempest-AttachInterfacesUnderV243Test-1125158786 tempest-AttachInterfacesUnderV243Test-1125158786-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.
{{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 657.149885] env[61839]: DEBUG nova.compute.manager [-] [instance: 328882b4-d1af-4036-b313-ecada7d53899] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 657.149980] env[61839]: DEBUG nova.network.neutron [-] [instance: 328882b4-d1af-4036-b313-ecada7d53899] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 657.178155] env[61839]: DEBUG nova.network.neutron [-] [instance: 328882b4-d1af-4036-b313-ecada7d53899] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 657.265949] env[61839]: INFO nova.compute.manager [None req-db043505-be83-48ec-924b-c8d945a5cedf tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: 7d9a24e0-c265-4255-964f-54c971c02ded] Took 1.03 seconds to deallocate network for instance. [ 657.472671] env[61839]: DEBUG oslo_concurrency.lockutils [None req-b7e16cf6-cab6-4bac-b016-9355722d13b1 tempest-ServerPasswordTestJSON-1977854233 tempest-ServerPasswordTestJSON-1977854233-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.497s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 657.473206] env[61839]: DEBUG nova.compute.manager [None req-b7e16cf6-cab6-4bac-b016-9355722d13b1 tempest-ServerPasswordTestJSON-1977854233 tempest-ServerPasswordTestJSON-1977854233-project-member] [instance: 2670f16e-4c44-4b88-937e-9e491f599acb] Start building networks asynchronously for instance. {{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 657.475853] env[61839]: DEBUG oslo_concurrency.lockutils [None req-fcabcba5-2e8f-494e-bc5c-79e52812344e tempest-ServerActionsTestJSON-1845148809 tempest-ServerActionsTestJSON-1845148809-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.827s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 657.477695] env[61839]: INFO nova.compute.claims [None req-fcabcba5-2e8f-494e-bc5c-79e52812344e tempest-ServerActionsTestJSON-1845148809 tempest-ServerActionsTestJSON-1845148809-project-member] [instance: 2432a14e-ec45-452c-9592-de690dbc102e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 657.680459] env[61839]: DEBUG nova.network.neutron [-] [instance: 328882b4-d1af-4036-b313-ecada7d53899] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 657.796027] env[61839]: DEBUG nova.network.neutron [req-903fc736-3672-4a99-a7c7-148f7480c5b5 req-e3c49b9e-036e-4ed7-afe8-33be4cbf8b96 service nova] [instance: 328882b4-d1af-4036-b313-ecada7d53899] Instance cache missing network info. 
{{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 657.934392] env[61839]: DEBUG nova.network.neutron [req-903fc736-3672-4a99-a7c7-148f7480c5b5 req-e3c49b9e-036e-4ed7-afe8-33be4cbf8b96 service nova] [instance: 328882b4-d1af-4036-b313-ecada7d53899] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 657.982515] env[61839]: DEBUG nova.compute.utils [None req-b7e16cf6-cab6-4bac-b016-9355722d13b1 tempest-ServerPasswordTestJSON-1977854233 tempest-ServerPasswordTestJSON-1977854233-project-member] Using /dev/sd instead of None {{(pid=61839) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 657.987080] env[61839]: DEBUG nova.compute.manager [None req-b7e16cf6-cab6-4bac-b016-9355722d13b1 tempest-ServerPasswordTestJSON-1977854233 tempest-ServerPasswordTestJSON-1977854233-project-member] [instance: 2670f16e-4c44-4b88-937e-9e491f599acb] Allocating IP information in the background. {{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 657.987318] env[61839]: DEBUG nova.network.neutron [None req-b7e16cf6-cab6-4bac-b016-9355722d13b1 tempest-ServerPasswordTestJSON-1977854233 tempest-ServerPasswordTestJSON-1977854233-project-member] [instance: 2670f16e-4c44-4b88-937e-9e491f599acb] allocate_for_instance() {{(pid=61839) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 658.067357] env[61839]: DEBUG nova.policy [None req-b7e16cf6-cab6-4bac-b016-9355722d13b1 tempest-ServerPasswordTestJSON-1977854233 tempest-ServerPasswordTestJSON-1977854233-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e44e37d94bce4591a5b39b5afb483229', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6eaa6f1803f342578188db9a4b136e1f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61839) authorize /opt/stack/nova/nova/policy.py:201}} [ 658.183429] env[61839]: INFO nova.compute.manager [-] [instance: 328882b4-d1af-4036-b313-ecada7d53899] Took 1.03 seconds to deallocate network for instance. 
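[editor's note] Both build failures in this run funnel through the same check: the tracebacks above show _update_port (nova/network/neutron.py:585) calling _ensure_no_port_binding_failure (line 294), which raises nova.exception.PortBindingFailed. Below is a minimal, self-contained sketch of that check reconstructed from the tracebacks; the 'binding_failed' vif-type condition is an assumption about how Neutron flags a failed binding on the port, not text taken from this log.

    # Hypothetical stand-in for nova.exception.PortBindingFailed; the
    # message format matches the ERROR records above.
    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(
                f"Binding failed for port {port_id}, "
                "please check neutron logs for more information.")

    def _ensure_no_port_binding_failure(port):
        # Assumption: Neutron marks a failed binding by setting the
        # port's binding:vif_type attribute to 'binding_failed'.
        if port.get('binding:vif_type') == 'binding_failed':
            raise PortBindingFailed(port_id=port['id'])

    # Feeding it the port from the failure above reproduces the logged message:
    # _ensure_no_port_binding_failure(
    #     {'id': '6ebbd250-cd9f-40d5-a18c-b3210da6abe9',
    #      'binding:vif_type': 'binding_failed'})

Because the check runs on the port dict returned by Neutron, the compute node only learns that binding failed after the fact, which is why the log advises checking the neutron logs for the root cause.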
[ 658.185895] env[61839]: DEBUG nova.compute.claims [None req-76059687-6f91-475a-bfaf-52787dc520ba tempest-AttachInterfacesUnderV243Test-1125158786 tempest-AttachInterfacesUnderV243Test-1125158786-project-member] [instance: 328882b4-d1af-4036-b313-ecada7d53899] Aborting claim: {{(pid=61839) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 658.186088] env[61839]: DEBUG oslo_concurrency.lockutils [None req-76059687-6f91-475a-bfaf-52787dc520ba tempest-AttachInterfacesUnderV243Test-1125158786 tempest-AttachInterfacesUnderV243Test-1125158786-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 658.304953] env[61839]: DEBUG nova.compute.manager [req-1a820d6f-9449-4acd-a5aa-f32dba38a099 req-d6274345-bd5b-402a-b204-ce7d2fef1465 service nova] [instance: 328882b4-d1af-4036-b313-ecada7d53899] Received event network-vif-deleted-6ebbd250-cd9f-40d5-a18c-b3210da6abe9 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 658.312225] env[61839]: INFO nova.scheduler.client.report [None req-db043505-be83-48ec-924b-c8d945a5cedf tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Deleted allocations for instance 7d9a24e0-c265-4255-964f-54c971c02ded [ 658.438997] env[61839]: DEBUG oslo_concurrency.lockutils [req-903fc736-3672-4a99-a7c7-148f7480c5b5 req-e3c49b9e-036e-4ed7-afe8-33be4cbf8b96 service nova] Releasing lock "refresh_cache-328882b4-d1af-4036-b313-ecada7d53899" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 658.490443] env[61839]: DEBUG nova.compute.manager [None req-b7e16cf6-cab6-4bac-b016-9355722d13b1 tempest-ServerPasswordTestJSON-1977854233 tempest-ServerPasswordTestJSON-1977854233-project-member] [instance: 2670f16e-4c44-4b88-937e-9e491f599acb] Start building block device mappings for instance. 
{{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 658.497622] env[61839]: DEBUG nova.network.neutron [None req-b7e16cf6-cab6-4bac-b016-9355722d13b1 tempest-ServerPasswordTestJSON-1977854233 tempest-ServerPasswordTestJSON-1977854233-project-member] [instance: 2670f16e-4c44-4b88-937e-9e491f599acb] Successfully created port: 64db97a0-6fce-45d1-904c-ce34410e38b0 {{(pid=61839) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 658.823271] env[61839]: DEBUG oslo_concurrency.lockutils [None req-db043505-be83-48ec-924b-c8d945a5cedf tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Lock "7d9a24e0-c265-4255-964f-54c971c02ded" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 100.389s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 658.994283] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fef747d8-977f-4bc4-98f7-248b79fffab0 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.008109] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aea6747e-1d37-4507-a53c-c8f1aec77353 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.050204] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-051c70ae-3a9a-430d-8abe-cf1d63817167 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.059890] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-377184a6-832b-4e5f-80ca-74d14ebe2682 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.083497] env[61839]: DEBUG nova.compute.provider_tree [None req-fcabcba5-2e8f-494e-bc5c-79e52812344e tempest-ServerActionsTestJSON-1845148809 tempest-ServerActionsTestJSON-1845148809-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 659.326421] env[61839]: DEBUG nova.compute.manager [None req-176fdd3e-e9a1-456d-89bc-e1597ce200e4 tempest-ImagesOneServerTestJSON-1580027351 tempest-ImagesOneServerTestJSON-1580027351-project-member] [instance: ad32bc49-5e52-468a-9d93-390c8649dcae] Starting instance... {{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 659.504987] env[61839]: DEBUG nova.compute.manager [None req-b7e16cf6-cab6-4bac-b016-9355722d13b1 tempest-ServerPasswordTestJSON-1977854233 tempest-ServerPasswordTestJSON-1977854233-project-member] [instance: 2670f16e-4c44-4b88-937e-9e491f599acb] Start spawning the instance on the hypervisor.
{{(pid=61839) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 659.538731] env[61839]: DEBUG nova.virt.hardware [None req-b7e16cf6-cab6-4bac-b016-9355722d13b1 tempest-ServerPasswordTestJSON-1977854233 tempest-ServerPasswordTestJSON-1977854233-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-18T16:51:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-18T16:50:57Z,direct_url=,disk_format='vmdk',id=e497cc62-282a-4a70-9770-22d80d8a1013,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a98e034276e44534a9feace637762da7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-18T16:50:58Z,virtual_size=,visibility=), allow threads: False {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 659.538945] env[61839]: DEBUG nova.virt.hardware [None req-b7e16cf6-cab6-4bac-b016-9355722d13b1 tempest-ServerPasswordTestJSON-1977854233 tempest-ServerPasswordTestJSON-1977854233-project-member] Flavor limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 659.539077] env[61839]: DEBUG nova.virt.hardware [None req-b7e16cf6-cab6-4bac-b016-9355722d13b1 tempest-ServerPasswordTestJSON-1977854233 tempest-ServerPasswordTestJSON-1977854233-project-member] Image limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 659.539265] env[61839]: DEBUG nova.virt.hardware [None req-b7e16cf6-cab6-4bac-b016-9355722d13b1 tempest-ServerPasswordTestJSON-1977854233 tempest-ServerPasswordTestJSON-1977854233-project-member] Flavor pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 659.539409] env[61839]: DEBUG nova.virt.hardware [None req-b7e16cf6-cab6-4bac-b016-9355722d13b1 tempest-ServerPasswordTestJSON-1977854233 tempest-ServerPasswordTestJSON-1977854233-project-member] Image pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 659.539553] env[61839]: DEBUG nova.virt.hardware [None req-b7e16cf6-cab6-4bac-b016-9355722d13b1 tempest-ServerPasswordTestJSON-1977854233 tempest-ServerPasswordTestJSON-1977854233-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 659.539762] env[61839]: DEBUG nova.virt.hardware [None req-b7e16cf6-cab6-4bac-b016-9355722d13b1 tempest-ServerPasswordTestJSON-1977854233 tempest-ServerPasswordTestJSON-1977854233-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 659.539907] env[61839]: DEBUG nova.virt.hardware [None req-b7e16cf6-cab6-4bac-b016-9355722d13b1 tempest-ServerPasswordTestJSON-1977854233 tempest-ServerPasswordTestJSON-1977854233-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 659.540081] env[61839]: DEBUG nova.virt.hardware [None 
req-b7e16cf6-cab6-4bac-b016-9355722d13b1 tempest-ServerPasswordTestJSON-1977854233 tempest-ServerPasswordTestJSON-1977854233-project-member] Got 1 possible topologies {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 659.540242] env[61839]: DEBUG nova.virt.hardware [None req-b7e16cf6-cab6-4bac-b016-9355722d13b1 tempest-ServerPasswordTestJSON-1977854233 tempest-ServerPasswordTestJSON-1977854233-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 659.540517] env[61839]: DEBUG nova.virt.hardware [None req-b7e16cf6-cab6-4bac-b016-9355722d13b1 tempest-ServerPasswordTestJSON-1977854233 tempest-ServerPasswordTestJSON-1977854233-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 659.544319] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd11f492-2906-49b5-b673-30f67a577254 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.551376] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cbe5136-22ea-46ab-bc07-ef6cc2d1c914 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.587236] env[61839]: DEBUG nova.scheduler.client.report [None req-fcabcba5-2e8f-494e-bc5c-79e52812344e tempest-ServerActionsTestJSON-1845148809 tempest-ServerActionsTestJSON-1845148809-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 659.853643] env[61839]: DEBUG oslo_concurrency.lockutils [None req-176fdd3e-e9a1-456d-89bc-e1597ce200e4 tempest-ImagesOneServerTestJSON-1580027351 tempest-ImagesOneServerTestJSON-1580027351-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 660.092714] env[61839]: DEBUG oslo_concurrency.lockutils [None req-fcabcba5-2e8f-494e-bc5c-79e52812344e tempest-ServerActionsTestJSON-1845148809 tempest-ServerActionsTestJSON-1845148809-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.617s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 660.093233] env[61839]: DEBUG nova.compute.manager [None req-fcabcba5-2e8f-494e-bc5c-79e52812344e tempest-ServerActionsTestJSON-1845148809 tempest-ServerActionsTestJSON-1845148809-project-member] [instance: 2432a14e-ec45-452c-9592-de690dbc102e] Start building networks asynchronously for instance. 
{{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 660.096264] env[61839]: DEBUG oslo_concurrency.lockutils [None req-758c0332-86e7-48d1-9a5c-4459947020cd tempest-AttachInterfacesV270Test-504327934 tempest-AttachInterfacesV270Test-504327934-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 28.627s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 660.141951] env[61839]: ERROR nova.compute.manager [None req-b7e16cf6-cab6-4bac-b016-9355722d13b1 tempest-ServerPasswordTestJSON-1977854233 tempest-ServerPasswordTestJSON-1977854233-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 64db97a0-6fce-45d1-904c-ce34410e38b0, please check neutron logs for more information. [ 660.141951] env[61839]: ERROR nova.compute.manager Traceback (most recent call last): [ 660.141951] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 660.141951] env[61839]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 660.141951] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 660.141951] env[61839]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 660.141951] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 660.141951] env[61839]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 660.141951] env[61839]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 660.141951] env[61839]: ERROR nova.compute.manager self.force_reraise() [ 660.141951] env[61839]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 660.141951] env[61839]: ERROR nova.compute.manager raise self.value [ 660.141951] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 660.141951] env[61839]: ERROR nova.compute.manager updated_port = self._update_port( [ 660.141951] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 660.141951] env[61839]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 660.142828] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 660.142828] env[61839]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 660.142828] env[61839]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 64db97a0-6fce-45d1-904c-ce34410e38b0, please check neutron logs for more information. 
[ 660.142828] env[61839]: ERROR nova.compute.manager [ 660.142828] env[61839]: Traceback (most recent call last): [ 660.142828] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 660.142828] env[61839]: listener.cb(fileno) [ 660.142828] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 660.142828] env[61839]: result = function(*args, **kwargs) [ 660.142828] env[61839]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 660.142828] env[61839]: return func(*args, **kwargs) [ 660.142828] env[61839]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 660.142828] env[61839]: raise e [ 660.142828] env[61839]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 660.142828] env[61839]: nwinfo = self.network_api.allocate_for_instance( [ 660.142828] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 660.142828] env[61839]: created_port_ids = self._update_ports_for_instance( [ 660.142828] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 660.142828] env[61839]: with excutils.save_and_reraise_exception(): [ 660.142828] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 660.142828] env[61839]: self.force_reraise() [ 660.142828] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 660.142828] env[61839]: raise self.value [ 660.142828] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 660.142828] env[61839]: updated_port = self._update_port( [ 660.142828] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 660.142828] env[61839]: _ensure_no_port_binding_failure(port) [ 660.142828] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 660.142828] env[61839]: raise exception.PortBindingFailed(port_id=port['id']) [ 660.144333] env[61839]: nova.exception.PortBindingFailed: Binding failed for port 64db97a0-6fce-45d1-904c-ce34410e38b0, please check neutron logs for more information. [ 660.144333] env[61839]: Removing descriptor: 17 [ 660.144333] env[61839]: ERROR nova.compute.manager [None req-b7e16cf6-cab6-4bac-b016-9355722d13b1 tempest-ServerPasswordTestJSON-1977854233 tempest-ServerPasswordTestJSON-1977854233-project-member] [instance: 2670f16e-4c44-4b88-937e-9e491f599acb] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 64db97a0-6fce-45d1-904c-ce34410e38b0, please check neutron logs for more information. 
[ 660.144333] env[61839]: ERROR nova.compute.manager [instance: 2670f16e-4c44-4b88-937e-9e491f599acb] Traceback (most recent call last): [ 660.144333] env[61839]: ERROR nova.compute.manager [instance: 2670f16e-4c44-4b88-937e-9e491f599acb] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 660.144333] env[61839]: ERROR nova.compute.manager [instance: 2670f16e-4c44-4b88-937e-9e491f599acb] yield resources [ 660.144333] env[61839]: ERROR nova.compute.manager [instance: 2670f16e-4c44-4b88-937e-9e491f599acb] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 660.144333] env[61839]: ERROR nova.compute.manager [instance: 2670f16e-4c44-4b88-937e-9e491f599acb] self.driver.spawn(context, instance, image_meta, [ 660.144333] env[61839]: ERROR nova.compute.manager [instance: 2670f16e-4c44-4b88-937e-9e491f599acb] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 660.144333] env[61839]: ERROR nova.compute.manager [instance: 2670f16e-4c44-4b88-937e-9e491f599acb] self._vmops.spawn(context, instance, image_meta, injected_files, [ 660.144333] env[61839]: ERROR nova.compute.manager [instance: 2670f16e-4c44-4b88-937e-9e491f599acb] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 660.144333] env[61839]: ERROR nova.compute.manager [instance: 2670f16e-4c44-4b88-937e-9e491f599acb] vm_ref = self.build_virtual_machine(instance, [ 660.146256] env[61839]: ERROR nova.compute.manager [instance: 2670f16e-4c44-4b88-937e-9e491f599acb] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 660.146256] env[61839]: ERROR nova.compute.manager [instance: 2670f16e-4c44-4b88-937e-9e491f599acb] vif_infos = vmwarevif.get_vif_info(self._session, [ 660.146256] env[61839]: ERROR nova.compute.manager [instance: 2670f16e-4c44-4b88-937e-9e491f599acb] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 660.146256] env[61839]: ERROR nova.compute.manager [instance: 2670f16e-4c44-4b88-937e-9e491f599acb] for vif in network_info: [ 660.146256] env[61839]: ERROR nova.compute.manager [instance: 2670f16e-4c44-4b88-937e-9e491f599acb] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 660.146256] env[61839]: ERROR nova.compute.manager [instance: 2670f16e-4c44-4b88-937e-9e491f599acb] return self._sync_wrapper(fn, *args, **kwargs) [ 660.146256] env[61839]: ERROR nova.compute.manager [instance: 2670f16e-4c44-4b88-937e-9e491f599acb] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 660.146256] env[61839]: ERROR nova.compute.manager [instance: 2670f16e-4c44-4b88-937e-9e491f599acb] self.wait() [ 660.146256] env[61839]: ERROR nova.compute.manager [instance: 2670f16e-4c44-4b88-937e-9e491f599acb] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 660.146256] env[61839]: ERROR nova.compute.manager [instance: 2670f16e-4c44-4b88-937e-9e491f599acb] self[:] = self._gt.wait() [ 660.146256] env[61839]: ERROR nova.compute.manager [instance: 2670f16e-4c44-4b88-937e-9e491f599acb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 660.146256] env[61839]: ERROR nova.compute.manager [instance: 2670f16e-4c44-4b88-937e-9e491f599acb] return self._exit_event.wait() [ 660.146256] env[61839]: ERROR nova.compute.manager [instance: 2670f16e-4c44-4b88-937e-9e491f599acb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 660.146871] env[61839]: ERROR 
nova.compute.manager [instance: 2670f16e-4c44-4b88-937e-9e491f599acb] result = hub.switch() [ 660.146871] env[61839]: ERROR nova.compute.manager [instance: 2670f16e-4c44-4b88-937e-9e491f599acb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 660.146871] env[61839]: ERROR nova.compute.manager [instance: 2670f16e-4c44-4b88-937e-9e491f599acb] return self.greenlet.switch() [ 660.146871] env[61839]: ERROR nova.compute.manager [instance: 2670f16e-4c44-4b88-937e-9e491f599acb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 660.146871] env[61839]: ERROR nova.compute.manager [instance: 2670f16e-4c44-4b88-937e-9e491f599acb] result = function(*args, **kwargs) [ 660.146871] env[61839]: ERROR nova.compute.manager [instance: 2670f16e-4c44-4b88-937e-9e491f599acb] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 660.146871] env[61839]: ERROR nova.compute.manager [instance: 2670f16e-4c44-4b88-937e-9e491f599acb] return func(*args, **kwargs) [ 660.146871] env[61839]: ERROR nova.compute.manager [instance: 2670f16e-4c44-4b88-937e-9e491f599acb] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 660.146871] env[61839]: ERROR nova.compute.manager [instance: 2670f16e-4c44-4b88-937e-9e491f599acb] raise e [ 660.146871] env[61839]: ERROR nova.compute.manager [instance: 2670f16e-4c44-4b88-937e-9e491f599acb] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 660.146871] env[61839]: ERROR nova.compute.manager [instance: 2670f16e-4c44-4b88-937e-9e491f599acb] nwinfo = self.network_api.allocate_for_instance( [ 660.146871] env[61839]: ERROR nova.compute.manager [instance: 2670f16e-4c44-4b88-937e-9e491f599acb] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 660.146871] env[61839]: ERROR nova.compute.manager [instance: 2670f16e-4c44-4b88-937e-9e491f599acb] created_port_ids = self._update_ports_for_instance( [ 660.147206] env[61839]: ERROR nova.compute.manager [instance: 2670f16e-4c44-4b88-937e-9e491f599acb] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 660.147206] env[61839]: ERROR nova.compute.manager [instance: 2670f16e-4c44-4b88-937e-9e491f599acb] with excutils.save_and_reraise_exception(): [ 660.147206] env[61839]: ERROR nova.compute.manager [instance: 2670f16e-4c44-4b88-937e-9e491f599acb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 660.147206] env[61839]: ERROR nova.compute.manager [instance: 2670f16e-4c44-4b88-937e-9e491f599acb] self.force_reraise() [ 660.147206] env[61839]: ERROR nova.compute.manager [instance: 2670f16e-4c44-4b88-937e-9e491f599acb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 660.147206] env[61839]: ERROR nova.compute.manager [instance: 2670f16e-4c44-4b88-937e-9e491f599acb] raise self.value [ 660.147206] env[61839]: ERROR nova.compute.manager [instance: 2670f16e-4c44-4b88-937e-9e491f599acb] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 660.147206] env[61839]: ERROR nova.compute.manager [instance: 2670f16e-4c44-4b88-937e-9e491f599acb] updated_port = self._update_port( [ 660.147206] env[61839]: ERROR nova.compute.manager [instance: 2670f16e-4c44-4b88-937e-9e491f599acb] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 660.147206] 
env[61839]: ERROR nova.compute.manager [instance: 2670f16e-4c44-4b88-937e-9e491f599acb] _ensure_no_port_binding_failure(port) [ 660.147206] env[61839]: ERROR nova.compute.manager [instance: 2670f16e-4c44-4b88-937e-9e491f599acb] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 660.147206] env[61839]: ERROR nova.compute.manager [instance: 2670f16e-4c44-4b88-937e-9e491f599acb] raise exception.PortBindingFailed(port_id=port['id']) [ 660.147612] env[61839]: ERROR nova.compute.manager [instance: 2670f16e-4c44-4b88-937e-9e491f599acb] nova.exception.PortBindingFailed: Binding failed for port 64db97a0-6fce-45d1-904c-ce34410e38b0, please check neutron logs for more information. [ 660.147612] env[61839]: ERROR nova.compute.manager [instance: 2670f16e-4c44-4b88-937e-9e491f599acb] [ 660.147612] env[61839]: INFO nova.compute.manager [None req-b7e16cf6-cab6-4bac-b016-9355722d13b1 tempest-ServerPasswordTestJSON-1977854233 tempest-ServerPasswordTestJSON-1977854233-project-member] [instance: 2670f16e-4c44-4b88-937e-9e491f599acb] Terminating instance [ 660.150162] env[61839]: DEBUG oslo_concurrency.lockutils [None req-b7e16cf6-cab6-4bac-b016-9355722d13b1 tempest-ServerPasswordTestJSON-1977854233 tempest-ServerPasswordTestJSON-1977854233-project-member] Acquiring lock "refresh_cache-2670f16e-4c44-4b88-937e-9e491f599acb" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 660.150162] env[61839]: DEBUG oslo_concurrency.lockutils [None req-b7e16cf6-cab6-4bac-b016-9355722d13b1 tempest-ServerPasswordTestJSON-1977854233 tempest-ServerPasswordTestJSON-1977854233-project-member] Acquired lock "refresh_cache-2670f16e-4c44-4b88-937e-9e491f599acb" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 660.150384] env[61839]: DEBUG nova.network.neutron [None req-b7e16cf6-cab6-4bac-b016-9355722d13b1 tempest-ServerPasswordTestJSON-1977854233 tempest-ServerPasswordTestJSON-1977854233-project-member] [instance: 2670f16e-4c44-4b88-937e-9e491f599acb] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 660.605277] env[61839]: DEBUG nova.compute.utils [None req-fcabcba5-2e8f-494e-bc5c-79e52812344e tempest-ServerActionsTestJSON-1845148809 tempest-ServerActionsTestJSON-1845148809-project-member] Using /dev/sd instead of None {{(pid=61839) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 660.608834] env[61839]: DEBUG nova.compute.manager [None req-fcabcba5-2e8f-494e-bc5c-79e52812344e tempest-ServerActionsTestJSON-1845148809 tempest-ServerActionsTestJSON-1845148809-project-member] [instance: 2432a14e-ec45-452c-9592-de690dbc102e] Allocating IP information in the background. 
{{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 660.609509] env[61839]: DEBUG nova.network.neutron [None req-fcabcba5-2e8f-494e-bc5c-79e52812344e tempest-ServerActionsTestJSON-1845148809 tempest-ServerActionsTestJSON-1845148809-project-member] [instance: 2432a14e-ec45-452c-9592-de690dbc102e] allocate_for_instance() {{(pid=61839) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 660.636500] env[61839]: DEBUG nova.compute.manager [req-2d190512-283d-4d11-9d1d-1aa6a763f8f2 req-30f4173e-df22-44f1-9e42-83de2603ec59 service nova] [instance: 2670f16e-4c44-4b88-937e-9e491f599acb] Received event network-changed-64db97a0-6fce-45d1-904c-ce34410e38b0 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 660.636687] env[61839]: DEBUG nova.compute.manager [req-2d190512-283d-4d11-9d1d-1aa6a763f8f2 req-30f4173e-df22-44f1-9e42-83de2603ec59 service nova] [instance: 2670f16e-4c44-4b88-937e-9e491f599acb] Refreshing instance network info cache due to event network-changed-64db97a0-6fce-45d1-904c-ce34410e38b0. {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 660.636775] env[61839]: DEBUG oslo_concurrency.lockutils [req-2d190512-283d-4d11-9d1d-1aa6a763f8f2 req-30f4173e-df22-44f1-9e42-83de2603ec59 service nova] Acquiring lock "refresh_cache-2670f16e-4c44-4b88-937e-9e491f599acb" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 660.685237] env[61839]: DEBUG nova.network.neutron [None req-b7e16cf6-cab6-4bac-b016-9355722d13b1 tempest-ServerPasswordTestJSON-1977854233 tempest-ServerPasswordTestJSON-1977854233-project-member] [instance: 2670f16e-4c44-4b88-937e-9e491f599acb] Instance cache missing network info. 
{{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 660.699454] env[61839]: DEBUG nova.policy [None req-fcabcba5-2e8f-494e-bc5c-79e52812344e tempest-ServerActionsTestJSON-1845148809 tempest-ServerActionsTestJSON-1845148809-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '244dffb9ecaf446188bc5ff26de13483', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b285c9208067449eb1a67169170e7d10', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61839) authorize /opt/stack/nova/nova/policy.py:201}} [ 660.865404] env[61839]: DEBUG oslo_concurrency.lockutils [None req-ebcaba21-dc43-4473-b939-c569f2c1af99 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Acquiring lock "e65da0fd-e877-4b25-a319-e4d65397056a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 660.865404] env[61839]: DEBUG oslo_concurrency.lockutils [None req-ebcaba21-dc43-4473-b939-c569f2c1af99 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Lock "e65da0fd-e877-4b25-a319-e4d65397056a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 661.011536] env[61839]: DEBUG nova.network.neutron [None req-b7e16cf6-cab6-4bac-b016-9355722d13b1 tempest-ServerPasswordTestJSON-1977854233 tempest-ServerPasswordTestJSON-1977854233-project-member] [instance: 2670f16e-4c44-4b88-937e-9e491f599acb] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 661.062725] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db0abbf9-c07e-4003-ab54-0babd5800a40 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.071556] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-766e0204-3ded-40cd-83a6-52ceeb5feeee {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.102525] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b233caf5-0083-4935-b6a6-4931ae6634ac {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.108967] env[61839]: DEBUG nova.compute.manager [None req-fcabcba5-2e8f-494e-bc5c-79e52812344e tempest-ServerActionsTestJSON-1845148809 tempest-ServerActionsTestJSON-1845148809-project-member] [instance: 2432a14e-ec45-452c-9592-de690dbc102e] Start building block device mappings for instance. 
{{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 661.112675] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-957cabbd-e574-410b-9e1e-713973e02b38 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.130682] env[61839]: DEBUG nova.compute.provider_tree [None req-758c0332-86e7-48d1-9a5c-4459947020cd tempest-AttachInterfacesV270Test-504327934 tempest-AttachInterfacesV270Test-504327934-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 661.443820] env[61839]: DEBUG nova.network.neutron [None req-fcabcba5-2e8f-494e-bc5c-79e52812344e tempest-ServerActionsTestJSON-1845148809 tempest-ServerActionsTestJSON-1845148809-project-member] [instance: 2432a14e-ec45-452c-9592-de690dbc102e] Successfully created port: a1a17fb4-0f77-408c-9dd1-324dedc031a7 {{(pid=61839) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 661.515862] env[61839]: DEBUG oslo_concurrency.lockutils [None req-b7e16cf6-cab6-4bac-b016-9355722d13b1 tempest-ServerPasswordTestJSON-1977854233 tempest-ServerPasswordTestJSON-1977854233-project-member] Releasing lock "refresh_cache-2670f16e-4c44-4b88-937e-9e491f599acb" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 661.515862] env[61839]: DEBUG nova.compute.manager [None req-b7e16cf6-cab6-4bac-b016-9355722d13b1 tempest-ServerPasswordTestJSON-1977854233 tempest-ServerPasswordTestJSON-1977854233-project-member] [instance: 2670f16e-4c44-4b88-937e-9e491f599acb] Start destroying the instance on the hypervisor. 
{{(pid=61839) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 661.515862] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-b7e16cf6-cab6-4bac-b016-9355722d13b1 tempest-ServerPasswordTestJSON-1977854233 tempest-ServerPasswordTestJSON-1977854233-project-member] [instance: 2670f16e-4c44-4b88-937e-9e491f599acb] Destroying instance {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 661.515862] env[61839]: DEBUG oslo_concurrency.lockutils [req-2d190512-283d-4d11-9d1d-1aa6a763f8f2 req-30f4173e-df22-44f1-9e42-83de2603ec59 service nova] Acquired lock "refresh_cache-2670f16e-4c44-4b88-937e-9e491f599acb" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 661.516113] env[61839]: DEBUG nova.network.neutron [req-2d190512-283d-4d11-9d1d-1aa6a763f8f2 req-30f4173e-df22-44f1-9e42-83de2603ec59 service nova] [instance: 2670f16e-4c44-4b88-937e-9e491f599acb] Refreshing network info cache for port 64db97a0-6fce-45d1-904c-ce34410e38b0 {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 661.517123] env[61839]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-07b95014-966e-49b1-b9fe-b77c03871466 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.526668] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2697288-eff6-41b6-96b4-a6139cf44ac3 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.548560] env[61839]: WARNING nova.virt.vmwareapi.vmops [None req-b7e16cf6-cab6-4bac-b016-9355722d13b1 tempest-ServerPasswordTestJSON-1977854233 tempest-ServerPasswordTestJSON-1977854233-project-member] [instance: 2670f16e-4c44-4b88-937e-9e491f599acb] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 2670f16e-4c44-4b88-937e-9e491f599acb could not be found. [ 661.548773] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-b7e16cf6-cab6-4bac-b016-9355722d13b1 tempest-ServerPasswordTestJSON-1977854233 tempest-ServerPasswordTestJSON-1977854233-project-member] [instance: 2670f16e-4c44-4b88-937e-9e491f599acb] Instance destroyed {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 661.548951] env[61839]: INFO nova.compute.manager [None req-b7e16cf6-cab6-4bac-b016-9355722d13b1 tempest-ServerPasswordTestJSON-1977854233 tempest-ServerPasswordTestJSON-1977854233-project-member] [instance: 2670f16e-4c44-4b88-937e-9e491f599acb] Took 0.03 seconds to destroy the instance on the hypervisor. [ 661.549232] env[61839]: DEBUG oslo.service.loopingcall [None req-b7e16cf6-cab6-4bac-b016-9355722d13b1 tempest-ServerPasswordTestJSON-1977854233 tempest-ServerPasswordTestJSON-1977854233-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 661.549490] env[61839]: DEBUG nova.compute.manager [-] [instance: 2670f16e-4c44-4b88-937e-9e491f599acb] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 661.549584] env[61839]: DEBUG nova.network.neutron [-] [instance: 2670f16e-4c44-4b88-937e-9e491f599acb] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 661.579320] env[61839]: DEBUG nova.network.neutron [-] [instance: 2670f16e-4c44-4b88-937e-9e491f599acb] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 661.636121] env[61839]: DEBUG nova.scheduler.client.report [None req-758c0332-86e7-48d1-9a5c-4459947020cd tempest-AttachInterfacesV270Test-504327934 tempest-AttachInterfacesV270Test-504327934-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 662.052313] env[61839]: DEBUG nova.network.neutron [req-2d190512-283d-4d11-9d1d-1aa6a763f8f2 req-30f4173e-df22-44f1-9e42-83de2603ec59 service nova] [instance: 2670f16e-4c44-4b88-937e-9e491f599acb] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 662.081223] env[61839]: DEBUG nova.network.neutron [-] [instance: 2670f16e-4c44-4b88-937e-9e491f599acb] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 662.123843] env[61839]: DEBUG nova.compute.manager [None req-fcabcba5-2e8f-494e-bc5c-79e52812344e tempest-ServerActionsTestJSON-1845148809 tempest-ServerActionsTestJSON-1845148809-project-member] [instance: 2432a14e-ec45-452c-9592-de690dbc102e] Start spawning the instance on the hypervisor. 
{{(pid=61839) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 662.138196] env[61839]: DEBUG oslo_concurrency.lockutils [None req-85100d62-2957-4746-911c-e5a5695cb3f6 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Acquiring lock "5c29c188-a34b-4751-9f8b-166af7b15088" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 662.138549] env[61839]: DEBUG oslo_concurrency.lockutils [None req-85100d62-2957-4746-911c-e5a5695cb3f6 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Lock "5c29c188-a34b-4751-9f8b-166af7b15088" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 662.143911] env[61839]: DEBUG oslo_concurrency.lockutils [None req-758c0332-86e7-48d1-9a5c-4459947020cd tempest-AttachInterfacesV270Test-504327934 tempest-AttachInterfacesV270Test-504327934-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.048s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 662.144538] env[61839]: ERROR nova.compute.manager [None req-758c0332-86e7-48d1-9a5c-4459947020cd tempest-AttachInterfacesV270Test-504327934 tempest-AttachInterfacesV270Test-504327934-project-member] [instance: 5a130776-5e5f-4eec-8574-08aa1f1ef97a] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port e7dac565-024f-4a6b-8efc-d65497a1bbbf, please check neutron logs for more information. 
[ 662.144538] env[61839]: ERROR nova.compute.manager [instance: 5a130776-5e5f-4eec-8574-08aa1f1ef97a] Traceback (most recent call last): [ 662.144538] env[61839]: ERROR nova.compute.manager [instance: 5a130776-5e5f-4eec-8574-08aa1f1ef97a] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 662.144538] env[61839]: ERROR nova.compute.manager [instance: 5a130776-5e5f-4eec-8574-08aa1f1ef97a] self.driver.spawn(context, instance, image_meta, [ 662.144538] env[61839]: ERROR nova.compute.manager [instance: 5a130776-5e5f-4eec-8574-08aa1f1ef97a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 662.144538] env[61839]: ERROR nova.compute.manager [instance: 5a130776-5e5f-4eec-8574-08aa1f1ef97a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 662.144538] env[61839]: ERROR nova.compute.manager [instance: 5a130776-5e5f-4eec-8574-08aa1f1ef97a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 662.144538] env[61839]: ERROR nova.compute.manager [instance: 5a130776-5e5f-4eec-8574-08aa1f1ef97a] vm_ref = self.build_virtual_machine(instance, [ 662.144538] env[61839]: ERROR nova.compute.manager [instance: 5a130776-5e5f-4eec-8574-08aa1f1ef97a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 662.144538] env[61839]: ERROR nova.compute.manager [instance: 5a130776-5e5f-4eec-8574-08aa1f1ef97a] vif_infos = vmwarevif.get_vif_info(self._session, [ 662.144538] env[61839]: ERROR nova.compute.manager [instance: 5a130776-5e5f-4eec-8574-08aa1f1ef97a] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 662.144871] env[61839]: ERROR nova.compute.manager [instance: 5a130776-5e5f-4eec-8574-08aa1f1ef97a] for vif in network_info: [ 662.144871] env[61839]: ERROR nova.compute.manager [instance: 5a130776-5e5f-4eec-8574-08aa1f1ef97a] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 662.144871] env[61839]: ERROR nova.compute.manager [instance: 5a130776-5e5f-4eec-8574-08aa1f1ef97a] return self._sync_wrapper(fn, *args, **kwargs) [ 662.144871] env[61839]: ERROR nova.compute.manager [instance: 5a130776-5e5f-4eec-8574-08aa1f1ef97a] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 662.144871] env[61839]: ERROR nova.compute.manager [instance: 5a130776-5e5f-4eec-8574-08aa1f1ef97a] self.wait() [ 662.144871] env[61839]: ERROR nova.compute.manager [instance: 5a130776-5e5f-4eec-8574-08aa1f1ef97a] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 662.144871] env[61839]: ERROR nova.compute.manager [instance: 5a130776-5e5f-4eec-8574-08aa1f1ef97a] self[:] = self._gt.wait() [ 662.144871] env[61839]: ERROR nova.compute.manager [instance: 5a130776-5e5f-4eec-8574-08aa1f1ef97a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 662.144871] env[61839]: ERROR nova.compute.manager [instance: 5a130776-5e5f-4eec-8574-08aa1f1ef97a] return self._exit_event.wait() [ 662.144871] env[61839]: ERROR nova.compute.manager [instance: 5a130776-5e5f-4eec-8574-08aa1f1ef97a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 662.144871] env[61839]: ERROR nova.compute.manager [instance: 5a130776-5e5f-4eec-8574-08aa1f1ef97a] result = hub.switch() [ 662.144871] env[61839]: ERROR nova.compute.manager [instance: 5a130776-5e5f-4eec-8574-08aa1f1ef97a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
662.144871] env[61839]: ERROR nova.compute.manager [instance: 5a130776-5e5f-4eec-8574-08aa1f1ef97a] return self.greenlet.switch() [ 662.145322] env[61839]: ERROR nova.compute.manager [instance: 5a130776-5e5f-4eec-8574-08aa1f1ef97a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 662.145322] env[61839]: ERROR nova.compute.manager [instance: 5a130776-5e5f-4eec-8574-08aa1f1ef97a] result = function(*args, **kwargs) [ 662.145322] env[61839]: ERROR nova.compute.manager [instance: 5a130776-5e5f-4eec-8574-08aa1f1ef97a] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 662.145322] env[61839]: ERROR nova.compute.manager [instance: 5a130776-5e5f-4eec-8574-08aa1f1ef97a] return func(*args, **kwargs) [ 662.145322] env[61839]: ERROR nova.compute.manager [instance: 5a130776-5e5f-4eec-8574-08aa1f1ef97a] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 662.145322] env[61839]: ERROR nova.compute.manager [instance: 5a130776-5e5f-4eec-8574-08aa1f1ef97a] raise e [ 662.145322] env[61839]: ERROR nova.compute.manager [instance: 5a130776-5e5f-4eec-8574-08aa1f1ef97a] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 662.145322] env[61839]: ERROR nova.compute.manager [instance: 5a130776-5e5f-4eec-8574-08aa1f1ef97a] nwinfo = self.network_api.allocate_for_instance( [ 662.145322] env[61839]: ERROR nova.compute.manager [instance: 5a130776-5e5f-4eec-8574-08aa1f1ef97a] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 662.145322] env[61839]: ERROR nova.compute.manager [instance: 5a130776-5e5f-4eec-8574-08aa1f1ef97a] created_port_ids = self._update_ports_for_instance( [ 662.145322] env[61839]: ERROR nova.compute.manager [instance: 5a130776-5e5f-4eec-8574-08aa1f1ef97a] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 662.145322] env[61839]: ERROR nova.compute.manager [instance: 5a130776-5e5f-4eec-8574-08aa1f1ef97a] with excutils.save_and_reraise_exception(): [ 662.145322] env[61839]: ERROR nova.compute.manager [instance: 5a130776-5e5f-4eec-8574-08aa1f1ef97a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 662.145642] env[61839]: ERROR nova.compute.manager [instance: 5a130776-5e5f-4eec-8574-08aa1f1ef97a] self.force_reraise() [ 662.145642] env[61839]: ERROR nova.compute.manager [instance: 5a130776-5e5f-4eec-8574-08aa1f1ef97a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 662.145642] env[61839]: ERROR nova.compute.manager [instance: 5a130776-5e5f-4eec-8574-08aa1f1ef97a] raise self.value [ 662.145642] env[61839]: ERROR nova.compute.manager [instance: 5a130776-5e5f-4eec-8574-08aa1f1ef97a] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 662.145642] env[61839]: ERROR nova.compute.manager [instance: 5a130776-5e5f-4eec-8574-08aa1f1ef97a] updated_port = self._update_port( [ 662.145642] env[61839]: ERROR nova.compute.manager [instance: 5a130776-5e5f-4eec-8574-08aa1f1ef97a] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 662.145642] env[61839]: ERROR nova.compute.manager [instance: 5a130776-5e5f-4eec-8574-08aa1f1ef97a] _ensure_no_port_binding_failure(port) [ 662.145642] env[61839]: ERROR nova.compute.manager [instance: 5a130776-5e5f-4eec-8574-08aa1f1ef97a] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 662.145642] env[61839]: ERROR nova.compute.manager [instance: 5a130776-5e5f-4eec-8574-08aa1f1ef97a] raise exception.PortBindingFailed(port_id=port['id']) [ 662.145642] env[61839]: ERROR nova.compute.manager [instance: 5a130776-5e5f-4eec-8574-08aa1f1ef97a] nova.exception.PortBindingFailed: Binding failed for port e7dac565-024f-4a6b-8efc-d65497a1bbbf, please check neutron logs for more information. [ 662.145642] env[61839]: ERROR nova.compute.manager [instance: 5a130776-5e5f-4eec-8574-08aa1f1ef97a] [ 662.145903] env[61839]: DEBUG nova.compute.utils [None req-758c0332-86e7-48d1-9a5c-4459947020cd tempest-AttachInterfacesV270Test-504327934 tempest-AttachInterfacesV270Test-504327934-project-member] [instance: 5a130776-5e5f-4eec-8574-08aa1f1ef97a] Binding failed for port e7dac565-024f-4a6b-8efc-d65497a1bbbf, please check neutron logs for more information. {{(pid=61839) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 662.146460] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a7a0f4c7-63f7-438f-925b-3a0159c683d7 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 26.515s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 662.146629] env[61839]: DEBUG nova.objects.instance [None req-a7a0f4c7-63f7-438f-925b-3a0159c683d7 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] [instance: d8b0b608-d8ca-45ce-a113-ee96b5ef1a9d] Trying to apply a migration context that does not seem to be set for this instance {{(pid=61839) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 662.149921] env[61839]: DEBUG nova.compute.manager [None req-758c0332-86e7-48d1-9a5c-4459947020cd tempest-AttachInterfacesV270Test-504327934 tempest-AttachInterfacesV270Test-504327934-project-member] [instance: 5a130776-5e5f-4eec-8574-08aa1f1ef97a] Build of instance 5a130776-5e5f-4eec-8574-08aa1f1ef97a was re-scheduled: Binding failed for port e7dac565-024f-4a6b-8efc-d65497a1bbbf, please check neutron logs for more information. 
{{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 662.150310] env[61839]: DEBUG nova.compute.manager [None req-758c0332-86e7-48d1-9a5c-4459947020cd tempest-AttachInterfacesV270Test-504327934 tempest-AttachInterfacesV270Test-504327934-project-member] [instance: 5a130776-5e5f-4eec-8574-08aa1f1ef97a] Unplugging VIFs for instance {{(pid=61839) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 662.150599] env[61839]: DEBUG oslo_concurrency.lockutils [None req-758c0332-86e7-48d1-9a5c-4459947020cd tempest-AttachInterfacesV270Test-504327934 tempest-AttachInterfacesV270Test-504327934-project-member] Acquiring lock "refresh_cache-5a130776-5e5f-4eec-8574-08aa1f1ef97a" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 662.150757] env[61839]: DEBUG oslo_concurrency.lockutils [None req-758c0332-86e7-48d1-9a5c-4459947020cd tempest-AttachInterfacesV270Test-504327934 tempest-AttachInterfacesV270Test-504327934-project-member] Acquired lock "refresh_cache-5a130776-5e5f-4eec-8574-08aa1f1ef97a" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 662.150911] env[61839]: DEBUG nova.network.neutron [None req-758c0332-86e7-48d1-9a5c-4459947020cd tempest-AttachInterfacesV270Test-504327934 tempest-AttachInterfacesV270Test-504327934-project-member] [instance: 5a130776-5e5f-4eec-8574-08aa1f1ef97a] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 662.158566] env[61839]: DEBUG nova.virt.hardware [None req-fcabcba5-2e8f-494e-bc5c-79e52812344e tempest-ServerActionsTestJSON-1845148809 tempest-ServerActionsTestJSON-1845148809-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-18T16:51:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-18T16:50:57Z,direct_url=,disk_format='vmdk',id=e497cc62-282a-4a70-9770-22d80d8a1013,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a98e034276e44534a9feace637762da7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-18T16:50:58Z,virtual_size=,visibility=), allow threads: False {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 662.158780] env[61839]: DEBUG nova.virt.hardware [None req-fcabcba5-2e8f-494e-bc5c-79e52812344e tempest-ServerActionsTestJSON-1845148809 tempest-ServerActionsTestJSON-1845148809-project-member] Flavor limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 662.158929] env[61839]: DEBUG nova.virt.hardware [None req-fcabcba5-2e8f-494e-bc5c-79e52812344e tempest-ServerActionsTestJSON-1845148809 tempest-ServerActionsTestJSON-1845148809-project-member] Image limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 662.159172] env[61839]: DEBUG nova.virt.hardware [None req-fcabcba5-2e8f-494e-bc5c-79e52812344e tempest-ServerActionsTestJSON-1845148809 tempest-ServerActionsTestJSON-1845148809-project-member] Flavor pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:388}} [ 662.159405] env[61839]: DEBUG nova.virt.hardware [None req-fcabcba5-2e8f-494e-bc5c-79e52812344e tempest-ServerActionsTestJSON-1845148809 tempest-ServerActionsTestJSON-1845148809-project-member] Image pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 662.159678] env[61839]: DEBUG nova.virt.hardware [None req-fcabcba5-2e8f-494e-bc5c-79e52812344e tempest-ServerActionsTestJSON-1845148809 tempest-ServerActionsTestJSON-1845148809-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 662.159908] env[61839]: DEBUG nova.virt.hardware [None req-fcabcba5-2e8f-494e-bc5c-79e52812344e tempest-ServerActionsTestJSON-1845148809 tempest-ServerActionsTestJSON-1845148809-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 662.161138] env[61839]: DEBUG nova.virt.hardware [None req-fcabcba5-2e8f-494e-bc5c-79e52812344e tempest-ServerActionsTestJSON-1845148809 tempest-ServerActionsTestJSON-1845148809-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 662.161138] env[61839]: DEBUG nova.virt.hardware [None req-fcabcba5-2e8f-494e-bc5c-79e52812344e tempest-ServerActionsTestJSON-1845148809 tempest-ServerActionsTestJSON-1845148809-project-member] Got 1 possible topologies {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 662.161138] env[61839]: DEBUG nova.virt.hardware [None req-fcabcba5-2e8f-494e-bc5c-79e52812344e tempest-ServerActionsTestJSON-1845148809 tempest-ServerActionsTestJSON-1845148809-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 662.161138] env[61839]: DEBUG nova.virt.hardware [None req-fcabcba5-2e8f-494e-bc5c-79e52812344e tempest-ServerActionsTestJSON-1845148809 tempest-ServerActionsTestJSON-1845148809-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 662.164085] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-776791fb-c5f4-4902-b52c-29d7ab711622 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.176498] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-531407e1-6fdd-4913-b64b-60564ac219c6 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.252766] env[61839]: DEBUG nova.network.neutron [req-2d190512-283d-4d11-9d1d-1aa6a763f8f2 req-30f4173e-df22-44f1-9e42-83de2603ec59 service nova] [instance: 2670f16e-4c44-4b88-937e-9e491f599acb] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 662.588609] env[61839]: INFO nova.compute.manager [-] [instance: 2670f16e-4c44-4b88-937e-9e491f599acb] Took 1.04 seconds to deallocate network for instance. 
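NOTE (editor): the "Acquiring lock" / "Acquired lock ... waited 0.000s" / "Releasing lock" DEBUG lines throughout this log are emitted by oslo_concurrency.lockutils: Nova wraps each per-instance network-cache refresh in a named lock ("refresh_cache-<uuid>") and guards the resource tracker with a "compute_resources" lock. A minimal sketch of both patterns, assuming only that oslo.concurrency is installed; the lock names are taken from the log and the function bodies are placeholders, not Nova code:

    from oslo_concurrency import lockutils

    instance_uuid = "5a130776-5e5f-4eec-8574-08aa1f1ef97a"  # UUID from the log above

    # Context-manager form: logs "Acquiring lock ..." / "Acquired lock ..." /
    # "Releasing lock ..." (lockutils.py:310/313/331 in the lines above).
    with lockutils.lock("refresh_cache-%s" % instance_uuid):
        network_info = []  # placeholder; the log shows an empty instance cache

    # Decorator form: logs "Lock ... acquired by ... :: waited Ns" and
    # "released ... :: held Ns" (lockutils.py:402/407/421 in the lines above).
    @lockutils.synchronized("compute_resources")
    def update_usage():
        pass

    update_usage()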
[ 662.590231] env[61839]: DEBUG nova.compute.claims [None req-b7e16cf6-cab6-4bac-b016-9355722d13b1 tempest-ServerPasswordTestJSON-1977854233 tempest-ServerPasswordTestJSON-1977854233-project-member] [instance: 2670f16e-4c44-4b88-937e-9e491f599acb] Aborting claim: {{(pid=61839) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 662.590231] env[61839]: DEBUG oslo_concurrency.lockutils [None req-b7e16cf6-cab6-4bac-b016-9355722d13b1 tempest-ServerPasswordTestJSON-1977854233 tempest-ServerPasswordTestJSON-1977854233-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 662.693566] env[61839]: DEBUG nova.network.neutron [None req-758c0332-86e7-48d1-9a5c-4459947020cd tempest-AttachInterfacesV270Test-504327934 tempest-AttachInterfacesV270Test-504327934-project-member] [instance: 5a130776-5e5f-4eec-8574-08aa1f1ef97a] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 662.754348] env[61839]: DEBUG oslo_concurrency.lockutils [req-2d190512-283d-4d11-9d1d-1aa6a763f8f2 req-30f4173e-df22-44f1-9e42-83de2603ec59 service nova] Releasing lock "refresh_cache-2670f16e-4c44-4b88-937e-9e491f599acb" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 662.754681] env[61839]: DEBUG nova.compute.manager [req-2d190512-283d-4d11-9d1d-1aa6a763f8f2 req-30f4173e-df22-44f1-9e42-83de2603ec59 service nova] [instance: 2670f16e-4c44-4b88-937e-9e491f599acb] Received event network-vif-deleted-64db97a0-6fce-45d1-904c-ce34410e38b0 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 662.927053] env[61839]: DEBUG nova.network.neutron [None req-758c0332-86e7-48d1-9a5c-4459947020cd tempest-AttachInterfacesV270Test-504327934 tempest-AttachInterfacesV270Test-504327934-project-member] [instance: 5a130776-5e5f-4eec-8574-08aa1f1ef97a] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 663.166723] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a7a0f4c7-63f7-438f-925b-3a0159c683d7 tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.020s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 663.168358] env[61839]: DEBUG oslo_concurrency.lockutils [None req-59f3a73e-ced5-4077-b911-de1be522459b tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 22.711s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 663.176237] env[61839]: DEBUG nova.objects.instance [None req-59f3a73e-ced5-4077-b911-de1be522459b tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] Lazy-loading 'resources' on Instance uuid d8b0b608-d8ca-45ce-a113-ee96b5ef1a9d {{(pid=61839) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 663.340509] env[61839]: ERROR nova.compute.manager [None req-fcabcba5-2e8f-494e-bc5c-79e52812344e tempest-ServerActionsTestJSON-1845148809 
tempest-ServerActionsTestJSON-1845148809-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port a1a17fb4-0f77-408c-9dd1-324dedc031a7, please check neutron logs for more information. [ 663.340509] env[61839]: ERROR nova.compute.manager Traceback (most recent call last): [ 663.340509] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 663.340509] env[61839]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 663.340509] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 663.340509] env[61839]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 663.340509] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 663.340509] env[61839]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 663.340509] env[61839]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 663.340509] env[61839]: ERROR nova.compute.manager self.force_reraise() [ 663.340509] env[61839]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 663.340509] env[61839]: ERROR nova.compute.manager raise self.value [ 663.340509] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 663.340509] env[61839]: ERROR nova.compute.manager updated_port = self._update_port( [ 663.340509] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 663.340509] env[61839]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 663.341052] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 663.341052] env[61839]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 663.341052] env[61839]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port a1a17fb4-0f77-408c-9dd1-324dedc031a7, please check neutron logs for more information. 
[ 663.341052] env[61839]: ERROR nova.compute.manager [ 663.341052] env[61839]: Traceback (most recent call last): [ 663.341052] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 663.341052] env[61839]: listener.cb(fileno) [ 663.341052] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 663.341052] env[61839]: result = function(*args, **kwargs) [ 663.341052] env[61839]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 663.341052] env[61839]: return func(*args, **kwargs) [ 663.341052] env[61839]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 663.341052] env[61839]: raise e [ 663.341052] env[61839]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 663.341052] env[61839]: nwinfo = self.network_api.allocate_for_instance( [ 663.341052] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 663.341052] env[61839]: created_port_ids = self._update_ports_for_instance( [ 663.341052] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 663.341052] env[61839]: with excutils.save_and_reraise_exception(): [ 663.341052] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 663.341052] env[61839]: self.force_reraise() [ 663.341052] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 663.341052] env[61839]: raise self.value [ 663.341052] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 663.341052] env[61839]: updated_port = self._update_port( [ 663.341052] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 663.341052] env[61839]: _ensure_no_port_binding_failure(port) [ 663.341052] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 663.341052] env[61839]: raise exception.PortBindingFailed(port_id=port['id']) [ 663.341827] env[61839]: nova.exception.PortBindingFailed: Binding failed for port a1a17fb4-0f77-408c-9dd1-324dedc031a7, please check neutron logs for more information. [ 663.341827] env[61839]: Removing descriptor: 17 [ 663.341827] env[61839]: ERROR nova.compute.manager [None req-fcabcba5-2e8f-494e-bc5c-79e52812344e tempest-ServerActionsTestJSON-1845148809 tempest-ServerActionsTestJSON-1845148809-project-member] [instance: 2432a14e-ec45-452c-9592-de690dbc102e] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port a1a17fb4-0f77-408c-9dd1-324dedc031a7, please check neutron logs for more information. 
[ 663.341827] env[61839]: ERROR nova.compute.manager [instance: 2432a14e-ec45-452c-9592-de690dbc102e] Traceback (most recent call last): [ 663.341827] env[61839]: ERROR nova.compute.manager [instance: 2432a14e-ec45-452c-9592-de690dbc102e] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 663.341827] env[61839]: ERROR nova.compute.manager [instance: 2432a14e-ec45-452c-9592-de690dbc102e] yield resources [ 663.341827] env[61839]: ERROR nova.compute.manager [instance: 2432a14e-ec45-452c-9592-de690dbc102e] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 663.341827] env[61839]: ERROR nova.compute.manager [instance: 2432a14e-ec45-452c-9592-de690dbc102e] self.driver.spawn(context, instance, image_meta, [ 663.341827] env[61839]: ERROR nova.compute.manager [instance: 2432a14e-ec45-452c-9592-de690dbc102e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 663.341827] env[61839]: ERROR nova.compute.manager [instance: 2432a14e-ec45-452c-9592-de690dbc102e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 663.341827] env[61839]: ERROR nova.compute.manager [instance: 2432a14e-ec45-452c-9592-de690dbc102e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 663.341827] env[61839]: ERROR nova.compute.manager [instance: 2432a14e-ec45-452c-9592-de690dbc102e] vm_ref = self.build_virtual_machine(instance, [ 663.342165] env[61839]: ERROR nova.compute.manager [instance: 2432a14e-ec45-452c-9592-de690dbc102e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 663.342165] env[61839]: ERROR nova.compute.manager [instance: 2432a14e-ec45-452c-9592-de690dbc102e] vif_infos = vmwarevif.get_vif_info(self._session, [ 663.342165] env[61839]: ERROR nova.compute.manager [instance: 2432a14e-ec45-452c-9592-de690dbc102e] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 663.342165] env[61839]: ERROR nova.compute.manager [instance: 2432a14e-ec45-452c-9592-de690dbc102e] for vif in network_info: [ 663.342165] env[61839]: ERROR nova.compute.manager [instance: 2432a14e-ec45-452c-9592-de690dbc102e] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 663.342165] env[61839]: ERROR nova.compute.manager [instance: 2432a14e-ec45-452c-9592-de690dbc102e] return self._sync_wrapper(fn, *args, **kwargs) [ 663.342165] env[61839]: ERROR nova.compute.manager [instance: 2432a14e-ec45-452c-9592-de690dbc102e] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 663.342165] env[61839]: ERROR nova.compute.manager [instance: 2432a14e-ec45-452c-9592-de690dbc102e] self.wait() [ 663.342165] env[61839]: ERROR nova.compute.manager [instance: 2432a14e-ec45-452c-9592-de690dbc102e] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 663.342165] env[61839]: ERROR nova.compute.manager [instance: 2432a14e-ec45-452c-9592-de690dbc102e] self[:] = self._gt.wait() [ 663.342165] env[61839]: ERROR nova.compute.manager [instance: 2432a14e-ec45-452c-9592-de690dbc102e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 663.342165] env[61839]: ERROR nova.compute.manager [instance: 2432a14e-ec45-452c-9592-de690dbc102e] return self._exit_event.wait() [ 663.342165] env[61839]: ERROR nova.compute.manager [instance: 2432a14e-ec45-452c-9592-de690dbc102e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 663.342583] env[61839]: ERROR 
nova.compute.manager [instance: 2432a14e-ec45-452c-9592-de690dbc102e] result = hub.switch() [ 663.342583] env[61839]: ERROR nova.compute.manager [instance: 2432a14e-ec45-452c-9592-de690dbc102e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 663.342583] env[61839]: ERROR nova.compute.manager [instance: 2432a14e-ec45-452c-9592-de690dbc102e] return self.greenlet.switch() [ 663.342583] env[61839]: ERROR nova.compute.manager [instance: 2432a14e-ec45-452c-9592-de690dbc102e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 663.342583] env[61839]: ERROR nova.compute.manager [instance: 2432a14e-ec45-452c-9592-de690dbc102e] result = function(*args, **kwargs) [ 663.342583] env[61839]: ERROR nova.compute.manager [instance: 2432a14e-ec45-452c-9592-de690dbc102e] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 663.342583] env[61839]: ERROR nova.compute.manager [instance: 2432a14e-ec45-452c-9592-de690dbc102e] return func(*args, **kwargs) [ 663.342583] env[61839]: ERROR nova.compute.manager [instance: 2432a14e-ec45-452c-9592-de690dbc102e] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 663.342583] env[61839]: ERROR nova.compute.manager [instance: 2432a14e-ec45-452c-9592-de690dbc102e] raise e [ 663.342583] env[61839]: ERROR nova.compute.manager [instance: 2432a14e-ec45-452c-9592-de690dbc102e] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 663.342583] env[61839]: ERROR nova.compute.manager [instance: 2432a14e-ec45-452c-9592-de690dbc102e] nwinfo = self.network_api.allocate_for_instance( [ 663.342583] env[61839]: ERROR nova.compute.manager [instance: 2432a14e-ec45-452c-9592-de690dbc102e] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 663.342583] env[61839]: ERROR nova.compute.manager [instance: 2432a14e-ec45-452c-9592-de690dbc102e] created_port_ids = self._update_ports_for_instance( [ 663.342941] env[61839]: ERROR nova.compute.manager [instance: 2432a14e-ec45-452c-9592-de690dbc102e] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 663.342941] env[61839]: ERROR nova.compute.manager [instance: 2432a14e-ec45-452c-9592-de690dbc102e] with excutils.save_and_reraise_exception(): [ 663.342941] env[61839]: ERROR nova.compute.manager [instance: 2432a14e-ec45-452c-9592-de690dbc102e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 663.342941] env[61839]: ERROR nova.compute.manager [instance: 2432a14e-ec45-452c-9592-de690dbc102e] self.force_reraise() [ 663.342941] env[61839]: ERROR nova.compute.manager [instance: 2432a14e-ec45-452c-9592-de690dbc102e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 663.342941] env[61839]: ERROR nova.compute.manager [instance: 2432a14e-ec45-452c-9592-de690dbc102e] raise self.value [ 663.342941] env[61839]: ERROR nova.compute.manager [instance: 2432a14e-ec45-452c-9592-de690dbc102e] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 663.342941] env[61839]: ERROR nova.compute.manager [instance: 2432a14e-ec45-452c-9592-de690dbc102e] updated_port = self._update_port( [ 663.342941] env[61839]: ERROR nova.compute.manager [instance: 2432a14e-ec45-452c-9592-de690dbc102e] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 663.342941] 
env[61839]: ERROR nova.compute.manager [instance: 2432a14e-ec45-452c-9592-de690dbc102e] _ensure_no_port_binding_failure(port) [ 663.342941] env[61839]: ERROR nova.compute.manager [instance: 2432a14e-ec45-452c-9592-de690dbc102e] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 663.342941] env[61839]: ERROR nova.compute.manager [instance: 2432a14e-ec45-452c-9592-de690dbc102e] raise exception.PortBindingFailed(port_id=port['id']) [ 663.343287] env[61839]: ERROR nova.compute.manager [instance: 2432a14e-ec45-452c-9592-de690dbc102e] nova.exception.PortBindingFailed: Binding failed for port a1a17fb4-0f77-408c-9dd1-324dedc031a7, please check neutron logs for more information. [ 663.343287] env[61839]: ERROR nova.compute.manager [instance: 2432a14e-ec45-452c-9592-de690dbc102e] [ 663.343287] env[61839]: INFO nova.compute.manager [None req-fcabcba5-2e8f-494e-bc5c-79e52812344e tempest-ServerActionsTestJSON-1845148809 tempest-ServerActionsTestJSON-1845148809-project-member] [instance: 2432a14e-ec45-452c-9592-de690dbc102e] Terminating instance [ 663.344326] env[61839]: DEBUG oslo_concurrency.lockutils [None req-fcabcba5-2e8f-494e-bc5c-79e52812344e tempest-ServerActionsTestJSON-1845148809 tempest-ServerActionsTestJSON-1845148809-project-member] Acquiring lock "refresh_cache-2432a14e-ec45-452c-9592-de690dbc102e" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 663.344490] env[61839]: DEBUG oslo_concurrency.lockutils [None req-fcabcba5-2e8f-494e-bc5c-79e52812344e tempest-ServerActionsTestJSON-1845148809 tempest-ServerActionsTestJSON-1845148809-project-member] Acquired lock "refresh_cache-2432a14e-ec45-452c-9592-de690dbc102e" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 663.344663] env[61839]: DEBUG nova.network.neutron [None req-fcabcba5-2e8f-494e-bc5c-79e52812344e tempest-ServerActionsTestJSON-1845148809 tempest-ServerActionsTestJSON-1845148809-project-member] [instance: 2432a14e-ec45-452c-9592-de690dbc102e] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 663.431740] env[61839]: DEBUG oslo_concurrency.lockutils [None req-758c0332-86e7-48d1-9a5c-4459947020cd tempest-AttachInterfacesV270Test-504327934 tempest-AttachInterfacesV270Test-504327934-project-member] Releasing lock "refresh_cache-5a130776-5e5f-4eec-8574-08aa1f1ef97a" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 663.431740] env[61839]: DEBUG nova.compute.manager [None req-758c0332-86e7-48d1-9a5c-4459947020cd tempest-AttachInterfacesV270Test-504327934 tempest-AttachInterfacesV270Test-504327934-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged. 
{{(pid=61839) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 663.431740] env[61839]: DEBUG nova.compute.manager [None req-758c0332-86e7-48d1-9a5c-4459947020cd tempest-AttachInterfacesV270Test-504327934 tempest-AttachInterfacesV270Test-504327934-project-member] [instance: 5a130776-5e5f-4eec-8574-08aa1f1ef97a] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 663.431740] env[61839]: DEBUG nova.network.neutron [None req-758c0332-86e7-48d1-9a5c-4459947020cd tempest-AttachInterfacesV270Test-504327934 tempest-AttachInterfacesV270Test-504327934-project-member] [instance: 5a130776-5e5f-4eec-8574-08aa1f1ef97a] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 663.451845] env[61839]: DEBUG nova.network.neutron [None req-758c0332-86e7-48d1-9a5c-4459947020cd tempest-AttachInterfacesV270Test-504327934 tempest-AttachInterfacesV270Test-504327934-project-member] [instance: 5a130776-5e5f-4eec-8574-08aa1f1ef97a] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 663.492612] env[61839]: DEBUG nova.compute.manager [req-d409f945-7909-44b2-9512-4eea3cb0cc71 req-2b560ba3-6873-4f6d-b760-7fb85ad7ae44 service nova] [instance: 2432a14e-ec45-452c-9592-de690dbc102e] Received event network-changed-a1a17fb4-0f77-408c-9dd1-324dedc031a7 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 663.492719] env[61839]: DEBUG nova.compute.manager [req-d409f945-7909-44b2-9512-4eea3cb0cc71 req-2b560ba3-6873-4f6d-b760-7fb85ad7ae44 service nova] [instance: 2432a14e-ec45-452c-9592-de690dbc102e] Refreshing instance network info cache due to event network-changed-a1a17fb4-0f77-408c-9dd1-324dedc031a7. {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 663.492929] env[61839]: DEBUG oslo_concurrency.lockutils [req-d409f945-7909-44b2-9512-4eea3cb0cc71 req-2b560ba3-6873-4f6d-b760-7fb85ad7ae44 service nova] Acquiring lock "refresh_cache-2432a14e-ec45-452c-9592-de690dbc102e" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 663.866263] env[61839]: DEBUG nova.network.neutron [None req-fcabcba5-2e8f-494e-bc5c-79e52812344e tempest-ServerActionsTestJSON-1845148809 tempest-ServerActionsTestJSON-1845148809-project-member] [instance: 2432a14e-ec45-452c-9592-de690dbc102e] Instance cache missing network info. 
{{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 663.948481] env[61839]: DEBUG oslo_concurrency.lockutils [None req-fdadb98b-f7ca-40ff-9eb5-8f15cbae89cd tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Acquiring lock "603191b6-a4b0-451b-b98b-f3dbfb684300" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 663.948726] env[61839]: DEBUG oslo_concurrency.lockutils [None req-fdadb98b-f7ca-40ff-9eb5-8f15cbae89cd tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Lock "603191b6-a4b0-451b-b98b-f3dbfb684300" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 663.953946] env[61839]: DEBUG nova.network.neutron [None req-758c0332-86e7-48d1-9a5c-4459947020cd tempest-AttachInterfacesV270Test-504327934 tempest-AttachInterfacesV270Test-504327934-project-member] [instance: 5a130776-5e5f-4eec-8574-08aa1f1ef97a] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 664.003373] env[61839]: DEBUG nova.network.neutron [None req-fcabcba5-2e8f-494e-bc5c-79e52812344e tempest-ServerActionsTestJSON-1845148809 tempest-ServerActionsTestJSON-1845148809-project-member] [instance: 2432a14e-ec45-452c-9592-de690dbc102e] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 664.153341] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c22699a-4cb0-48cb-8e75-777abff8f7d3 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.161154] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1c459eb-b604-4147-a3e9-c8ac75e2f978 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.193129] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7678084-c81d-41aa-abc2-950cc4acde48 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.200708] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93bdfb28-1dfa-4055-965f-e71f276fc506 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.214536] env[61839]: DEBUG nova.compute.provider_tree [None req-59f3a73e-ced5-4077-b911-de1be522459b tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 664.458695] env[61839]: INFO nova.compute.manager [None req-758c0332-86e7-48d1-9a5c-4459947020cd tempest-AttachInterfacesV270Test-504327934 tempest-AttachInterfacesV270Test-504327934-project-member] [instance: 5a130776-5e5f-4eec-8574-08aa1f1ef97a] 
Took 1.03 seconds to deallocate network for instance. [ 664.509035] env[61839]: DEBUG oslo_concurrency.lockutils [None req-fcabcba5-2e8f-494e-bc5c-79e52812344e tempest-ServerActionsTestJSON-1845148809 tempest-ServerActionsTestJSON-1845148809-project-member] Releasing lock "refresh_cache-2432a14e-ec45-452c-9592-de690dbc102e" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 664.509392] env[61839]: DEBUG nova.compute.manager [None req-fcabcba5-2e8f-494e-bc5c-79e52812344e tempest-ServerActionsTestJSON-1845148809 tempest-ServerActionsTestJSON-1845148809-project-member] [instance: 2432a14e-ec45-452c-9592-de690dbc102e] Start destroying the instance on the hypervisor. {{(pid=61839) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 664.509601] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-fcabcba5-2e8f-494e-bc5c-79e52812344e tempest-ServerActionsTestJSON-1845148809 tempest-ServerActionsTestJSON-1845148809-project-member] [instance: 2432a14e-ec45-452c-9592-de690dbc102e] Destroying instance {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 664.512947] env[61839]: DEBUG oslo_concurrency.lockutils [req-d409f945-7909-44b2-9512-4eea3cb0cc71 req-2b560ba3-6873-4f6d-b760-7fb85ad7ae44 service nova] Acquired lock "refresh_cache-2432a14e-ec45-452c-9592-de690dbc102e" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 664.512947] env[61839]: DEBUG nova.network.neutron [req-d409f945-7909-44b2-9512-4eea3cb0cc71 req-2b560ba3-6873-4f6d-b760-7fb85ad7ae44 service nova] [instance: 2432a14e-ec45-452c-9592-de690dbc102e] Refreshing network info cache for port a1a17fb4-0f77-408c-9dd1-324dedc031a7 {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 664.512947] env[61839]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ac9ca4fa-2afb-42e1-a7fe-325a4e3b7867 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.523291] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3710be5-ce90-4c73-ba79-148e41d96cf6 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.547775] env[61839]: WARNING nova.virt.vmwareapi.vmops [None req-fcabcba5-2e8f-494e-bc5c-79e52812344e tempest-ServerActionsTestJSON-1845148809 tempest-ServerActionsTestJSON-1845148809-project-member] [instance: 2432a14e-ec45-452c-9592-de690dbc102e] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 2432a14e-ec45-452c-9592-de690dbc102e could not be found. [ 664.548025] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-fcabcba5-2e8f-494e-bc5c-79e52812344e tempest-ServerActionsTestJSON-1845148809 tempest-ServerActionsTestJSON-1845148809-project-member] [instance: 2432a14e-ec45-452c-9592-de690dbc102e] Instance destroyed {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 664.548142] env[61839]: INFO nova.compute.manager [None req-fcabcba5-2e8f-494e-bc5c-79e52812344e tempest-ServerActionsTestJSON-1845148809 tempest-ServerActionsTestJSON-1845148809-project-member] [instance: 2432a14e-ec45-452c-9592-de690dbc102e] Took 0.04 seconds to destroy the instance on the hypervisor. 
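NOTE (editor): every PortBindingFailed traceback above passes through the same two frames in oslo_utils/excutils.py (__exit__ at line 227, force_reraise at line 200): Nova's _update_ports_for_instance does its rollback inside save_and_reraise_exception so the original Neutron failure is preserved. A self-contained sketch of that pattern, assuming oslo.utils is installed; PortBindingFailed, bind_port and the rollback print are illustrative stand-ins, not Nova code:

    from oslo_utils import excutils

    class PortBindingFailed(Exception):
        """Stand-in for nova.exception.PortBindingFailed."""

    def bind_port(port_id):
        # Stand-in for the Neutron port update that fails in the log above.
        raise PortBindingFailed("Binding failed for port %s" % port_id)

    def update_ports(port_ids):
        try:
            for port_id in port_ids:
                bind_port(port_id)
        except Exception:
            # The block body runs first; __exit__ then re-raises the original
            # exception, which is why force_reraise() and "raise self.value"
            # show up in every traceback above.
            with excutils.save_and_reraise_exception():
                print("rolling back created ports before re-raising")

    try:
        update_ports(["a1a17fb4-0f77-408c-9dd1-324dedc031a7"])
    except PortBindingFailed as exc:
        print(exc)  # the original failure survives the cleanup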
[ 664.548379] env[61839]: DEBUG oslo.service.loopingcall [None req-fcabcba5-2e8f-494e-bc5c-79e52812344e tempest-ServerActionsTestJSON-1845148809 tempest-ServerActionsTestJSON-1845148809-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 664.548578] env[61839]: DEBUG nova.compute.manager [-] [instance: 2432a14e-ec45-452c-9592-de690dbc102e] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}}
[ 664.548666] env[61839]: DEBUG nova.network.neutron [-] [instance: 2432a14e-ec45-452c-9592-de690dbc102e] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}}
[ 664.567558] env[61839]: DEBUG nova.network.neutron [-] [instance: 2432a14e-ec45-452c-9592-de690dbc102e] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 664.718272] env[61839]: DEBUG nova.scheduler.client.report [None req-59f3a73e-ced5-4077-b911-de1be522459b tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 665.049042] env[61839]: DEBUG nova.network.neutron [req-d409f945-7909-44b2-9512-4eea3cb0cc71 req-2b560ba3-6873-4f6d-b760-7fb85ad7ae44 service nova] [instance: 2432a14e-ec45-452c-9592-de690dbc102e] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 665.070497] env[61839]: DEBUG nova.network.neutron [-] [instance: 2432a14e-ec45-452c-9592-de690dbc102e] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 665.179754] env[61839]: DEBUG nova.network.neutron [req-d409f945-7909-44b2-9512-4eea3cb0cc71 req-2b560ba3-6873-4f6d-b760-7fb85ad7ae44 service nova] [instance: 2432a14e-ec45-452c-9592-de690dbc102e] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 665.225576] env[61839]: DEBUG oslo_concurrency.lockutils [None req-59f3a73e-ced5-4077-b911-de1be522459b tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.056s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 665.227187] env[61839]: DEBUG oslo_concurrency.lockutils [None req-dd05ef5d-2024-4ee1-8efc-49a773076423 tempest-ServerAddressesTestJSON-1080876972 tempest-ServerAddressesTestJSON-1080876972-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.110s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 665.229027] env[61839]: INFO nova.compute.claims [None req-dd05ef5d-2024-4ee1-8efc-49a773076423 tempest-ServerAddressesTestJSON-1080876972 tempest-ServerAddressesTestJSON-1080876972-project-member] [instance: df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 665.253729] env[61839]: INFO nova.scheduler.client.report [None req-59f3a73e-ced5-4077-b911-de1be522459b tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] Deleted allocations for instance d8b0b608-d8ca-45ce-a113-ee96b5ef1a9d
[ 665.491403] env[61839]: INFO nova.scheduler.client.report [None req-758c0332-86e7-48d1-9a5c-4459947020cd tempest-AttachInterfacesV270Test-504327934 tempest-AttachInterfacesV270Test-504327934-project-member] Deleted allocations for instance 5a130776-5e5f-4eec-8574-08aa1f1ef97a
[ 665.527702] env[61839]: DEBUG nova.compute.manager [req-1b195e70-3346-478d-a9cc-13cd79a2b755 req-f6f914fd-3584-4872-a740-60fa707cfa3a service nova] [instance: 2432a14e-ec45-452c-9592-de690dbc102e] Received event network-vif-deleted-a1a17fb4-0f77-408c-9dd1-324dedc031a7 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}}
[ 665.573729] env[61839]: INFO nova.compute.manager [-] [instance: 2432a14e-ec45-452c-9592-de690dbc102e] Took 1.02 seconds to deallocate network for instance.
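The inventory dict in the report-client records above is what Placement sizes the logged claims against. Schedulable capacity per resource class works out to roughly (total - reserved) * allocation_ratio, which is why claims keep succeeding well past the 48 physical vCPUs; a back-of-envelope check (a sketch of the formula, not Placement's code):

```python
# Effective capacity implied by the logged inventory for provider
# cef329e6-1ccd-42a8-bbc4-109a06d1c908, assuming
# capacity = (total - reserved) * allocation_ratio.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}
for rc, inv in inventory.items():
    capacity = int((inv['total'] - inv['reserved']) * inv['allocation_ratio'])
    print(rc, capacity)  # VCPU 192, MEMORY_MB 196078, DISK_GB 400
```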
[ 665.575453] env[61839]: DEBUG nova.compute.claims [None req-fcabcba5-2e8f-494e-bc5c-79e52812344e tempest-ServerActionsTestJSON-1845148809 tempest-ServerActionsTestJSON-1845148809-project-member] [instance: 2432a14e-ec45-452c-9592-de690dbc102e] Aborting claim: {{(pid=61839) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 665.576664] env[61839]: DEBUG oslo_concurrency.lockutils [None req-fcabcba5-2e8f-494e-bc5c-79e52812344e tempest-ServerActionsTestJSON-1845148809 tempest-ServerActionsTestJSON-1845148809-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 665.683034] env[61839]: DEBUG oslo_concurrency.lockutils [req-d409f945-7909-44b2-9512-4eea3cb0cc71 req-2b560ba3-6873-4f6d-b760-7fb85ad7ae44 service nova] Releasing lock "refresh_cache-2432a14e-ec45-452c-9592-de690dbc102e" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 665.765057] env[61839]: DEBUG oslo_concurrency.lockutils [None req-59f3a73e-ced5-4077-b911-de1be522459b tempest-ServerShowV254Test-864617854 tempest-ServerShowV254Test-864617854-project-member] Lock "d8b0b608-d8ca-45ce-a113-ee96b5ef1a9d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 29.022s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 666.000224] env[61839]: DEBUG oslo_concurrency.lockutils [None req-758c0332-86e7-48d1-9a5c-4459947020cd tempest-AttachInterfacesV270Test-504327934 tempest-AttachInterfacesV270Test-504327934-project-member] Lock "5a130776-5e5f-4eec-8574-08aa1f1ef97a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 101.626s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 666.504435] env[61839]: DEBUG nova.compute.manager [None req-92400e08-1071-41df-9108-be5fb5ffb9e6 tempest-MigrationsAdminTest-1877210458 tempest-MigrationsAdminTest-1877210458-project-member] [instance: 83270007-2cbd-49a5-b3a1-1ad58ea2a66c] Starting instance... 
{{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 666.645253] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59436727-25aa-4546-bea8-9491fc20332d {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.653352] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7969387-0986-47f5-a76e-35750ac81423 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.690290] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47a5accf-e942-42a7-acdb-d400161b3455 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.698272] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30d6fda7-d348-4e1e-a3fc-086c389bfeb9 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.713480] env[61839]: DEBUG nova.compute.provider_tree [None req-dd05ef5d-2024-4ee1-8efc-49a773076423 tempest-ServerAddressesTestJSON-1080876972 tempest-ServerAddressesTestJSON-1080876972-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 667.027523] env[61839]: DEBUG oslo_concurrency.lockutils [None req-92400e08-1071-41df-9108-be5fb5ffb9e6 tempest-MigrationsAdminTest-1877210458 tempest-MigrationsAdminTest-1877210458-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 667.216014] env[61839]: DEBUG nova.scheduler.client.report [None req-dd05ef5d-2024-4ee1-8efc-49a773076423 tempest-ServerAddressesTestJSON-1080876972 tempest-ServerAddressesTestJSON-1080876972-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 667.722981] env[61839]: DEBUG oslo_concurrency.lockutils [None req-dd05ef5d-2024-4ee1-8efc-49a773076423 tempest-ServerAddressesTestJSON-1080876972 tempest-ServerAddressesTestJSON-1080876972-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.496s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 667.723701] env[61839]: DEBUG nova.compute.manager [None req-dd05ef5d-2024-4ee1-8efc-49a773076423 tempest-ServerAddressesTestJSON-1080876972 tempest-ServerAddressesTestJSON-1080876972-project-member] [instance: df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e] Start building networks asynchronously for instance. 
{{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 667.726476] env[61839]: DEBUG oslo_concurrency.lockutils [None req-06f607c6-5c8c-4086-84f6-74587be86126 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.639s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 667.727961] env[61839]: INFO nova.compute.claims [None req-06f607c6-5c8c-4086-84f6-74587be86126 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] [instance: 81ba4888-4b21-410f-ab86-a3068995836f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 668.239022] env[61839]: DEBUG nova.compute.utils [None req-dd05ef5d-2024-4ee1-8efc-49a773076423 tempest-ServerAddressesTestJSON-1080876972 tempest-ServerAddressesTestJSON-1080876972-project-member] Using /dev/sd instead of None {{(pid=61839) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 668.239022] env[61839]: DEBUG nova.compute.manager [None req-dd05ef5d-2024-4ee1-8efc-49a773076423 tempest-ServerAddressesTestJSON-1080876972 tempest-ServerAddressesTestJSON-1080876972-project-member] [instance: df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e] Allocating IP information in the background. {{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 668.239022] env[61839]: DEBUG nova.network.neutron [None req-dd05ef5d-2024-4ee1-8efc-49a773076423 tempest-ServerAddressesTestJSON-1080876972 tempest-ServerAddressesTestJSON-1080876972-project-member] [instance: df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e] allocate_for_instance() {{(pid=61839) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 668.324259] env[61839]: DEBUG nova.policy [None req-dd05ef5d-2024-4ee1-8efc-49a773076423 tempest-ServerAddressesTestJSON-1080876972 tempest-ServerAddressesTestJSON-1080876972-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2feb7fb1a09843d09a1cc4062783579a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '20abb9e968aa4b77b0920f9d2ef300a9', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61839) authorize /opt/stack/nova/nova/policy.py:201}} [ 668.700131] env[61839]: DEBUG nova.network.neutron [None req-dd05ef5d-2024-4ee1-8efc-49a773076423 tempest-ServerAddressesTestJSON-1080876972 tempest-ServerAddressesTestJSON-1080876972-project-member] [instance: df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e] Successfully created port: a387e3fb-f832-426b-9aa3-cc499d5fb2cc {{(pid=61839) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 668.747369] env[61839]: DEBUG nova.compute.manager [None req-dd05ef5d-2024-4ee1-8efc-49a773076423 tempest-ServerAddressesTestJSON-1080876972 tempest-ServerAddressesTestJSON-1080876972-project-member] [instance: df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e] Start building block device mappings for instance. 
{{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 669.218650] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbf15f81-6fc0-4e6c-b689-335c58da4b9e {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.228636] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ead438b3-64ef-4f8c-9e0f-4f500f7aae58 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.265972] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1fdcef5-229c-43e1-9f33-b35777a8ea38 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.275358] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d6defde-d4f6-4e61-bca2-8bd90dce3ba6 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.292168] env[61839]: DEBUG nova.compute.provider_tree [None req-06f607c6-5c8c-4086-84f6-74587be86126 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 669.771291] env[61839]: DEBUG nova.compute.manager [None req-dd05ef5d-2024-4ee1-8efc-49a773076423 tempest-ServerAddressesTestJSON-1080876972 tempest-ServerAddressesTestJSON-1080876972-project-member] [instance: df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e] Start spawning the instance on the hypervisor. 
{{(pid=61839) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 669.795974] env[61839]: DEBUG nova.virt.hardware [None req-dd05ef5d-2024-4ee1-8efc-49a773076423 tempest-ServerAddressesTestJSON-1080876972 tempest-ServerAddressesTestJSON-1080876972-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-18T16:51:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-18T16:50:57Z,direct_url=,disk_format='vmdk',id=e497cc62-282a-4a70-9770-22d80d8a1013,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a98e034276e44534a9feace637762da7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-18T16:50:58Z,virtual_size=,visibility=), allow threads: False {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 669.797029] env[61839]: DEBUG nova.virt.hardware [None req-dd05ef5d-2024-4ee1-8efc-49a773076423 tempest-ServerAddressesTestJSON-1080876972 tempest-ServerAddressesTestJSON-1080876972-project-member] Flavor limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 669.797029] env[61839]: DEBUG nova.virt.hardware [None req-dd05ef5d-2024-4ee1-8efc-49a773076423 tempest-ServerAddressesTestJSON-1080876972 tempest-ServerAddressesTestJSON-1080876972-project-member] Image limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 669.797029] env[61839]: DEBUG nova.virt.hardware [None req-dd05ef5d-2024-4ee1-8efc-49a773076423 tempest-ServerAddressesTestJSON-1080876972 tempest-ServerAddressesTestJSON-1080876972-project-member] Flavor pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 669.797029] env[61839]: DEBUG nova.virt.hardware [None req-dd05ef5d-2024-4ee1-8efc-49a773076423 tempest-ServerAddressesTestJSON-1080876972 tempest-ServerAddressesTestJSON-1080876972-project-member] Image pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 669.797029] env[61839]: DEBUG nova.virt.hardware [None req-dd05ef5d-2024-4ee1-8efc-49a773076423 tempest-ServerAddressesTestJSON-1080876972 tempest-ServerAddressesTestJSON-1080876972-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 669.797237] env[61839]: DEBUG nova.virt.hardware [None req-dd05ef5d-2024-4ee1-8efc-49a773076423 tempest-ServerAddressesTestJSON-1080876972 tempest-ServerAddressesTestJSON-1080876972-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 669.797450] env[61839]: DEBUG nova.virt.hardware [None req-dd05ef5d-2024-4ee1-8efc-49a773076423 tempest-ServerAddressesTestJSON-1080876972 tempest-ServerAddressesTestJSON-1080876972-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 669.797669] env[61839]: DEBUG 
nova.virt.hardware [None req-dd05ef5d-2024-4ee1-8efc-49a773076423 tempest-ServerAddressesTestJSON-1080876972 tempest-ServerAddressesTestJSON-1080876972-project-member] Got 1 possible topologies {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 669.797865] env[61839]: DEBUG nova.virt.hardware [None req-dd05ef5d-2024-4ee1-8efc-49a773076423 tempest-ServerAddressesTestJSON-1080876972 tempest-ServerAddressesTestJSON-1080876972-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 669.799967] env[61839]: DEBUG nova.virt.hardware [None req-dd05ef5d-2024-4ee1-8efc-49a773076423 tempest-ServerAddressesTestJSON-1080876972 tempest-ServerAddressesTestJSON-1080876972-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 669.799967] env[61839]: DEBUG nova.scheduler.client.report [None req-06f607c6-5c8c-4086-84f6-74587be86126 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 669.802605] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0301cdc2-5218-46b0-b7cd-06e3bcee28b4 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.811119] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07dbcf41-a72e-4a71-b456-4b4a07d9f44a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.033772] env[61839]: DEBUG nova.compute.manager [req-eafb895f-e644-4eb9-b71a-1a9ba05e363c req-b1b7e618-e131-4620-ba7b-b7b377f7b481 service nova] [instance: df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e] Received event network-changed-a387e3fb-f832-426b-9aa3-cc499d5fb2cc {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 670.034348] env[61839]: DEBUG nova.compute.manager [req-eafb895f-e644-4eb9-b71a-1a9ba05e363c req-b1b7e618-e131-4620-ba7b-b7b377f7b481 service nova] [instance: df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e] Refreshing instance network info cache due to event network-changed-a387e3fb-f832-426b-9aa3-cc499d5fb2cc. 
{{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}}
[ 670.037048] env[61839]: DEBUG oslo_concurrency.lockutils [req-eafb895f-e644-4eb9-b71a-1a9ba05e363c req-b1b7e618-e131-4620-ba7b-b7b377f7b481 service nova] Acquiring lock "refresh_cache-df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 670.037048] env[61839]: DEBUG oslo_concurrency.lockutils [req-eafb895f-e644-4eb9-b71a-1a9ba05e363c req-b1b7e618-e131-4620-ba7b-b7b377f7b481 service nova] Acquired lock "refresh_cache-df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 670.037048] env[61839]: DEBUG nova.network.neutron [req-eafb895f-e644-4eb9-b71a-1a9ba05e363c req-b1b7e618-e131-4620-ba7b-b7b377f7b481 service nova] [instance: df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e] Refreshing network info cache for port a387e3fb-f832-426b-9aa3-cc499d5fb2cc {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}}
[ 670.209469] env[61839]: ERROR nova.compute.manager [None req-dd05ef5d-2024-4ee1-8efc-49a773076423 tempest-ServerAddressesTestJSON-1080876972 tempest-ServerAddressesTestJSON-1080876972-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port a387e3fb-f832-426b-9aa3-cc499d5fb2cc, please check neutron logs for more information.
[ 670.209469] env[61839]: ERROR nova.compute.manager Traceback (most recent call last):
[ 670.209469] env[61839]: ERROR nova.compute.manager   File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async
[ 670.209469] env[61839]: ERROR nova.compute.manager     nwinfo = self.network_api.allocate_for_instance(
[ 670.209469] env[61839]: ERROR nova.compute.manager   File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance
[ 670.209469] env[61839]: ERROR nova.compute.manager     created_port_ids = self._update_ports_for_instance(
[ 670.209469] env[61839]: ERROR nova.compute.manager   File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance
[ 670.209469] env[61839]: ERROR nova.compute.manager     with excutils.save_and_reraise_exception():
[ 670.209469] env[61839]: ERROR nova.compute.manager   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 670.209469] env[61839]: ERROR nova.compute.manager     self.force_reraise()
[ 670.209469] env[61839]: ERROR nova.compute.manager   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 670.209469] env[61839]: ERROR nova.compute.manager     raise self.value
[ 670.209469] env[61839]: ERROR nova.compute.manager   File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance
[ 670.209469] env[61839]: ERROR nova.compute.manager     updated_port = self._update_port(
[ 670.209469] env[61839]: ERROR nova.compute.manager   File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port
[ 670.209469] env[61839]: ERROR nova.compute.manager     _ensure_no_port_binding_failure(port)
[ 670.209901] env[61839]: ERROR nova.compute.manager   File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure
[ 670.209901] env[61839]: ERROR nova.compute.manager     raise exception.PortBindingFailed(port_id=port['id'])
[ 670.209901] env[61839]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port a387e3fb-f832-426b-9aa3-cc499d5fb2cc, please check neutron logs for more information.
[ 670.209901] env[61839]: ERROR nova.compute.manager
[ 670.209901] env[61839]: Traceback (most recent call last):
[ 670.209901] env[61839]:   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait
[ 670.209901] env[61839]:     listener.cb(fileno)
[ 670.209901] env[61839]:   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 670.209901] env[61839]:     result = function(*args, **kwargs)
[ 670.209901] env[61839]:   File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 670.209901] env[61839]:     return func(*args, **kwargs)
[ 670.209901] env[61839]:   File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async
[ 670.209901] env[61839]:     raise e
[ 670.209901] env[61839]:   File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async
[ 670.209901] env[61839]:     nwinfo = self.network_api.allocate_for_instance(
[ 670.209901] env[61839]:   File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance
[ 670.209901] env[61839]:     created_port_ids = self._update_ports_for_instance(
[ 670.209901] env[61839]:   File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance
[ 670.209901] env[61839]:     with excutils.save_and_reraise_exception():
[ 670.209901] env[61839]:   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 670.209901] env[61839]:     self.force_reraise()
[ 670.209901] env[61839]:   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 670.209901] env[61839]:     raise self.value
[ 670.209901] env[61839]:   File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance
[ 670.209901] env[61839]:     updated_port = self._update_port(
[ 670.209901] env[61839]:   File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port
[ 670.209901] env[61839]:     _ensure_no_port_binding_failure(port)
[ 670.209901] env[61839]:   File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure
[ 670.209901] env[61839]:     raise exception.PortBindingFailed(port_id=port['id'])
[ 670.210694] env[61839]: nova.exception.PortBindingFailed: Binding failed for port a387e3fb-f832-426b-9aa3-cc499d5fb2cc, please check neutron logs for more information.
[ 670.210694] env[61839]: Removing descriptor: 17
[ 670.210694] env[61839]: ERROR nova.compute.manager [None req-dd05ef5d-2024-4ee1-8efc-49a773076423 tempest-ServerAddressesTestJSON-1080876972 tempest-ServerAddressesTestJSON-1080876972-project-member] [instance: df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port a387e3fb-f832-426b-9aa3-cc499d5fb2cc, please check neutron logs for more information.
[ 670.210694] env[61839]: ERROR nova.compute.manager [instance: df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e] Traceback (most recent call last): [ 670.210694] env[61839]: ERROR nova.compute.manager [instance: df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 670.210694] env[61839]: ERROR nova.compute.manager [instance: df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e] yield resources [ 670.210694] env[61839]: ERROR nova.compute.manager [instance: df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 670.210694] env[61839]: ERROR nova.compute.manager [instance: df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e] self.driver.spawn(context, instance, image_meta, [ 670.210694] env[61839]: ERROR nova.compute.manager [instance: df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 670.210694] env[61839]: ERROR nova.compute.manager [instance: df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 670.210694] env[61839]: ERROR nova.compute.manager [instance: df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 670.210694] env[61839]: ERROR nova.compute.manager [instance: df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e] vm_ref = self.build_virtual_machine(instance, [ 670.211247] env[61839]: ERROR nova.compute.manager [instance: df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 670.211247] env[61839]: ERROR nova.compute.manager [instance: df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e] vif_infos = vmwarevif.get_vif_info(self._session, [ 670.211247] env[61839]: ERROR nova.compute.manager [instance: df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 670.211247] env[61839]: ERROR nova.compute.manager [instance: df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e] for vif in network_info: [ 670.211247] env[61839]: ERROR nova.compute.manager [instance: df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 670.211247] env[61839]: ERROR nova.compute.manager [instance: df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e] return self._sync_wrapper(fn, *args, **kwargs) [ 670.211247] env[61839]: ERROR nova.compute.manager [instance: df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 670.211247] env[61839]: ERROR nova.compute.manager [instance: df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e] self.wait() [ 670.211247] env[61839]: ERROR nova.compute.manager [instance: df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 670.211247] env[61839]: ERROR nova.compute.manager [instance: df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e] self[:] = self._gt.wait() [ 670.211247] env[61839]: ERROR nova.compute.manager [instance: df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 670.211247] env[61839]: ERROR nova.compute.manager [instance: df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e] return self._exit_event.wait() [ 670.211247] env[61839]: ERROR nova.compute.manager [instance: df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 670.211648] env[61839]: ERROR 
nova.compute.manager [instance: df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e] result = hub.switch() [ 670.211648] env[61839]: ERROR nova.compute.manager [instance: df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 670.211648] env[61839]: ERROR nova.compute.manager [instance: df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e] return self.greenlet.switch() [ 670.211648] env[61839]: ERROR nova.compute.manager [instance: df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 670.211648] env[61839]: ERROR nova.compute.manager [instance: df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e] result = function(*args, **kwargs) [ 670.211648] env[61839]: ERROR nova.compute.manager [instance: df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 670.211648] env[61839]: ERROR nova.compute.manager [instance: df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e] return func(*args, **kwargs) [ 670.211648] env[61839]: ERROR nova.compute.manager [instance: df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 670.211648] env[61839]: ERROR nova.compute.manager [instance: df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e] raise e [ 670.211648] env[61839]: ERROR nova.compute.manager [instance: df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 670.211648] env[61839]: ERROR nova.compute.manager [instance: df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e] nwinfo = self.network_api.allocate_for_instance( [ 670.211648] env[61839]: ERROR nova.compute.manager [instance: df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 670.211648] env[61839]: ERROR nova.compute.manager [instance: df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e] created_port_ids = self._update_ports_for_instance( [ 670.212016] env[61839]: ERROR nova.compute.manager [instance: df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 670.212016] env[61839]: ERROR nova.compute.manager [instance: df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e] with excutils.save_and_reraise_exception(): [ 670.212016] env[61839]: ERROR nova.compute.manager [instance: df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 670.212016] env[61839]: ERROR nova.compute.manager [instance: df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e] self.force_reraise() [ 670.212016] env[61839]: ERROR nova.compute.manager [instance: df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 670.212016] env[61839]: ERROR nova.compute.manager [instance: df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e] raise self.value [ 670.212016] env[61839]: ERROR nova.compute.manager [instance: df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 670.212016] env[61839]: ERROR nova.compute.manager [instance: df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e] updated_port = self._update_port( [ 670.212016] env[61839]: ERROR nova.compute.manager [instance: df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 670.212016] 
env[61839]: ERROR nova.compute.manager [instance: df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e] _ensure_no_port_binding_failure(port) [ 670.212016] env[61839]: ERROR nova.compute.manager [instance: df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 670.212016] env[61839]: ERROR nova.compute.manager [instance: df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e] raise exception.PortBindingFailed(port_id=port['id']) [ 670.212360] env[61839]: ERROR nova.compute.manager [instance: df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e] nova.exception.PortBindingFailed: Binding failed for port a387e3fb-f832-426b-9aa3-cc499d5fb2cc, please check neutron logs for more information. [ 670.212360] env[61839]: ERROR nova.compute.manager [instance: df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e] [ 670.212360] env[61839]: INFO nova.compute.manager [None req-dd05ef5d-2024-4ee1-8efc-49a773076423 tempest-ServerAddressesTestJSON-1080876972 tempest-ServerAddressesTestJSON-1080876972-project-member] [instance: df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e] Terminating instance [ 670.213123] env[61839]: DEBUG oslo_concurrency.lockutils [None req-dd05ef5d-2024-4ee1-8efc-49a773076423 tempest-ServerAddressesTestJSON-1080876972 tempest-ServerAddressesTestJSON-1080876972-project-member] Acquiring lock "refresh_cache-df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 670.306710] env[61839]: DEBUG oslo_concurrency.lockutils [None req-06f607c6-5c8c-4086-84f6-74587be86126 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.580s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 670.307562] env[61839]: DEBUG nova.compute.manager [None req-06f607c6-5c8c-4086-84f6-74587be86126 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] [instance: 81ba4888-4b21-410f-ab86-a3068995836f] Start building networks asynchronously for instance. 
{{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 670.310185] env[61839]: DEBUG oslo_concurrency.lockutils [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 19.673s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 670.310371] env[61839]: DEBUG oslo_concurrency.lockutils [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 670.310631] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61839) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 670.310918] env[61839]: DEBUG oslo_concurrency.lockutils [None req-9e174940-155b-4c43-946d-348c70405c68 tempest-ServersTestJSON-188267051 tempest-ServersTestJSON-188267051-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.219s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 670.312348] env[61839]: INFO nova.compute.claims [None req-9e174940-155b-4c43-946d-348c70405c68 tempest-ServersTestJSON-188267051 tempest-ServersTestJSON-188267051-project-member] [instance: 2619b21e-084f-4003-af13-80382bfb1e2f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 670.319919] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d33fbb36-de5e-41d4-bb7e-30d758ad19c3 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.333998] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1811a31-a7aa-4878-95aa-9f5b048af9ec {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.349296] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c268c657-7fa8-45f6-966d-e7907d89d937 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.356879] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0514cf02-baa4-4107-8802-8f62a1f25424 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.391041] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181447MB free_disk=135GB free_vcpus=48 pci_devices=None {{(pid=61839) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 670.391238] env[61839]: DEBUG oslo_concurrency.lockutils [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61839) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 670.556687] env[61839]: DEBUG nova.network.neutron [req-eafb895f-e644-4eb9-b71a-1a9ba05e363c req-b1b7e618-e131-4620-ba7b-b7b377f7b481 service nova] [instance: df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 670.665576] env[61839]: DEBUG nova.network.neutron [req-eafb895f-e644-4eb9-b71a-1a9ba05e363c req-b1b7e618-e131-4620-ba7b-b7b377f7b481 service nova] [instance: df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 670.820939] env[61839]: DEBUG nova.compute.utils [None req-06f607c6-5c8c-4086-84f6-74587be86126 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] Using /dev/sd instead of None {{(pid=61839) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 670.822416] env[61839]: DEBUG nova.compute.manager [None req-06f607c6-5c8c-4086-84f6-74587be86126 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] [instance: 81ba4888-4b21-410f-ab86-a3068995836f] Not allocating networking since 'none' was specified. {{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 670.830580] env[61839]: DEBUG oslo_concurrency.lockutils [None req-1a15c259-b1dd-44bd-bdfc-6fbb7bd80a7c tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Acquiring lock "86525ea7-af75-4b10-85a1-c0fbab73ea5f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 670.830807] env[61839]: DEBUG oslo_concurrency.lockutils [None req-1a15c259-b1dd-44bd-bdfc-6fbb7bd80a7c tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Lock "86525ea7-af75-4b10-85a1-c0fbab73ea5f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 671.168366] env[61839]: DEBUG oslo_concurrency.lockutils [req-eafb895f-e644-4eb9-b71a-1a9ba05e363c req-b1b7e618-e131-4620-ba7b-b7b377f7b481 service nova] Releasing lock "refresh_cache-df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 671.168868] env[61839]: DEBUG oslo_concurrency.lockutils [None req-dd05ef5d-2024-4ee1-8efc-49a773076423 tempest-ServerAddressesTestJSON-1080876972 tempest-ServerAddressesTestJSON-1080876972-project-member] Acquired lock "refresh_cache-df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 671.169094] env[61839]: DEBUG nova.network.neutron [None req-dd05ef5d-2024-4ee1-8efc-49a773076423 tempest-ServerAddressesTestJSON-1080876972 tempest-ServerAddressesTestJSON-1080876972-project-member] [instance: df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 671.326021] env[61839]: DEBUG nova.compute.manager [None 
req-06f607c6-5c8c-4086-84f6-74587be86126 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] [instance: 81ba4888-4b21-410f-ab86-a3068995836f] Start building block device mappings for instance. {{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 671.689760] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-859020e6-5d79-42f2-8e4d-8bbe4f0c46a9 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.697452] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8340c01-afcd-4787-9d18-c2f009ab8155 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.700972] env[61839]: DEBUG nova.network.neutron [None req-dd05ef5d-2024-4ee1-8efc-49a773076423 tempest-ServerAddressesTestJSON-1080876972 tempest-ServerAddressesTestJSON-1080876972-project-member] [instance: df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 671.729789] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a342c76-96e1-4268-9ffc-ef67f593d369 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.737255] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bd0bdd4-853f-4e4f-a22d-0903fd438f60 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.750347] env[61839]: DEBUG nova.compute.provider_tree [None req-9e174940-155b-4c43-946d-348c70405c68 tempest-ServersTestJSON-188267051 tempest-ServersTestJSON-188267051-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 671.805862] env[61839]: DEBUG nova.network.neutron [None req-dd05ef5d-2024-4ee1-8efc-49a773076423 tempest-ServerAddressesTestJSON-1080876972 tempest-ServerAddressesTestJSON-1080876972-project-member] [instance: df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 672.059184] env[61839]: DEBUG nova.compute.manager [req-48221d40-c42d-460b-95de-9091339621fc req-e3e1218c-a5c6-476b-8ad7-27c0bd856a0d service nova] [instance: df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e] Received event network-vif-deleted-a387e3fb-f832-426b-9aa3-cc499d5fb2cc {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 672.253285] env[61839]: DEBUG nova.scheduler.client.report [None req-9e174940-155b-4c43-946d-348c70405c68 tempest-ServersTestJSON-188267051 tempest-ServersTestJSON-188267051-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) 
set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 672.310642] env[61839]: DEBUG oslo_concurrency.lockutils [None req-dd05ef5d-2024-4ee1-8efc-49a773076423 tempest-ServerAddressesTestJSON-1080876972 tempest-ServerAddressesTestJSON-1080876972-project-member] Releasing lock "refresh_cache-df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 672.311300] env[61839]: DEBUG nova.compute.manager [None req-dd05ef5d-2024-4ee1-8efc-49a773076423 tempest-ServerAddressesTestJSON-1080876972 tempest-ServerAddressesTestJSON-1080876972-project-member] [instance: df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e] Start destroying the instance on the hypervisor. {{(pid=61839) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 672.311518] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-dd05ef5d-2024-4ee1-8efc-49a773076423 tempest-ServerAddressesTestJSON-1080876972 tempest-ServerAddressesTestJSON-1080876972-project-member] [instance: df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e] Destroying instance {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 672.311812] env[61839]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-92fe91bf-f15f-447a-bbc3-480f5617bd6c {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.320836] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2664bdf-3429-4f48-acb2-c77dc155906f {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.333773] env[61839]: DEBUG nova.compute.manager [None req-06f607c6-5c8c-4086-84f6-74587be86126 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] [instance: 81ba4888-4b21-410f-ab86-a3068995836f] Start spawning the instance on the hypervisor. {{(pid=61839) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 672.347797] env[61839]: WARNING nova.virt.vmwareapi.vmops [None req-dd05ef5d-2024-4ee1-8efc-49a773076423 tempest-ServerAddressesTestJSON-1080876972 tempest-ServerAddressesTestJSON-1080876972-project-member] [instance: df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e could not be found. [ 672.348028] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-dd05ef5d-2024-4ee1-8efc-49a773076423 tempest-ServerAddressesTestJSON-1080876972 tempest-ServerAddressesTestJSON-1080876972-project-member] [instance: df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e] Instance destroyed {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 672.348216] env[61839]: INFO nova.compute.manager [None req-dd05ef5d-2024-4ee1-8efc-49a773076423 tempest-ServerAddressesTestJSON-1080876972 tempest-ServerAddressesTestJSON-1080876972-project-member] [instance: df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e] Took 0.04 seconds to destroy the instance on the hypervisor. [ 672.348460] env[61839]: DEBUG oslo.service.loopingcall [None req-dd05ef5d-2024-4ee1-8efc-49a773076423 tempest-ServerAddressesTestJSON-1080876972 tempest-ServerAddressesTestJSON-1080876972-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 672.348676] env[61839]: DEBUG nova.compute.manager [-] [instance: df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 672.348770] env[61839]: DEBUG nova.network.neutron [-] [instance: df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 672.358420] env[61839]: DEBUG nova.virt.hardware [None req-06f607c6-5c8c-4086-84f6-74587be86126 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-18T16:51:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-18T16:50:57Z,direct_url=,disk_format='vmdk',id=e497cc62-282a-4a70-9770-22d80d8a1013,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a98e034276e44534a9feace637762da7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-18T16:50:58Z,virtual_size=,visibility=), allow threads: False {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 672.358647] env[61839]: DEBUG nova.virt.hardware [None req-06f607c6-5c8c-4086-84f6-74587be86126 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] Flavor limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 672.358805] env[61839]: DEBUG nova.virt.hardware [None req-06f607c6-5c8c-4086-84f6-74587be86126 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] Image limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 672.358985] env[61839]: DEBUG nova.virt.hardware [None req-06f607c6-5c8c-4086-84f6-74587be86126 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] Flavor pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 672.359143] env[61839]: DEBUG nova.virt.hardware [None req-06f607c6-5c8c-4086-84f6-74587be86126 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] Image pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 672.359287] env[61839]: DEBUG nova.virt.hardware [None req-06f607c6-5c8c-4086-84f6-74587be86126 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 672.359516] env[61839]: DEBUG nova.virt.hardware [None req-06f607c6-5c8c-4086-84f6-74587be86126 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61839) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:569}} [ 672.359656] env[61839]: DEBUG nova.virt.hardware [None req-06f607c6-5c8c-4086-84f6-74587be86126 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 672.359787] env[61839]: DEBUG nova.virt.hardware [None req-06f607c6-5c8c-4086-84f6-74587be86126 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] Got 1 possible topologies {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 672.359946] env[61839]: DEBUG nova.virt.hardware [None req-06f607c6-5c8c-4086-84f6-74587be86126 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 672.360134] env[61839]: DEBUG nova.virt.hardware [None req-06f607c6-5c8c-4086-84f6-74587be86126 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 672.360939] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bc1f2d2-3677-4ee0-af9d-a1f10b09df3d {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.365475] env[61839]: DEBUG nova.network.neutron [-] [instance: df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 672.369894] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1804b62-d073-470f-917c-db0fcc87c530 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.383920] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-06f607c6-5c8c-4086-84f6-74587be86126 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] [instance: 81ba4888-4b21-410f-ab86-a3068995836f] Instance VIF info [] {{(pid=61839) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 672.389312] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-06f607c6-5c8c-4086-84f6-74587be86126 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] Creating folder: Project (1e1a985d58c047b282fc28356d0ec61b). Parent ref: group-v281288. {{(pid=61839) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 672.389791] env[61839]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-01c983cd-3a4a-4e02-9d77-db40373648c7 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.399983] env[61839]: INFO nova.virt.vmwareapi.vm_util [None req-06f607c6-5c8c-4086-84f6-74587be86126 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] Created folder: Project (1e1a985d58c047b282fc28356d0ec61b) in parent group-v281288. 
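The PortBindingFailed tracebacks further back both pass through oslo.utils' save_and_reraise_exception context manager, which is how _update_ports_for_instance gets to run cleanup without masking the original exception. A minimal sketch of that pattern; the helper names here are hypothetical:

```python
# Sketch of the save_and_reraise_exception pattern visible in the
# tracebacks above; _update_port and _cleanup_ports are hypothetical
# helpers, not Nova's actual functions.
from oslo_utils import excutils

def update_ports(ports):
    try:
        for port in ports:
            _update_port(port)  # may raise e.g. PortBindingFailed
    except Exception:
        with excutils.save_and_reraise_exception():
            # Runs before the original exception is re-raised; if this
            # cleanup itself raises, the original error is logged
            # rather than silently lost.
            _cleanup_ports(ports)
```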
[ 672.400180] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-06f607c6-5c8c-4086-84f6-74587be86126 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] Creating folder: Instances. Parent ref: group-v281305. {{(pid=61839) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 672.400403] env[61839]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d24c3b9a-c84d-4b6d-849b-e2e8924a3a45 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.408704] env[61839]: INFO nova.virt.vmwareapi.vm_util [None req-06f607c6-5c8c-4086-84f6-74587be86126 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] Created folder: Instances in parent group-v281305. [ 672.408921] env[61839]: DEBUG oslo.service.loopingcall [None req-06f607c6-5c8c-4086-84f6-74587be86126 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 672.409104] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 81ba4888-4b21-410f-ab86-a3068995836f] Creating VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 672.409292] env[61839]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-907f8a57-1354-4c84-bbb1-66778dab1128 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.424761] env[61839]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 672.424761] env[61839]: value = "task-1314293" [ 672.424761] env[61839]: _type = "Task" [ 672.424761] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 672.431745] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314293, 'name': CreateVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 672.758511] env[61839]: DEBUG oslo_concurrency.lockutils [None req-9e174940-155b-4c43-946d-348c70405c68 tempest-ServersTestJSON-188267051 tempest-ServersTestJSON-188267051-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.448s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 672.759081] env[61839]: DEBUG nova.compute.manager [None req-9e174940-155b-4c43-946d-348c70405c68 tempest-ServersTestJSON-188267051 tempest-ServersTestJSON-188267051-project-member] [instance: 2619b21e-084f-4003-af13-80382bfb1e2f] Start building networks asynchronously for instance. 
{{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 672.761857] env[61839]: DEBUG oslo_concurrency.lockutils [None req-7cd3e466-5165-49d9-934b-74ecfbe9a0b9 tempest-ServersV294TestFqdnHostnames-1206786293 tempest-ServersV294TestFqdnHostnames-1206786293-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.673s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 672.763370] env[61839]: INFO nova.compute.claims [None req-7cd3e466-5165-49d9-934b-74ecfbe9a0b9 tempest-ServersV294TestFqdnHostnames-1206786293 tempest-ServersV294TestFqdnHostnames-1206786293-project-member] [instance: 0ab450ab-6416-464d-8140-a8c320abb69c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 672.870117] env[61839]: DEBUG nova.network.neutron [-] [instance: df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 672.934248] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314293, 'name': CreateVM_Task, 'duration_secs': 0.236111} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 672.934416] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 81ba4888-4b21-410f-ab86-a3068995836f] Created VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 672.934829] env[61839]: DEBUG oslo_concurrency.lockutils [None req-06f607c6-5c8c-4086-84f6-74587be86126 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 672.934989] env[61839]: DEBUG oslo_concurrency.lockutils [None req-06f607c6-5c8c-4086-84f6-74587be86126 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 672.935332] env[61839]: DEBUG oslo_concurrency.lockutils [None req-06f607c6-5c8c-4086-84f6-74587be86126 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 672.935581] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-03e0ed63-5bf6-41e4-aeb9-08883dc66003 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.939731] env[61839]: DEBUG oslo_vmware.api [None req-06f607c6-5c8c-4086-84f6-74587be86126 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] Waiting for the task: (returnval){ [ 672.939731] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]521a51d5-ac4f-f1b9-2c18-dfacdb8ab5da" [ 672.939731] env[61839]: _type = "Task" [ 672.939731] env[61839]: } to complete. 
{{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 672.949176] env[61839]: DEBUG oslo_vmware.api [None req-06f607c6-5c8c-4086-84f6-74587be86126 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]521a51d5-ac4f-f1b9-2c18-dfacdb8ab5da, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 673.267881] env[61839]: DEBUG nova.compute.utils [None req-9e174940-155b-4c43-946d-348c70405c68 tempest-ServersTestJSON-188267051 tempest-ServersTestJSON-188267051-project-member] Using /dev/sd instead of None {{(pid=61839) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 673.271299] env[61839]: DEBUG nova.compute.manager [None req-9e174940-155b-4c43-946d-348c70405c68 tempest-ServersTestJSON-188267051 tempest-ServersTestJSON-188267051-project-member] [instance: 2619b21e-084f-4003-af13-80382bfb1e2f] Allocating IP information in the background. {{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 673.271466] env[61839]: DEBUG nova.network.neutron [None req-9e174940-155b-4c43-946d-348c70405c68 tempest-ServersTestJSON-188267051 tempest-ServersTestJSON-188267051-project-member] [instance: 2619b21e-084f-4003-af13-80382bfb1e2f] allocate_for_instance() {{(pid=61839) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 673.316629] env[61839]: DEBUG nova.policy [None req-9e174940-155b-4c43-946d-348c70405c68 tempest-ServersTestJSON-188267051 tempest-ServersTestJSON-188267051-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4e02535785d2478daca27b6824f3ec4a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4e090804912747d1bd128ca51b9e3c5d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61839) authorize /opt/stack/nova/nova/policy.py:201}} [ 673.372653] env[61839]: INFO nova.compute.manager [-] [instance: df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e] Took 1.02 seconds to deallocate network for instance. [ 673.375616] env[61839]: DEBUG nova.compute.claims [None req-dd05ef5d-2024-4ee1-8efc-49a773076423 tempest-ServerAddressesTestJSON-1080876972 tempest-ServerAddressesTestJSON-1080876972-project-member] [instance: df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e] Aborting claim: {{(pid=61839) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 673.375794] env[61839]: DEBUG oslo_concurrency.lockutils [None req-dd05ef5d-2024-4ee1-8efc-49a773076423 tempest-ServerAddressesTestJSON-1080876972 tempest-ServerAddressesTestJSON-1080876972-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 673.452031] env[61839]: DEBUG oslo_vmware.api [None req-06f607c6-5c8c-4086-84f6-74587be86126 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]521a51d5-ac4f-f1b9-2c18-dfacdb8ab5da, 'name': SearchDatastore_Task, 'duration_secs': 0.00843} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 673.452031] env[61839]: DEBUG oslo_concurrency.lockutils [None req-06f607c6-5c8c-4086-84f6-74587be86126 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 673.452031] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-06f607c6-5c8c-4086-84f6-74587be86126 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] [instance: 81ba4888-4b21-410f-ab86-a3068995836f] Processing image e497cc62-282a-4a70-9770-22d80d8a1013 {{(pid=61839) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 673.452031] env[61839]: DEBUG oslo_concurrency.lockutils [None req-06f607c6-5c8c-4086-84f6-74587be86126 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 673.452223] env[61839]: DEBUG oslo_concurrency.lockutils [None req-06f607c6-5c8c-4086-84f6-74587be86126 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 673.452650] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-06f607c6-5c8c-4086-84f6-74587be86126 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 673.453242] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9e5b67d9-aa02-4347-ad7a-d39612a1c81f {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.464330] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-06f607c6-5c8c-4086-84f6-74587be86126 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 673.464330] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-06f607c6-5c8c-4086-84f6-74587be86126 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61839) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 673.464330] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-51e9e46b-ac17-4f4b-8b81-2ef1c4398d19 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.468222] env[61839]: DEBUG oslo_vmware.api [None req-06f607c6-5c8c-4086-84f6-74587be86126 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] Waiting for the task: (returnval){ [ 673.468222] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]5213eb76-2db2-e4e8-6bd0-557ec51be8d0" [ 673.468222] env[61839]: _type = "Task" [ 673.468222] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 673.476550] env[61839]: DEBUG oslo_vmware.api [None req-06f607c6-5c8c-4086-84f6-74587be86126 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]5213eb76-2db2-e4e8-6bd0-557ec51be8d0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 673.730661] env[61839]: DEBUG nova.network.neutron [None req-9e174940-155b-4c43-946d-348c70405c68 tempest-ServersTestJSON-188267051 tempest-ServersTestJSON-188267051-project-member] [instance: 2619b21e-084f-4003-af13-80382bfb1e2f] Successfully created port: 7ae9091f-df7a-4c82-90df-81b17f49b944 {{(pid=61839) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 673.772789] env[61839]: DEBUG nova.compute.manager [None req-9e174940-155b-4c43-946d-348c70405c68 tempest-ServersTestJSON-188267051 tempest-ServersTestJSON-188267051-project-member] [instance: 2619b21e-084f-4003-af13-80382bfb1e2f] Start building block device mappings for instance. {{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 673.979062] env[61839]: DEBUG oslo_vmware.api [None req-06f607c6-5c8c-4086-84f6-74587be86126 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]5213eb76-2db2-e4e8-6bd0-557ec51be8d0, 'name': SearchDatastore_Task, 'duration_secs': 0.00784} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 673.979834] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a65fd1fd-3119-4e14-8902-c8689b8d8602 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.986186] env[61839]: DEBUG oslo_vmware.api [None req-06f607c6-5c8c-4086-84f6-74587be86126 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] Waiting for the task: (returnval){ [ 673.986186] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]526454e0-ae75-b9f6-b407-998612a6e3d3" [ 673.986186] env[61839]: _type = "Task" [ 673.986186] env[61839]: } to complete. 
{{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 673.997808] env[61839]: DEBUG oslo_vmware.api [None req-06f607c6-5c8c-4086-84f6-74587be86126 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]526454e0-ae75-b9f6-b407-998612a6e3d3, 'name': SearchDatastore_Task, 'duration_secs': 0.009126} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 673.998775] env[61839]: DEBUG oslo_concurrency.lockutils [None req-06f607c6-5c8c-4086-84f6-74587be86126 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 673.998775] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-06f607c6-5c8c-4086-84f6-74587be86126 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk to [datastore1] 81ba4888-4b21-410f-ab86-a3068995836f/81ba4888-4b21-410f-ab86-a3068995836f.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 673.998775] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ef275065-a5af-48bb-a73b-36a78285d94a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.004875] env[61839]: DEBUG oslo_vmware.api [None req-06f607c6-5c8c-4086-84f6-74587be86126 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] Waiting for the task: (returnval){ [ 674.004875] env[61839]: value = "task-1314294" [ 674.004875] env[61839]: _type = "Task" [ 674.004875] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 674.012374] env[61839]: DEBUG oslo_vmware.api [None req-06f607c6-5c8c-4086-84f6-74587be86126 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] Task: {'id': task-1314294, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 674.183764] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf99626a-0de2-4823-adba-0fec7657dd0f {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.197119] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb21af48-0994-4948-bb16-f25b88611f76 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.238047] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57fe796e-b1be-4966-948b-bf724647506c {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.248178] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a000268-98de-4c5e-b60b-a9f3732081f9 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.264345] env[61839]: DEBUG nova.compute.provider_tree [None req-7cd3e466-5165-49d9-934b-74ecfbe9a0b9 tempest-ServersV294TestFqdnHostnames-1206786293 tempest-ServersV294TestFqdnHostnames-1206786293-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 674.514930] env[61839]: DEBUG oslo_vmware.api [None req-06f607c6-5c8c-4086-84f6-74587be86126 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] Task: {'id': task-1314294, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.480271} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 674.515452] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-06f607c6-5c8c-4086-84f6-74587be86126 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk to [datastore1] 81ba4888-4b21-410f-ab86-a3068995836f/81ba4888-4b21-410f-ab86-a3068995836f.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 674.515718] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-06f607c6-5c8c-4086-84f6-74587be86126 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] [instance: 81ba4888-4b21-410f-ab86-a3068995836f] Extending root virtual disk to 1048576 {{(pid=61839) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 674.516015] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-754efdcb-6f15-41c2-92bf-3c74621de4b4 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.521832] env[61839]: DEBUG oslo_vmware.api [None req-06f607c6-5c8c-4086-84f6-74587be86126 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] Waiting for the task: (returnval){ [ 674.521832] env[61839]: value = "task-1314295" [ 674.521832] env[61839]: _type = "Task" [ 674.521832] env[61839]: } to complete. 
{{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 674.530223] env[61839]: DEBUG oslo_vmware.api [None req-06f607c6-5c8c-4086-84f6-74587be86126 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] Task: {'id': task-1314295, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 674.772961] env[61839]: DEBUG nova.scheduler.client.report [None req-7cd3e466-5165-49d9-934b-74ecfbe9a0b9 tempest-ServersV294TestFqdnHostnames-1206786293 tempest-ServersV294TestFqdnHostnames-1206786293-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 674.785565] env[61839]: DEBUG nova.compute.manager [None req-9e174940-155b-4c43-946d-348c70405c68 tempest-ServersTestJSON-188267051 tempest-ServersTestJSON-188267051-project-member] [instance: 2619b21e-084f-4003-af13-80382bfb1e2f] Start spawning the instance on the hypervisor. {{(pid=61839) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 674.824923] env[61839]: DEBUG nova.virt.hardware [None req-9e174940-155b-4c43-946d-348c70405c68 tempest-ServersTestJSON-188267051 tempest-ServersTestJSON-188267051-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-18T16:51:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-18T16:50:57Z,direct_url=,disk_format='vmdk',id=e497cc62-282a-4a70-9770-22d80d8a1013,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a98e034276e44534a9feace637762da7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-18T16:50:58Z,virtual_size=,visibility=), allow threads: False {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 674.825194] env[61839]: DEBUG nova.virt.hardware [None req-9e174940-155b-4c43-946d-348c70405c68 tempest-ServersTestJSON-188267051 tempest-ServersTestJSON-188267051-project-member] Flavor limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 674.825345] env[61839]: DEBUG nova.virt.hardware [None req-9e174940-155b-4c43-946d-348c70405c68 tempest-ServersTestJSON-188267051 tempest-ServersTestJSON-188267051-project-member] Image limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 674.825520] env[61839]: DEBUG nova.virt.hardware [None req-9e174940-155b-4c43-946d-348c70405c68 tempest-ServersTestJSON-188267051 tempest-ServersTestJSON-188267051-project-member] Flavor pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 674.825669] env[61839]: 
DEBUG nova.virt.hardware [None req-9e174940-155b-4c43-946d-348c70405c68 tempest-ServersTestJSON-188267051 tempest-ServersTestJSON-188267051-project-member] Image pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 674.825980] env[61839]: DEBUG nova.virt.hardware [None req-9e174940-155b-4c43-946d-348c70405c68 tempest-ServersTestJSON-188267051 tempest-ServersTestJSON-188267051-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 674.826160] env[61839]: DEBUG nova.virt.hardware [None req-9e174940-155b-4c43-946d-348c70405c68 tempest-ServersTestJSON-188267051 tempest-ServersTestJSON-188267051-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 674.826320] env[61839]: DEBUG nova.virt.hardware [None req-9e174940-155b-4c43-946d-348c70405c68 tempest-ServersTestJSON-188267051 tempest-ServersTestJSON-188267051-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 674.826542] env[61839]: DEBUG nova.virt.hardware [None req-9e174940-155b-4c43-946d-348c70405c68 tempest-ServersTestJSON-188267051 tempest-ServersTestJSON-188267051-project-member] Got 1 possible topologies {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 674.826726] env[61839]: DEBUG nova.virt.hardware [None req-9e174940-155b-4c43-946d-348c70405c68 tempest-ServersTestJSON-188267051 tempest-ServersTestJSON-188267051-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 674.827339] env[61839]: DEBUG nova.virt.hardware [None req-9e174940-155b-4c43-946d-348c70405c68 tempest-ServersTestJSON-188267051 tempest-ServersTestJSON-188267051-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 674.828208] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-849b4130-fd3b-4c2c-8ee3-7ab968b7eea8 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.837363] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64c98848-e71e-4f62-97a0-b8a911f68f2d {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.997860] env[61839]: DEBUG nova.compute.manager [req-5d4c341e-9114-4258-8ced-866631a71433 req-8532419d-74fa-48ff-a4f1-159f6ddab76a service nova] [instance: 2619b21e-084f-4003-af13-80382bfb1e2f] Received event network-changed-7ae9091f-df7a-4c82-90df-81b17f49b944 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 674.998082] env[61839]: DEBUG nova.compute.manager [req-5d4c341e-9114-4258-8ced-866631a71433 req-8532419d-74fa-48ff-a4f1-159f6ddab76a service nova] [instance: 2619b21e-084f-4003-af13-80382bfb1e2f] Refreshing instance network info cache due to event network-changed-7ae9091f-df7a-4c82-90df-81b17f49b944. 
{{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 674.998344] env[61839]: DEBUG oslo_concurrency.lockutils [req-5d4c341e-9114-4258-8ced-866631a71433 req-8532419d-74fa-48ff-a4f1-159f6ddab76a service nova] Acquiring lock "refresh_cache-2619b21e-084f-4003-af13-80382bfb1e2f" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 674.998444] env[61839]: DEBUG oslo_concurrency.lockutils [req-5d4c341e-9114-4258-8ced-866631a71433 req-8532419d-74fa-48ff-a4f1-159f6ddab76a service nova] Acquired lock "refresh_cache-2619b21e-084f-4003-af13-80382bfb1e2f" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 674.998634] env[61839]: DEBUG nova.network.neutron [req-5d4c341e-9114-4258-8ced-866631a71433 req-8532419d-74fa-48ff-a4f1-159f6ddab76a service nova] [instance: 2619b21e-084f-4003-af13-80382bfb1e2f] Refreshing network info cache for port 7ae9091f-df7a-4c82-90df-81b17f49b944 {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 675.031372] env[61839]: DEBUG oslo_vmware.api [None req-06f607c6-5c8c-4086-84f6-74587be86126 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] Task: {'id': task-1314295, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.057021} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 675.031866] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-06f607c6-5c8c-4086-84f6-74587be86126 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] [instance: 81ba4888-4b21-410f-ab86-a3068995836f] Extended root virtual disk {{(pid=61839) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 675.032697] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86e62656-1f83-476e-8866-dd7bc9e127ce {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.052700] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-06f607c6-5c8c-4086-84f6-74587be86126 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] [instance: 81ba4888-4b21-410f-ab86-a3068995836f] Reconfiguring VM instance instance-00000020 to attach disk [datastore1] 81ba4888-4b21-410f-ab86-a3068995836f/81ba4888-4b21-410f-ab86-a3068995836f.vmdk or device None with type sparse {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 675.052901] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0106efbc-ec86-4a1b-9bdb-db51e7c9a7e2 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.073125] env[61839]: DEBUG oslo_vmware.api [None req-06f607c6-5c8c-4086-84f6-74587be86126 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] Waiting for the task: (returnval){ [ 675.073125] env[61839]: value = "task-1314296" [ 675.073125] env[61839]: _type = "Task" [ 675.073125] env[61839]: } to complete. 
{{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 675.080681] env[61839]: DEBUG oslo_vmware.api [None req-06f607c6-5c8c-4086-84f6-74587be86126 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] Task: {'id': task-1314296, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 675.138862] env[61839]: ERROR nova.compute.manager [None req-9e174940-155b-4c43-946d-348c70405c68 tempest-ServersTestJSON-188267051 tempest-ServersTestJSON-188267051-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 7ae9091f-df7a-4c82-90df-81b17f49b944, please check neutron logs for more information. [ 675.138862] env[61839]: ERROR nova.compute.manager Traceback (most recent call last): [ 675.138862] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 675.138862] env[61839]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 675.138862] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 675.138862] env[61839]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 675.138862] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 675.138862] env[61839]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 675.138862] env[61839]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 675.138862] env[61839]: ERROR nova.compute.manager self.force_reraise() [ 675.138862] env[61839]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 675.138862] env[61839]: ERROR nova.compute.manager raise self.value [ 675.138862] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 675.138862] env[61839]: ERROR nova.compute.manager updated_port = self._update_port( [ 675.138862] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 675.138862] env[61839]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 675.139339] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 675.139339] env[61839]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 675.139339] env[61839]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 7ae9091f-df7a-4c82-90df-81b17f49b944, please check neutron logs for more information. 
[ 675.139339] env[61839]: ERROR nova.compute.manager [ 675.139339] env[61839]: Traceback (most recent call last): [ 675.139339] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 675.139339] env[61839]: listener.cb(fileno) [ 675.139339] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 675.139339] env[61839]: result = function(*args, **kwargs) [ 675.139339] env[61839]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 675.139339] env[61839]: return func(*args, **kwargs) [ 675.139339] env[61839]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 675.139339] env[61839]: raise e [ 675.139339] env[61839]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 675.139339] env[61839]: nwinfo = self.network_api.allocate_for_instance( [ 675.139339] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 675.139339] env[61839]: created_port_ids = self._update_ports_for_instance( [ 675.139339] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 675.139339] env[61839]: with excutils.save_and_reraise_exception(): [ 675.139339] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 675.139339] env[61839]: self.force_reraise() [ 675.139339] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 675.139339] env[61839]: raise self.value [ 675.139339] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 675.139339] env[61839]: updated_port = self._update_port( [ 675.139339] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 675.139339] env[61839]: _ensure_no_port_binding_failure(port) [ 675.139339] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 675.139339] env[61839]: raise exception.PortBindingFailed(port_id=port['id']) [ 675.140501] env[61839]: nova.exception.PortBindingFailed: Binding failed for port 7ae9091f-df7a-4c82-90df-81b17f49b944, please check neutron logs for more information. [ 675.140501] env[61839]: Removing descriptor: 17 [ 675.140501] env[61839]: ERROR nova.compute.manager [None req-9e174940-155b-4c43-946d-348c70405c68 tempest-ServersTestJSON-188267051 tempest-ServersTestJSON-188267051-project-member] [instance: 2619b21e-084f-4003-af13-80382bfb1e2f] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 7ae9091f-df7a-4c82-90df-81b17f49b944, please check neutron logs for more information. 
[ 675.140501] env[61839]: ERROR nova.compute.manager [instance: 2619b21e-084f-4003-af13-80382bfb1e2f] Traceback (most recent call last): [ 675.140501] env[61839]: ERROR nova.compute.manager [instance: 2619b21e-084f-4003-af13-80382bfb1e2f] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 675.140501] env[61839]: ERROR nova.compute.manager [instance: 2619b21e-084f-4003-af13-80382bfb1e2f] yield resources [ 675.140501] env[61839]: ERROR nova.compute.manager [instance: 2619b21e-084f-4003-af13-80382bfb1e2f] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 675.140501] env[61839]: ERROR nova.compute.manager [instance: 2619b21e-084f-4003-af13-80382bfb1e2f] self.driver.spawn(context, instance, image_meta, [ 675.140501] env[61839]: ERROR nova.compute.manager [instance: 2619b21e-084f-4003-af13-80382bfb1e2f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 675.140501] env[61839]: ERROR nova.compute.manager [instance: 2619b21e-084f-4003-af13-80382bfb1e2f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 675.140501] env[61839]: ERROR nova.compute.manager [instance: 2619b21e-084f-4003-af13-80382bfb1e2f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 675.140501] env[61839]: ERROR nova.compute.manager [instance: 2619b21e-084f-4003-af13-80382bfb1e2f] vm_ref = self.build_virtual_machine(instance, [ 675.141185] env[61839]: ERROR nova.compute.manager [instance: 2619b21e-084f-4003-af13-80382bfb1e2f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 675.141185] env[61839]: ERROR nova.compute.manager [instance: 2619b21e-084f-4003-af13-80382bfb1e2f] vif_infos = vmwarevif.get_vif_info(self._session, [ 675.141185] env[61839]: ERROR nova.compute.manager [instance: 2619b21e-084f-4003-af13-80382bfb1e2f] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 675.141185] env[61839]: ERROR nova.compute.manager [instance: 2619b21e-084f-4003-af13-80382bfb1e2f] for vif in network_info: [ 675.141185] env[61839]: ERROR nova.compute.manager [instance: 2619b21e-084f-4003-af13-80382bfb1e2f] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 675.141185] env[61839]: ERROR nova.compute.manager [instance: 2619b21e-084f-4003-af13-80382bfb1e2f] return self._sync_wrapper(fn, *args, **kwargs) [ 675.141185] env[61839]: ERROR nova.compute.manager [instance: 2619b21e-084f-4003-af13-80382bfb1e2f] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 675.141185] env[61839]: ERROR nova.compute.manager [instance: 2619b21e-084f-4003-af13-80382bfb1e2f] self.wait() [ 675.141185] env[61839]: ERROR nova.compute.manager [instance: 2619b21e-084f-4003-af13-80382bfb1e2f] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 675.141185] env[61839]: ERROR nova.compute.manager [instance: 2619b21e-084f-4003-af13-80382bfb1e2f] self[:] = self._gt.wait() [ 675.141185] env[61839]: ERROR nova.compute.manager [instance: 2619b21e-084f-4003-af13-80382bfb1e2f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 675.141185] env[61839]: ERROR nova.compute.manager [instance: 2619b21e-084f-4003-af13-80382bfb1e2f] return self._exit_event.wait() [ 675.141185] env[61839]: ERROR nova.compute.manager [instance: 2619b21e-084f-4003-af13-80382bfb1e2f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 675.141695] env[61839]: ERROR 
nova.compute.manager [instance: 2619b21e-084f-4003-af13-80382bfb1e2f] result = hub.switch() [ 675.141695] env[61839]: ERROR nova.compute.manager [instance: 2619b21e-084f-4003-af13-80382bfb1e2f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 675.141695] env[61839]: ERROR nova.compute.manager [instance: 2619b21e-084f-4003-af13-80382bfb1e2f] return self.greenlet.switch() [ 675.141695] env[61839]: ERROR nova.compute.manager [instance: 2619b21e-084f-4003-af13-80382bfb1e2f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 675.141695] env[61839]: ERROR nova.compute.manager [instance: 2619b21e-084f-4003-af13-80382bfb1e2f] result = function(*args, **kwargs) [ 675.141695] env[61839]: ERROR nova.compute.manager [instance: 2619b21e-084f-4003-af13-80382bfb1e2f] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 675.141695] env[61839]: ERROR nova.compute.manager [instance: 2619b21e-084f-4003-af13-80382bfb1e2f] return func(*args, **kwargs) [ 675.141695] env[61839]: ERROR nova.compute.manager [instance: 2619b21e-084f-4003-af13-80382bfb1e2f] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 675.141695] env[61839]: ERROR nova.compute.manager [instance: 2619b21e-084f-4003-af13-80382bfb1e2f] raise e [ 675.141695] env[61839]: ERROR nova.compute.manager [instance: 2619b21e-084f-4003-af13-80382bfb1e2f] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 675.141695] env[61839]: ERROR nova.compute.manager [instance: 2619b21e-084f-4003-af13-80382bfb1e2f] nwinfo = self.network_api.allocate_for_instance( [ 675.141695] env[61839]: ERROR nova.compute.manager [instance: 2619b21e-084f-4003-af13-80382bfb1e2f] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 675.141695] env[61839]: ERROR nova.compute.manager [instance: 2619b21e-084f-4003-af13-80382bfb1e2f] created_port_ids = self._update_ports_for_instance( [ 675.142291] env[61839]: ERROR nova.compute.manager [instance: 2619b21e-084f-4003-af13-80382bfb1e2f] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 675.142291] env[61839]: ERROR nova.compute.manager [instance: 2619b21e-084f-4003-af13-80382bfb1e2f] with excutils.save_and_reraise_exception(): [ 675.142291] env[61839]: ERROR nova.compute.manager [instance: 2619b21e-084f-4003-af13-80382bfb1e2f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 675.142291] env[61839]: ERROR nova.compute.manager [instance: 2619b21e-084f-4003-af13-80382bfb1e2f] self.force_reraise() [ 675.142291] env[61839]: ERROR nova.compute.manager [instance: 2619b21e-084f-4003-af13-80382bfb1e2f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 675.142291] env[61839]: ERROR nova.compute.manager [instance: 2619b21e-084f-4003-af13-80382bfb1e2f] raise self.value [ 675.142291] env[61839]: ERROR nova.compute.manager [instance: 2619b21e-084f-4003-af13-80382bfb1e2f] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 675.142291] env[61839]: ERROR nova.compute.manager [instance: 2619b21e-084f-4003-af13-80382bfb1e2f] updated_port = self._update_port( [ 675.142291] env[61839]: ERROR nova.compute.manager [instance: 2619b21e-084f-4003-af13-80382bfb1e2f] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 675.142291] 
env[61839]: ERROR nova.compute.manager [instance: 2619b21e-084f-4003-af13-80382bfb1e2f] _ensure_no_port_binding_failure(port) [ 675.142291] env[61839]: ERROR nova.compute.manager [instance: 2619b21e-084f-4003-af13-80382bfb1e2f] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 675.142291] env[61839]: ERROR nova.compute.manager [instance: 2619b21e-084f-4003-af13-80382bfb1e2f] raise exception.PortBindingFailed(port_id=port['id']) [ 675.142816] env[61839]: ERROR nova.compute.manager [instance: 2619b21e-084f-4003-af13-80382bfb1e2f] nova.exception.PortBindingFailed: Binding failed for port 7ae9091f-df7a-4c82-90df-81b17f49b944, please check neutron logs for more information. [ 675.142816] env[61839]: ERROR nova.compute.manager [instance: 2619b21e-084f-4003-af13-80382bfb1e2f] [ 675.142816] env[61839]: INFO nova.compute.manager [None req-9e174940-155b-4c43-946d-348c70405c68 tempest-ServersTestJSON-188267051 tempest-ServersTestJSON-188267051-project-member] [instance: 2619b21e-084f-4003-af13-80382bfb1e2f] Terminating instance [ 675.142816] env[61839]: DEBUG oslo_concurrency.lockutils [None req-9e174940-155b-4c43-946d-348c70405c68 tempest-ServersTestJSON-188267051 tempest-ServersTestJSON-188267051-project-member] Acquiring lock "refresh_cache-2619b21e-084f-4003-af13-80382bfb1e2f" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 675.277765] env[61839]: DEBUG oslo_concurrency.lockutils [None req-7cd3e466-5165-49d9-934b-74ecfbe9a0b9 tempest-ServersV294TestFqdnHostnames-1206786293 tempest-ServersV294TestFqdnHostnames-1206786293-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.516s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 675.278312] env[61839]: DEBUG nova.compute.manager [None req-7cd3e466-5165-49d9-934b-74ecfbe9a0b9 tempest-ServersV294TestFqdnHostnames-1206786293 tempest-ServersV294TestFqdnHostnames-1206786293-project-member] [instance: 0ab450ab-6416-464d-8140-a8c320abb69c] Start building networks asynchronously for instance. {{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 675.281393] env[61839]: DEBUG oslo_concurrency.lockutils [None req-66411a82-44a4-4e09-b7e5-387750763c23 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.102s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 675.283397] env[61839]: INFO nova.compute.claims [None req-66411a82-44a4-4e09-b7e5-387750763c23 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 3ea7af26-14b2-4371-a4f4-48afc190d4bc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 675.514720] env[61839]: DEBUG nova.network.neutron [req-5d4c341e-9114-4258-8ced-866631a71433 req-8532419d-74fa-48ff-a4f1-159f6ddab76a service nova] [instance: 2619b21e-084f-4003-af13-80382bfb1e2f] Instance cache missing network info. 
{{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 675.583178] env[61839]: DEBUG oslo_vmware.api [None req-06f607c6-5c8c-4086-84f6-74587be86126 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] Task: {'id': task-1314296, 'name': ReconfigVM_Task, 'duration_secs': 0.266477} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 675.583534] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-06f607c6-5c8c-4086-84f6-74587be86126 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] [instance: 81ba4888-4b21-410f-ab86-a3068995836f] Reconfigured VM instance instance-00000020 to attach disk [datastore1] 81ba4888-4b21-410f-ab86-a3068995836f/81ba4888-4b21-410f-ab86-a3068995836f.vmdk or device None with type sparse {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 675.584050] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5bd30d23-d6b1-4869-a4c7-437defca4662 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.589518] env[61839]: DEBUG oslo_vmware.api [None req-06f607c6-5c8c-4086-84f6-74587be86126 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] Waiting for the task: (returnval){ [ 675.589518] env[61839]: value = "task-1314297" [ 675.589518] env[61839]: _type = "Task" [ 675.589518] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 675.596947] env[61839]: DEBUG oslo_vmware.api [None req-06f607c6-5c8c-4086-84f6-74587be86126 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] Task: {'id': task-1314297, 'name': Rename_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 675.604777] env[61839]: DEBUG nova.network.neutron [req-5d4c341e-9114-4258-8ced-866631a71433 req-8532419d-74fa-48ff-a4f1-159f6ddab76a service nova] [instance: 2619b21e-084f-4003-af13-80382bfb1e2f] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 675.787427] env[61839]: DEBUG nova.compute.utils [None req-7cd3e466-5165-49d9-934b-74ecfbe9a0b9 tempest-ServersV294TestFqdnHostnames-1206786293 tempest-ServersV294TestFqdnHostnames-1206786293-project-member] Using /dev/sd instead of None {{(pid=61839) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 675.792793] env[61839]: DEBUG nova.compute.manager [None req-7cd3e466-5165-49d9-934b-74ecfbe9a0b9 tempest-ServersV294TestFqdnHostnames-1206786293 tempest-ServersV294TestFqdnHostnames-1206786293-project-member] [instance: 0ab450ab-6416-464d-8140-a8c320abb69c] Allocating IP information in the background. 
{{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 675.792793] env[61839]: DEBUG nova.network.neutron [None req-7cd3e466-5165-49d9-934b-74ecfbe9a0b9 tempest-ServersV294TestFqdnHostnames-1206786293 tempest-ServersV294TestFqdnHostnames-1206786293-project-member] [instance: 0ab450ab-6416-464d-8140-a8c320abb69c] allocate_for_instance() {{(pid=61839) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 675.844780] env[61839]: DEBUG nova.policy [None req-7cd3e466-5165-49d9-934b-74ecfbe9a0b9 tempest-ServersV294TestFqdnHostnames-1206786293 tempest-ServersV294TestFqdnHostnames-1206786293-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '56b7564457f14d4787bf0974541c7e73', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c3b7f465364742f28ebd5937941e4589', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61839) authorize /opt/stack/nova/nova/policy.py:201}} [ 676.100934] env[61839]: DEBUG oslo_vmware.api [None req-06f607c6-5c8c-4086-84f6-74587be86126 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] Task: {'id': task-1314297, 'name': Rename_Task, 'duration_secs': 0.188126} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 676.101239] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-06f607c6-5c8c-4086-84f6-74587be86126 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] [instance: 81ba4888-4b21-410f-ab86-a3068995836f] Powering on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 676.101497] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4a88b7b0-399d-475a-a77b-0aab75273ed7 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.107411] env[61839]: DEBUG oslo_concurrency.lockutils [req-5d4c341e-9114-4258-8ced-866631a71433 req-8532419d-74fa-48ff-a4f1-159f6ddab76a service nova] Releasing lock "refresh_cache-2619b21e-084f-4003-af13-80382bfb1e2f" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 676.108899] env[61839]: DEBUG oslo_concurrency.lockutils [None req-9e174940-155b-4c43-946d-348c70405c68 tempest-ServersTestJSON-188267051 tempest-ServersTestJSON-188267051-project-member] Acquired lock "refresh_cache-2619b21e-084f-4003-af13-80382bfb1e2f" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 676.109379] env[61839]: DEBUG nova.network.neutron [None req-9e174940-155b-4c43-946d-348c70405c68 tempest-ServersTestJSON-188267051 tempest-ServersTestJSON-188267051-project-member] [instance: 2619b21e-084f-4003-af13-80382bfb1e2f] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 676.110369] env[61839]: DEBUG oslo_vmware.api [None req-06f607c6-5c8c-4086-84f6-74587be86126 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] Waiting for the task: (returnval){ [ 676.110369] env[61839]: value = "task-1314298" [ 676.110369] env[61839]: 
_type = "Task" [ 676.110369] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 676.122788] env[61839]: DEBUG oslo_vmware.api [None req-06f607c6-5c8c-4086-84f6-74587be86126 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] Task: {'id': task-1314298, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 676.261788] env[61839]: DEBUG nova.network.neutron [None req-7cd3e466-5165-49d9-934b-74ecfbe9a0b9 tempest-ServersV294TestFqdnHostnames-1206786293 tempest-ServersV294TestFqdnHostnames-1206786293-project-member] [instance: 0ab450ab-6416-464d-8140-a8c320abb69c] Successfully created port: 848a80b9-d429-4435-98ee-44663831bb58 {{(pid=61839) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 676.293377] env[61839]: DEBUG nova.compute.manager [None req-7cd3e466-5165-49d9-934b-74ecfbe9a0b9 tempest-ServersV294TestFqdnHostnames-1206786293 tempest-ServersV294TestFqdnHostnames-1206786293-project-member] [instance: 0ab450ab-6416-464d-8140-a8c320abb69c] Start building block device mappings for instance. {{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 676.623076] env[61839]: DEBUG oslo_vmware.api [None req-06f607c6-5c8c-4086-84f6-74587be86126 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] Task: {'id': task-1314298, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 676.641343] env[61839]: DEBUG nova.network.neutron [None req-9e174940-155b-4c43-946d-348c70405c68 tempest-ServersTestJSON-188267051 tempest-ServersTestJSON-188267051-project-member] [instance: 2619b21e-084f-4003-af13-80382bfb1e2f] Instance cache missing network info. 
{{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 676.778249] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f1ab325-ffc0-4671-b17a-25c085efe0e9 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.786708] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b77f727-e163-43b8-90b3-658bf8121812 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.821966] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bf26bf4-fbd2-4a1a-a348-912e4ec99fb7 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.830332] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-653071bb-8c91-4051-9905-070c5f4a05ee {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.844160] env[61839]: DEBUG nova.compute.provider_tree [None req-66411a82-44a4-4e09-b7e5-387750763c23 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 676.848429] env[61839]: DEBUG nova.network.neutron [None req-9e174940-155b-4c43-946d-348c70405c68 tempest-ServersTestJSON-188267051 tempest-ServersTestJSON-188267051-project-member] [instance: 2619b21e-084f-4003-af13-80382bfb1e2f] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 677.045198] env[61839]: DEBUG nova.compute.manager [req-d6a3281a-2f0e-4c8e-949c-c26ac6d8eb8d req-9b726d12-33f1-4ddf-a30b-d4bb38881f09 service nova] [instance: 2619b21e-084f-4003-af13-80382bfb1e2f] Received event network-vif-deleted-7ae9091f-df7a-4c82-90df-81b17f49b944 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 677.132023] env[61839]: DEBUG oslo_vmware.api [None req-06f607c6-5c8c-4086-84f6-74587be86126 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] Task: {'id': task-1314298, 'name': PowerOnVM_Task, 'duration_secs': 0.633455} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 677.132023] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-06f607c6-5c8c-4086-84f6-74587be86126 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] [instance: 81ba4888-4b21-410f-ab86-a3068995836f] Powered on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 677.132023] env[61839]: INFO nova.compute.manager [None req-06f607c6-5c8c-4086-84f6-74587be86126 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] [instance: 81ba4888-4b21-410f-ab86-a3068995836f] Took 4.80 seconds to spawn the instance on the hypervisor. 
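The Rename_Task/PowerOnVM_Task records above trace oslo.vmware's wait_for_task loop: invoke the vSphere task, then poll its state until it reports success, emitting a "progress is N%" DEBUG line on each pass and a duration_secs on completion. Below is a minimal sketch of that poll-until-done pattern; fetch_task_info is a hypothetical caller-supplied stand-in for reading the vSphere Task.info property, not the real oslo.vmware API.

    # Minimal poll-until-done sketch of the behaviour behind the
    # "Task: {'id': ..., 'name': PowerOnVM_Task} progress is N%" lines.
    # fetch_task_info is a hypothetical stand-in for the vSphere Task.info
    # read; it returns e.g. {'state': 'running', 'progress': 66}.
    import time

    def wait_for_task(fetch_task_info, poll_interval=0.5):
        start = time.monotonic()
        while True:
            info = fetch_task_info()
            if info['state'] == 'success':
                # mirrors the 'duration_secs' field logged on completion
                return {'duration_secs': round(time.monotonic() - start, 6)}
            if info['state'] == 'error':
                raise RuntimeError(info.get('error', 'task failed'))
            print(f"progress is {info.get('progress', 0)}%")  # the DEBUG lines
            time.sleep(poll_interval)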
[ 677.132023] env[61839]: DEBUG nova.compute.manager [None req-06f607c6-5c8c-4086-84f6-74587be86126 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] [instance: 81ba4888-4b21-410f-ab86-a3068995836f] Checking state {{(pid=61839) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 677.132023] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf2f34b1-923b-429b-a13b-9b7576cbf98f {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.329477] env[61839]: DEBUG nova.compute.manager [None req-7cd3e466-5165-49d9-934b-74ecfbe9a0b9 tempest-ServersV294TestFqdnHostnames-1206786293 tempest-ServersV294TestFqdnHostnames-1206786293-project-member] [instance: 0ab450ab-6416-464d-8140-a8c320abb69c] Start spawning the instance on the hypervisor. {{(pid=61839) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 677.348884] env[61839]: DEBUG nova.scheduler.client.report [None req-66411a82-44a4-4e09-b7e5-387750763c23 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 677.356392] env[61839]: DEBUG oslo_concurrency.lockutils [None req-9e174940-155b-4c43-946d-348c70405c68 tempest-ServersTestJSON-188267051 tempest-ServersTestJSON-188267051-project-member] Releasing lock "refresh_cache-2619b21e-084f-4003-af13-80382bfb1e2f" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 677.356392] env[61839]: DEBUG nova.compute.manager [None req-9e174940-155b-4c43-946d-348c70405c68 tempest-ServersTestJSON-188267051 tempest-ServersTestJSON-188267051-project-member] [instance: 2619b21e-084f-4003-af13-80382bfb1e2f] Start destroying the instance on the hypervisor. 
{{(pid=61839) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 677.356392] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-9e174940-155b-4c43-946d-348c70405c68 tempest-ServersTestJSON-188267051 tempest-ServersTestJSON-188267051-project-member] [instance: 2619b21e-084f-4003-af13-80382bfb1e2f] Destroying instance {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 677.356943] env[61839]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-470edfd2-06bc-4778-8d10-7ad837beced5 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.366165] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8828ad9c-26a6-476d-8733-f15676fa7a44 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.382062] env[61839]: DEBUG nova.virt.hardware [None req-7cd3e466-5165-49d9-934b-74ecfbe9a0b9 tempest-ServersV294TestFqdnHostnames-1206786293 tempest-ServersV294TestFqdnHostnames-1206786293-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-18T16:51:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-18T16:50:57Z,direct_url=,disk_format='vmdk',id=e497cc62-282a-4a70-9770-22d80d8a1013,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a98e034276e44534a9feace637762da7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-18T16:50:58Z,virtual_size=,visibility=), allow threads: False {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 677.382062] env[61839]: DEBUG nova.virt.hardware [None req-7cd3e466-5165-49d9-934b-74ecfbe9a0b9 tempest-ServersV294TestFqdnHostnames-1206786293 tempest-ServersV294TestFqdnHostnames-1206786293-project-member] Flavor limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 677.382062] env[61839]: DEBUG nova.virt.hardware [None req-7cd3e466-5165-49d9-934b-74ecfbe9a0b9 tempest-ServersV294TestFqdnHostnames-1206786293 tempest-ServersV294TestFqdnHostnames-1206786293-project-member] Image limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 677.382299] env[61839]: DEBUG nova.virt.hardware [None req-7cd3e466-5165-49d9-934b-74ecfbe9a0b9 tempest-ServersV294TestFqdnHostnames-1206786293 tempest-ServersV294TestFqdnHostnames-1206786293-project-member] Flavor pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 677.382299] env[61839]: DEBUG nova.virt.hardware [None req-7cd3e466-5165-49d9-934b-74ecfbe9a0b9 tempest-ServersV294TestFqdnHostnames-1206786293 tempest-ServersV294TestFqdnHostnames-1206786293-project-member] Image pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 677.382299] env[61839]: DEBUG nova.virt.hardware [None req-7cd3e466-5165-49d9-934b-74ecfbe9a0b9 tempest-ServersV294TestFqdnHostnames-1206786293 tempest-ServersV294TestFqdnHostnames-1206786293-project-member] Chose sockets=0, cores=0, threads=0; 
limits were sockets=65536, cores=65536, threads=65536 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 677.382299] env[61839]: DEBUG nova.virt.hardware [None req-7cd3e466-5165-49d9-934b-74ecfbe9a0b9 tempest-ServersV294TestFqdnHostnames-1206786293 tempest-ServersV294TestFqdnHostnames-1206786293-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 677.382299] env[61839]: DEBUG nova.virt.hardware [None req-7cd3e466-5165-49d9-934b-74ecfbe9a0b9 tempest-ServersV294TestFqdnHostnames-1206786293 tempest-ServersV294TestFqdnHostnames-1206786293-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 677.382761] env[61839]: DEBUG nova.virt.hardware [None req-7cd3e466-5165-49d9-934b-74ecfbe9a0b9 tempest-ServersV294TestFqdnHostnames-1206786293 tempest-ServersV294TestFqdnHostnames-1206786293-project-member] Got 1 possible topologies {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 677.382761] env[61839]: DEBUG nova.virt.hardware [None req-7cd3e466-5165-49d9-934b-74ecfbe9a0b9 tempest-ServersV294TestFqdnHostnames-1206786293 tempest-ServersV294TestFqdnHostnames-1206786293-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 677.383181] env[61839]: DEBUG nova.virt.hardware [None req-7cd3e466-5165-49d9-934b-74ecfbe9a0b9 tempest-ServersV294TestFqdnHostnames-1206786293 tempest-ServersV294TestFqdnHostnames-1206786293-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 677.384335] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2f7f7e5-e653-4e99-a6cb-d2b0c336df8a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.393254] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e4e223f-50f1-4e6c-84b8-874bd85c7fea {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.404589] env[61839]: WARNING nova.virt.vmwareapi.vmops [None req-9e174940-155b-4c43-946d-348c70405c68 tempest-ServersTestJSON-188267051 tempest-ServersTestJSON-188267051-project-member] [instance: 2619b21e-084f-4003-af13-80382bfb1e2f] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 2619b21e-084f-4003-af13-80382bfb1e2f could not be found. [ 677.404962] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-9e174940-155b-4c43-946d-348c70405c68 tempest-ServersTestJSON-188267051 tempest-ServersTestJSON-188267051-project-member] [instance: 2619b21e-084f-4003-af13-80382bfb1e2f] Instance destroyed {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 677.405033] env[61839]: INFO nova.compute.manager [None req-9e174940-155b-4c43-946d-348c70405c68 tempest-ServersTestJSON-188267051 tempest-ServersTestJSON-188267051-project-member] [instance: 2619b21e-084f-4003-af13-80382bfb1e2f] Took 0.05 seconds to destroy the instance on the hypervisor. 
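The nova.virt.hardware DEBUG trail above (flavor/image limits unset, so a 65536 ceiling per dimension; one candidate; chose sockets=1, cores=1, threads=1) is the CPU-topology enumeration for the 1-vCPU m1.nano flavor. A rough sketch of that enumeration under the same defaults follows; VirtCPUTopology is simplified to a namedtuple and candidates to exact factorisations, which is narrower than Nova's actual object and filtering.

    # Rough sketch of the topology enumeration the hardware.py lines trace.
    from collections import namedtuple

    VirtCPUTopology = namedtuple('VirtCPUTopology', 'sockets cores threads')
    MAX = 65536  # default ceiling when neither flavor nor image sets a limit

    def possible_topologies(vcpus, maximum=VirtCPUTopology(MAX, MAX, MAX)):
        # yield every sockets*cores*threads combination that exactly fits vcpus
        for s in range(1, min(vcpus, maximum.sockets) + 1):
            for c in range(1, min(vcpus, maximum.cores) + 1):
                for t in range(1, min(vcpus, maximum.threads) + 1):
                    if s * c * t == vcpus:
                        yield VirtCPUTopology(s, c, t)

    # for 1 vCPU the only candidate is 1:1:1, matching "Got 1 possible topologies"
    print(list(possible_topologies(1)))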
[ 677.405304] env[61839]: DEBUG oslo.service.loopingcall [None req-9e174940-155b-4c43-946d-348c70405c68 tempest-ServersTestJSON-188267051 tempest-ServersTestJSON-188267051-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 677.405870] env[61839]: DEBUG nova.compute.manager [-] [instance: 2619b21e-084f-4003-af13-80382bfb1e2f] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 677.405967] env[61839]: DEBUG nova.network.neutron [-] [instance: 2619b21e-084f-4003-af13-80382bfb1e2f] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 677.435508] env[61839]: DEBUG nova.network.neutron [-] [instance: 2619b21e-084f-4003-af13-80382bfb1e2f] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 677.651108] env[61839]: INFO nova.compute.manager [None req-06f607c6-5c8c-4086-84f6-74587be86126 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] [instance: 81ba4888-4b21-410f-ab86-a3068995836f] Took 28.58 seconds to build instance. [ 677.750541] env[61839]: ERROR nova.compute.manager [None req-7cd3e466-5165-49d9-934b-74ecfbe9a0b9 tempest-ServersV294TestFqdnHostnames-1206786293 tempest-ServersV294TestFqdnHostnames-1206786293-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 848a80b9-d429-4435-98ee-44663831bb58, please check neutron logs for more information. [ 677.750541] env[61839]: ERROR nova.compute.manager Traceback (most recent call last): [ 677.750541] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 677.750541] env[61839]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 677.750541] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 677.750541] env[61839]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 677.750541] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 677.750541] env[61839]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 677.750541] env[61839]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 677.750541] env[61839]: ERROR nova.compute.manager self.force_reraise() [ 677.750541] env[61839]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 677.750541] env[61839]: ERROR nova.compute.manager raise self.value [ 677.750541] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 677.750541] env[61839]: ERROR nova.compute.manager updated_port = self._update_port( [ 677.750541] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 677.750541] env[61839]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 677.751505] env[61839]: ERROR nova.compute.manager File
"/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 677.751505] env[61839]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 677.751505] env[61839]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 848a80b9-d429-4435-98ee-44663831bb58, please check neutron logs for more information. [ 677.751505] env[61839]: ERROR nova.compute.manager [ 677.751505] env[61839]: Traceback (most recent call last): [ 677.751505] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 677.751505] env[61839]: listener.cb(fileno) [ 677.751505] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 677.751505] env[61839]: result = function(*args, **kwargs) [ 677.751505] env[61839]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 677.751505] env[61839]: return func(*args, **kwargs) [ 677.751505] env[61839]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 677.751505] env[61839]: raise e [ 677.751505] env[61839]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 677.751505] env[61839]: nwinfo = self.network_api.allocate_for_instance( [ 677.751505] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 677.751505] env[61839]: created_port_ids = self._update_ports_for_instance( [ 677.751505] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 677.751505] env[61839]: with excutils.save_and_reraise_exception(): [ 677.751505] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 677.751505] env[61839]: self.force_reraise() [ 677.751505] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 677.751505] env[61839]: raise self.value [ 677.751505] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 677.751505] env[61839]: updated_port = self._update_port( [ 677.751505] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 677.751505] env[61839]: _ensure_no_port_binding_failure(port) [ 677.751505] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 677.751505] env[61839]: raise exception.PortBindingFailed(port_id=port['id']) [ 677.752358] env[61839]: nova.exception.PortBindingFailed: Binding failed for port 848a80b9-d429-4435-98ee-44663831bb58, please check neutron logs for more information. [ 677.752358] env[61839]: Removing descriptor: 17 [ 677.752358] env[61839]: ERROR nova.compute.manager [None req-7cd3e466-5165-49d9-934b-74ecfbe9a0b9 tempest-ServersV294TestFqdnHostnames-1206786293 tempest-ServersV294TestFqdnHostnames-1206786293-project-member] [instance: 0ab450ab-6416-464d-8140-a8c320abb69c] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 848a80b9-d429-4435-98ee-44663831bb58, please check neutron logs for more information. 
[ 677.752358] env[61839]: ERROR nova.compute.manager [instance: 0ab450ab-6416-464d-8140-a8c320abb69c] Traceback (most recent call last): [ 677.752358] env[61839]: ERROR nova.compute.manager [instance: 0ab450ab-6416-464d-8140-a8c320abb69c] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 677.752358] env[61839]: ERROR nova.compute.manager [instance: 0ab450ab-6416-464d-8140-a8c320abb69c] yield resources [ 677.752358] env[61839]: ERROR nova.compute.manager [instance: 0ab450ab-6416-464d-8140-a8c320abb69c] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 677.752358] env[61839]: ERROR nova.compute.manager [instance: 0ab450ab-6416-464d-8140-a8c320abb69c] self.driver.spawn(context, instance, image_meta, [ 677.752358] env[61839]: ERROR nova.compute.manager [instance: 0ab450ab-6416-464d-8140-a8c320abb69c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 677.752358] env[61839]: ERROR nova.compute.manager [instance: 0ab450ab-6416-464d-8140-a8c320abb69c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 677.752358] env[61839]: ERROR nova.compute.manager [instance: 0ab450ab-6416-464d-8140-a8c320abb69c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 677.752358] env[61839]: ERROR nova.compute.manager [instance: 0ab450ab-6416-464d-8140-a8c320abb69c] vm_ref = self.build_virtual_machine(instance, [ 677.753239] env[61839]: ERROR nova.compute.manager [instance: 0ab450ab-6416-464d-8140-a8c320abb69c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 677.753239] env[61839]: ERROR nova.compute.manager [instance: 0ab450ab-6416-464d-8140-a8c320abb69c] vif_infos = vmwarevif.get_vif_info(self._session, [ 677.753239] env[61839]: ERROR nova.compute.manager [instance: 0ab450ab-6416-464d-8140-a8c320abb69c] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 677.753239] env[61839]: ERROR nova.compute.manager [instance: 0ab450ab-6416-464d-8140-a8c320abb69c] for vif in network_info: [ 677.753239] env[61839]: ERROR nova.compute.manager [instance: 0ab450ab-6416-464d-8140-a8c320abb69c] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 677.753239] env[61839]: ERROR nova.compute.manager [instance: 0ab450ab-6416-464d-8140-a8c320abb69c] return self._sync_wrapper(fn, *args, **kwargs) [ 677.753239] env[61839]: ERROR nova.compute.manager [instance: 0ab450ab-6416-464d-8140-a8c320abb69c] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 677.753239] env[61839]: ERROR nova.compute.manager [instance: 0ab450ab-6416-464d-8140-a8c320abb69c] self.wait() [ 677.753239] env[61839]: ERROR nova.compute.manager [instance: 0ab450ab-6416-464d-8140-a8c320abb69c] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 677.753239] env[61839]: ERROR nova.compute.manager [instance: 0ab450ab-6416-464d-8140-a8c320abb69c] self[:] = self._gt.wait() [ 677.753239] env[61839]: ERROR nova.compute.manager [instance: 0ab450ab-6416-464d-8140-a8c320abb69c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 677.753239] env[61839]: ERROR nova.compute.manager [instance: 0ab450ab-6416-464d-8140-a8c320abb69c] return self._exit_event.wait() [ 677.753239] env[61839]: ERROR nova.compute.manager [instance: 0ab450ab-6416-464d-8140-a8c320abb69c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 677.753640] env[61839]: ERROR 
nova.compute.manager [instance: 0ab450ab-6416-464d-8140-a8c320abb69c] result = hub.switch() [ 677.753640] env[61839]: ERROR nova.compute.manager [instance: 0ab450ab-6416-464d-8140-a8c320abb69c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 677.753640] env[61839]: ERROR nova.compute.manager [instance: 0ab450ab-6416-464d-8140-a8c320abb69c] return self.greenlet.switch() [ 677.753640] env[61839]: ERROR nova.compute.manager [instance: 0ab450ab-6416-464d-8140-a8c320abb69c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 677.753640] env[61839]: ERROR nova.compute.manager [instance: 0ab450ab-6416-464d-8140-a8c320abb69c] result = function(*args, **kwargs) [ 677.753640] env[61839]: ERROR nova.compute.manager [instance: 0ab450ab-6416-464d-8140-a8c320abb69c] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 677.753640] env[61839]: ERROR nova.compute.manager [instance: 0ab450ab-6416-464d-8140-a8c320abb69c] return func(*args, **kwargs) [ 677.753640] env[61839]: ERROR nova.compute.manager [instance: 0ab450ab-6416-464d-8140-a8c320abb69c] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 677.753640] env[61839]: ERROR nova.compute.manager [instance: 0ab450ab-6416-464d-8140-a8c320abb69c] raise e [ 677.753640] env[61839]: ERROR nova.compute.manager [instance: 0ab450ab-6416-464d-8140-a8c320abb69c] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 677.753640] env[61839]: ERROR nova.compute.manager [instance: 0ab450ab-6416-464d-8140-a8c320abb69c] nwinfo = self.network_api.allocate_for_instance( [ 677.753640] env[61839]: ERROR nova.compute.manager [instance: 0ab450ab-6416-464d-8140-a8c320abb69c] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 677.753640] env[61839]: ERROR nova.compute.manager [instance: 0ab450ab-6416-464d-8140-a8c320abb69c] created_port_ids = self._update_ports_for_instance( [ 677.754025] env[61839]: ERROR nova.compute.manager [instance: 0ab450ab-6416-464d-8140-a8c320abb69c] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 677.754025] env[61839]: ERROR nova.compute.manager [instance: 0ab450ab-6416-464d-8140-a8c320abb69c] with excutils.save_and_reraise_exception(): [ 677.754025] env[61839]: ERROR nova.compute.manager [instance: 0ab450ab-6416-464d-8140-a8c320abb69c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 677.754025] env[61839]: ERROR nova.compute.manager [instance: 0ab450ab-6416-464d-8140-a8c320abb69c] self.force_reraise() [ 677.754025] env[61839]: ERROR nova.compute.manager [instance: 0ab450ab-6416-464d-8140-a8c320abb69c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 677.754025] env[61839]: ERROR nova.compute.manager [instance: 0ab450ab-6416-464d-8140-a8c320abb69c] raise self.value [ 677.754025] env[61839]: ERROR nova.compute.manager [instance: 0ab450ab-6416-464d-8140-a8c320abb69c] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 677.754025] env[61839]: ERROR nova.compute.manager [instance: 0ab450ab-6416-464d-8140-a8c320abb69c] updated_port = self._update_port( [ 677.754025] env[61839]: ERROR nova.compute.manager [instance: 0ab450ab-6416-464d-8140-a8c320abb69c] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 677.754025] 
env[61839]: ERROR nova.compute.manager [instance: 0ab450ab-6416-464d-8140-a8c320abb69c] _ensure_no_port_binding_failure(port) [ 677.754025] env[61839]: ERROR nova.compute.manager [instance: 0ab450ab-6416-464d-8140-a8c320abb69c] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 677.754025] env[61839]: ERROR nova.compute.manager [instance: 0ab450ab-6416-464d-8140-a8c320abb69c] raise exception.PortBindingFailed(port_id=port['id']) [ 677.754437] env[61839]: ERROR nova.compute.manager [instance: 0ab450ab-6416-464d-8140-a8c320abb69c] nova.exception.PortBindingFailed: Binding failed for port 848a80b9-d429-4435-98ee-44663831bb58, please check neutron logs for more information. [ 677.754437] env[61839]: ERROR nova.compute.manager [instance: 0ab450ab-6416-464d-8140-a8c320abb69c] [ 677.754437] env[61839]: INFO nova.compute.manager [None req-7cd3e466-5165-49d9-934b-74ecfbe9a0b9 tempest-ServersV294TestFqdnHostnames-1206786293 tempest-ServersV294TestFqdnHostnames-1206786293-project-member] [instance: 0ab450ab-6416-464d-8140-a8c320abb69c] Terminating instance [ 677.755289] env[61839]: DEBUG oslo_concurrency.lockutils [None req-7cd3e466-5165-49d9-934b-74ecfbe9a0b9 tempest-ServersV294TestFqdnHostnames-1206786293 tempest-ServersV294TestFqdnHostnames-1206786293-project-member] Acquiring lock "refresh_cache-0ab450ab-6416-464d-8140-a8c320abb69c" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 677.755458] env[61839]: DEBUG oslo_concurrency.lockutils [None req-7cd3e466-5165-49d9-934b-74ecfbe9a0b9 tempest-ServersV294TestFqdnHostnames-1206786293 tempest-ServersV294TestFqdnHostnames-1206786293-project-member] Acquired lock "refresh_cache-0ab450ab-6416-464d-8140-a8c320abb69c" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 677.755661] env[61839]: DEBUG nova.network.neutron [None req-7cd3e466-5165-49d9-934b-74ecfbe9a0b9 tempest-ServersV294TestFqdnHostnames-1206786293 tempest-ServersV294TestFqdnHostnames-1206786293-project-member] [instance: 0ab450ab-6416-464d-8140-a8c320abb69c] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 677.858462] env[61839]: DEBUG oslo_concurrency.lockutils [None req-66411a82-44a4-4e09-b7e5-387750763c23 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.577s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 677.858983] env[61839]: DEBUG nova.compute.manager [None req-66411a82-44a4-4e09-b7e5-387750763c23 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 3ea7af26-14b2-4371-a4f4-48afc190d4bc] Start building networks asynchronously for instance. 
{{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 677.861882] env[61839]: DEBUG oslo_concurrency.lockutils [None req-76059687-6f91-475a-bfaf-52787dc520ba tempest-AttachInterfacesUnderV243Test-1125158786 tempest-AttachInterfacesUnderV243Test-1125158786-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 19.676s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 677.938024] env[61839]: DEBUG nova.network.neutron [-] [instance: 2619b21e-084f-4003-af13-80382bfb1e2f] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 677.993077] env[61839]: INFO nova.compute.manager [None req-bf14e7b3-0ebe-4036-b698-ec3c92db8125 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] [instance: 81ba4888-4b21-410f-ab86-a3068995836f] Rebuilding instance [ 678.037808] env[61839]: DEBUG nova.compute.manager [None req-bf14e7b3-0ebe-4036-b698-ec3c92db8125 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] [instance: 81ba4888-4b21-410f-ab86-a3068995836f] Checking state {{(pid=61839) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 678.038343] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca37dc74-3d35-4d3d-ac44-75955b41380b {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.152802] env[61839]: DEBUG oslo_concurrency.lockutils [None req-06f607c6-5c8c-4086-84f6-74587be86126 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] Lock "81ba4888-4b21-410f-ab86-a3068995836f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 106.461s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 678.271482] env[61839]: DEBUG nova.network.neutron [None req-7cd3e466-5165-49d9-934b-74ecfbe9a0b9 tempest-ServersV294TestFqdnHostnames-1206786293 tempest-ServersV294TestFqdnHostnames-1206786293-project-member] [instance: 0ab450ab-6416-464d-8140-a8c320abb69c] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 678.366130] env[61839]: DEBUG nova.compute.utils [None req-66411a82-44a4-4e09-b7e5-387750763c23 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Using /dev/sd instead of None {{(pid=61839) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 678.371714] env[61839]: DEBUG nova.network.neutron [None req-7cd3e466-5165-49d9-934b-74ecfbe9a0b9 tempest-ServersV294TestFqdnHostnames-1206786293 tempest-ServersV294TestFqdnHostnames-1206786293-project-member] [instance: 0ab450ab-6416-464d-8140-a8c320abb69c] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 678.374050] env[61839]: DEBUG nova.compute.manager [None req-66411a82-44a4-4e09-b7e5-387750763c23 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 3ea7af26-14b2-4371-a4f4-48afc190d4bc] Allocating IP information in the background.
{{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 678.374050] env[61839]: DEBUG nova.network.neutron [None req-66411a82-44a4-4e09-b7e5-387750763c23 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 3ea7af26-14b2-4371-a4f4-48afc190d4bc] allocate_for_instance() {{(pid=61839) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 678.411848] env[61839]: DEBUG nova.policy [None req-66411a82-44a4-4e09-b7e5-387750763c23 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8677a31386a54087b2328734c2eadeb3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8935bcc7ee644cb7a2a33557a708189c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61839) authorize /opt/stack/nova/nova/policy.py:201}} [ 678.440541] env[61839]: INFO nova.compute.manager [-] [instance: 2619b21e-084f-4003-af13-80382bfb1e2f] Took 1.03 seconds to deallocate network for instance. [ 678.443481] env[61839]: DEBUG nova.compute.claims [None req-9e174940-155b-4c43-946d-348c70405c68 tempest-ServersTestJSON-188267051 tempest-ServersTestJSON-188267051-project-member] [instance: 2619b21e-084f-4003-af13-80382bfb1e2f] Aborting claim: {{(pid=61839) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 678.443717] env[61839]: DEBUG oslo_concurrency.lockutils [None req-9e174940-155b-4c43-946d-348c70405c68 tempest-ServersTestJSON-188267051 tempest-ServersTestJSON-188267051-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 678.548896] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf14e7b3-0ebe-4036-b698-ec3c92db8125 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] [instance: 81ba4888-4b21-410f-ab86-a3068995836f] Powering off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 678.549176] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f014faa7-224f-4143-a034-bc530af69fe6 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.563617] env[61839]: DEBUG oslo_vmware.api [None req-bf14e7b3-0ebe-4036-b698-ec3c92db8125 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] Waiting for the task: (returnval){ [ 678.563617] env[61839]: value = "task-1314299" [ 678.563617] env[61839]: _type = "Task" [ 678.563617] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 678.576938] env[61839]: DEBUG oslo_vmware.api [None req-bf14e7b3-0ebe-4036-b698-ec3c92db8125 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] Task: {'id': task-1314299, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 678.654917] env[61839]: DEBUG nova.compute.manager [None req-a61be0be-1acd-4bdd-b910-e27219e7a065 tempest-ServersAdminNegativeTestJSON-965769092 tempest-ServersAdminNegativeTestJSON-965769092-project-member] [instance: 5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8] Starting instance... {{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 678.771096] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1a5f1de-fae6-4eaf-86cc-77c9c2329c93 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.779391] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57990a74-da69-4de8-a018-2a2a36d93216 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.809224] env[61839]: DEBUG nova.network.neutron [None req-66411a82-44a4-4e09-b7e5-387750763c23 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 3ea7af26-14b2-4371-a4f4-48afc190d4bc] Successfully created port: 7edfbbec-8cb7-4e20-ac8f-419464c8aac9 {{(pid=61839) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 678.811720] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9eb2ccdd-c32c-4d20-8b70-e2f70866cc88 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.819972] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40798f86-a57c-45b4-9513-1140861740b6 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.834201] env[61839]: DEBUG nova.compute.provider_tree [None req-76059687-6f91-475a-bfaf-52787dc520ba tempest-AttachInterfacesUnderV243Test-1125158786 tempest-AttachInterfacesUnderV243Test-1125158786-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 678.875019] env[61839]: DEBUG nova.compute.manager [None req-66411a82-44a4-4e09-b7e5-387750763c23 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 3ea7af26-14b2-4371-a4f4-48afc190d4bc] Start building block device mappings for instance. {{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 678.875019] env[61839]: DEBUG oslo_concurrency.lockutils [None req-7cd3e466-5165-49d9-934b-74ecfbe9a0b9 tempest-ServersV294TestFqdnHostnames-1206786293 tempest-ServersV294TestFqdnHostnames-1206786293-project-member] Releasing lock "refresh_cache-0ab450ab-6416-464d-8140-a8c320abb69c" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 678.875577] env[61839]: DEBUG nova.compute.manager [None req-7cd3e466-5165-49d9-934b-74ecfbe9a0b9 tempest-ServersV294TestFqdnHostnames-1206786293 tempest-ServersV294TestFqdnHostnames-1206786293-project-member] [instance: 0ab450ab-6416-464d-8140-a8c320abb69c] Start destroying the instance on the hypervisor. 
{{(pid=61839) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 678.875882] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-7cd3e466-5165-49d9-934b-74ecfbe9a0b9 tempest-ServersV294TestFqdnHostnames-1206786293 tempest-ServersV294TestFqdnHostnames-1206786293-project-member] [instance: 0ab450ab-6416-464d-8140-a8c320abb69c] Destroying instance {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 678.876672] env[61839]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9186dae3-2a4a-4702-862f-7ecc567ddd57 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.886744] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39c26836-1959-4268-9aeb-4e03f94162f4 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.911158] env[61839]: WARNING nova.virt.vmwareapi.vmops [None req-7cd3e466-5165-49d9-934b-74ecfbe9a0b9 tempest-ServersV294TestFqdnHostnames-1206786293 tempest-ServersV294TestFqdnHostnames-1206786293-project-member] [instance: 0ab450ab-6416-464d-8140-a8c320abb69c] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 0ab450ab-6416-464d-8140-a8c320abb69c could not be found. [ 678.911555] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-7cd3e466-5165-49d9-934b-74ecfbe9a0b9 tempest-ServersV294TestFqdnHostnames-1206786293 tempest-ServersV294TestFqdnHostnames-1206786293-project-member] [instance: 0ab450ab-6416-464d-8140-a8c320abb69c] Instance destroyed {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 678.911848] env[61839]: INFO nova.compute.manager [None req-7cd3e466-5165-49d9-934b-74ecfbe9a0b9 tempest-ServersV294TestFqdnHostnames-1206786293 tempest-ServersV294TestFqdnHostnames-1206786293-project-member] [instance: 0ab450ab-6416-464d-8140-a8c320abb69c] Took 0.04 seconds to destroy the instance on the hypervisor. [ 678.912211] env[61839]: DEBUG oslo.service.loopingcall [None req-7cd3e466-5165-49d9-934b-74ecfbe9a0b9 tempest-ServersV294TestFqdnHostnames-1206786293 tempest-ServersV294TestFqdnHostnames-1206786293-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 678.912975] env[61839]: DEBUG nova.compute.manager [-] [instance: 0ab450ab-6416-464d-8140-a8c320abb69c] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 678.913186] env[61839]: DEBUG nova.network.neutron [-] [instance: 0ab450ab-6416-464d-8140-a8c320abb69c] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 678.940421] env[61839]: DEBUG nova.network.neutron [-] [instance: 0ab450ab-6416-464d-8140-a8c320abb69c] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 679.075221] env[61839]: DEBUG oslo_vmware.api [None req-bf14e7b3-0ebe-4036-b698-ec3c92db8125 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] Task: {'id': task-1314299, 'name': PowerOffVM_Task, 'duration_secs': 0.115569} completed successfully.
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 679.075510] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf14e7b3-0ebe-4036-b698-ec3c92db8125 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] [instance: 81ba4888-4b21-410f-ab86-a3068995836f] Powered off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 679.075757] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-bf14e7b3-0ebe-4036-b698-ec3c92db8125 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] [instance: 81ba4888-4b21-410f-ab86-a3068995836f] Destroying instance {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 679.076539] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b7eea8d-4ec5-4843-a879-7cdfade560ae {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.084044] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-bf14e7b3-0ebe-4036-b698-ec3c92db8125 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] [instance: 81ba4888-4b21-410f-ab86-a3068995836f] Unregistering the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 679.084314] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fbbae310-5b85-4211-9c0e-f2f9eefd7525 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.093631] env[61839]: DEBUG nova.compute.manager [req-08274a9e-33fc-40a7-be18-04d2ee915305 req-9d88ba02-f367-4086-99cc-2d287946e572 service nova] [instance: 0ab450ab-6416-464d-8140-a8c320abb69c] Received event network-changed-848a80b9-d429-4435-98ee-44663831bb58 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 679.094067] env[61839]: DEBUG nova.compute.manager [req-08274a9e-33fc-40a7-be18-04d2ee915305 req-9d88ba02-f367-4086-99cc-2d287946e572 service nova] [instance: 0ab450ab-6416-464d-8140-a8c320abb69c] Refreshing instance network info cache due to event network-changed-848a80b9-d429-4435-98ee-44663831bb58. 
{{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 679.094067] env[61839]: DEBUG oslo_concurrency.lockutils [req-08274a9e-33fc-40a7-be18-04d2ee915305 req-9d88ba02-f367-4086-99cc-2d287946e572 service nova] Acquiring lock "refresh_cache-0ab450ab-6416-464d-8140-a8c320abb69c" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 679.094211] env[61839]: DEBUG oslo_concurrency.lockutils [req-08274a9e-33fc-40a7-be18-04d2ee915305 req-9d88ba02-f367-4086-99cc-2d287946e572 service nova] Acquired lock "refresh_cache-0ab450ab-6416-464d-8140-a8c320abb69c" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 679.094341] env[61839]: DEBUG nova.network.neutron [req-08274a9e-33fc-40a7-be18-04d2ee915305 req-9d88ba02-f367-4086-99cc-2d287946e572 service nova] [instance: 0ab450ab-6416-464d-8140-a8c320abb69c] Refreshing network info cache for port 848a80b9-d429-4435-98ee-44663831bb58 {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 679.111567] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-bf14e7b3-0ebe-4036-b698-ec3c92db8125 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] [instance: 81ba4888-4b21-410f-ab86-a3068995836f] Unregistered the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 679.111780] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-bf14e7b3-0ebe-4036-b698-ec3c92db8125 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] [instance: 81ba4888-4b21-410f-ab86-a3068995836f] Deleting contents of the VM from datastore datastore1 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 679.111956] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-bf14e7b3-0ebe-4036-b698-ec3c92db8125 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] Deleting the datastore file [datastore1] 81ba4888-4b21-410f-ab86-a3068995836f {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 679.112231] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-06417899-b1dd-4ca2-bf5d-aff979e8bfdb {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.119874] env[61839]: DEBUG oslo_vmware.api [None req-bf14e7b3-0ebe-4036-b698-ec3c92db8125 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] Waiting for the task: (returnval){ [ 679.119874] env[61839]: value = "task-1314301" [ 679.119874] env[61839]: _type = "Task" [ 679.119874] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 679.128383] env[61839]: DEBUG oslo_vmware.api [None req-bf14e7b3-0ebe-4036-b698-ec3c92db8125 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] Task: {'id': task-1314301, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 679.181947] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a61be0be-1acd-4bdd-b910-e27219e7a065 tempest-ServersAdminNegativeTestJSON-965769092 tempest-ServersAdminNegativeTestJSON-965769092-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 679.338054] env[61839]: DEBUG nova.scheduler.client.report [None req-76059687-6f91-475a-bfaf-52787dc520ba tempest-AttachInterfacesUnderV243Test-1125158786 tempest-AttachInterfacesUnderV243Test-1125158786-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 679.441711] env[61839]: DEBUG nova.network.neutron [-] [instance: 0ab450ab-6416-464d-8140-a8c320abb69c] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 679.624769] env[61839]: DEBUG nova.network.neutron [req-08274a9e-33fc-40a7-be18-04d2ee915305 req-9d88ba02-f367-4086-99cc-2d287946e572 service nova] [instance: 0ab450ab-6416-464d-8140-a8c320abb69c] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 679.632105] env[61839]: DEBUG oslo_vmware.api [None req-bf14e7b3-0ebe-4036-b698-ec3c92db8125 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] Task: {'id': task-1314301, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.084598} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 679.632383] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-bf14e7b3-0ebe-4036-b698-ec3c92db8125 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] Deleted the datastore file {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 679.632564] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-bf14e7b3-0ebe-4036-b698-ec3c92db8125 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] [instance: 81ba4888-4b21-410f-ab86-a3068995836f] Deleted contents of the VM from datastore datastore1 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 679.632773] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-bf14e7b3-0ebe-4036-b698-ec3c92db8125 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] [instance: 81ba4888-4b21-410f-ab86-a3068995836f] Instance destroyed {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 679.793866] env[61839]: DEBUG nova.network.neutron [req-08274a9e-33fc-40a7-be18-04d2ee915305 req-9d88ba02-f367-4086-99cc-2d287946e572 service nova] [instance: 0ab450ab-6416-464d-8140-a8c320abb69c] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 679.843866] env[61839]: DEBUG oslo_concurrency.lockutils [None req-76059687-6f91-475a-bfaf-52787dc520ba tempest-AttachInterfacesUnderV243Test-1125158786 tempest-AttachInterfacesUnderV243Test-1125158786-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.981s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 679.843866] env[61839]: ERROR nova.compute.manager [None req-76059687-6f91-475a-bfaf-52787dc520ba tempest-AttachInterfacesUnderV243Test-1125158786 tempest-AttachInterfacesUnderV243Test-1125158786-project-member] [instance: 328882b4-d1af-4036-b313-ecada7d53899] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 6ebbd250-cd9f-40d5-a18c-b3210da6abe9, please check neutron logs for more information. 
[ 679.843866] env[61839]: ERROR nova.compute.manager [instance: 328882b4-d1af-4036-b313-ecada7d53899] Traceback (most recent call last): [ 679.843866] env[61839]: ERROR nova.compute.manager [instance: 328882b4-d1af-4036-b313-ecada7d53899] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 679.843866] env[61839]: ERROR nova.compute.manager [instance: 328882b4-d1af-4036-b313-ecada7d53899] self.driver.spawn(context, instance, image_meta, [ 679.843866] env[61839]: ERROR nova.compute.manager [instance: 328882b4-d1af-4036-b313-ecada7d53899] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 679.843866] env[61839]: ERROR nova.compute.manager [instance: 328882b4-d1af-4036-b313-ecada7d53899] self._vmops.spawn(context, instance, image_meta, injected_files, [ 679.843866] env[61839]: ERROR nova.compute.manager [instance: 328882b4-d1af-4036-b313-ecada7d53899] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 679.843866] env[61839]: ERROR nova.compute.manager [instance: 328882b4-d1af-4036-b313-ecada7d53899] vm_ref = self.build_virtual_machine(instance, [ 679.844170] env[61839]: ERROR nova.compute.manager [instance: 328882b4-d1af-4036-b313-ecada7d53899] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 679.844170] env[61839]: ERROR nova.compute.manager [instance: 328882b4-d1af-4036-b313-ecada7d53899] vif_infos = vmwarevif.get_vif_info(self._session, [ 679.844170] env[61839]: ERROR nova.compute.manager [instance: 328882b4-d1af-4036-b313-ecada7d53899] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 679.844170] env[61839]: ERROR nova.compute.manager [instance: 328882b4-d1af-4036-b313-ecada7d53899] for vif in network_info: [ 679.844170] env[61839]: ERROR nova.compute.manager [instance: 328882b4-d1af-4036-b313-ecada7d53899] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 679.844170] env[61839]: ERROR nova.compute.manager [instance: 328882b4-d1af-4036-b313-ecada7d53899] return self._sync_wrapper(fn, *args, **kwargs) [ 679.844170] env[61839]: ERROR nova.compute.manager [instance: 328882b4-d1af-4036-b313-ecada7d53899] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 679.844170] env[61839]: ERROR nova.compute.manager [instance: 328882b4-d1af-4036-b313-ecada7d53899] self.wait() [ 679.844170] env[61839]: ERROR nova.compute.manager [instance: 328882b4-d1af-4036-b313-ecada7d53899] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 679.844170] env[61839]: ERROR nova.compute.manager [instance: 328882b4-d1af-4036-b313-ecada7d53899] self[:] = self._gt.wait() [ 679.844170] env[61839]: ERROR nova.compute.manager [instance: 328882b4-d1af-4036-b313-ecada7d53899] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 679.844170] env[61839]: ERROR nova.compute.manager [instance: 328882b4-d1af-4036-b313-ecada7d53899] return self._exit_event.wait() [ 679.844170] env[61839]: ERROR nova.compute.manager [instance: 328882b4-d1af-4036-b313-ecada7d53899] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 679.844546] env[61839]: ERROR nova.compute.manager [instance: 328882b4-d1af-4036-b313-ecada7d53899] result = hub.switch() [ 679.844546] env[61839]: ERROR nova.compute.manager [instance: 328882b4-d1af-4036-b313-ecada7d53899] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
679.844546] env[61839]: ERROR nova.compute.manager [instance: 328882b4-d1af-4036-b313-ecada7d53899] return self.greenlet.switch() [ 679.844546] env[61839]: ERROR nova.compute.manager [instance: 328882b4-d1af-4036-b313-ecada7d53899] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 679.844546] env[61839]: ERROR nova.compute.manager [instance: 328882b4-d1af-4036-b313-ecada7d53899] result = function(*args, **kwargs) [ 679.844546] env[61839]: ERROR nova.compute.manager [instance: 328882b4-d1af-4036-b313-ecada7d53899] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 679.844546] env[61839]: ERROR nova.compute.manager [instance: 328882b4-d1af-4036-b313-ecada7d53899] return func(*args, **kwargs) [ 679.844546] env[61839]: ERROR nova.compute.manager [instance: 328882b4-d1af-4036-b313-ecada7d53899] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 679.844546] env[61839]: ERROR nova.compute.manager [instance: 328882b4-d1af-4036-b313-ecada7d53899] raise e [ 679.844546] env[61839]: ERROR nova.compute.manager [instance: 328882b4-d1af-4036-b313-ecada7d53899] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 679.844546] env[61839]: ERROR nova.compute.manager [instance: 328882b4-d1af-4036-b313-ecada7d53899] nwinfo = self.network_api.allocate_for_instance( [ 679.844546] env[61839]: ERROR nova.compute.manager [instance: 328882b4-d1af-4036-b313-ecada7d53899] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 679.844546] env[61839]: ERROR nova.compute.manager [instance: 328882b4-d1af-4036-b313-ecada7d53899] created_port_ids = self._update_ports_for_instance( [ 679.844945] env[61839]: ERROR nova.compute.manager [instance: 328882b4-d1af-4036-b313-ecada7d53899] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 679.844945] env[61839]: ERROR nova.compute.manager [instance: 328882b4-d1af-4036-b313-ecada7d53899] with excutils.save_and_reraise_exception(): [ 679.844945] env[61839]: ERROR nova.compute.manager [instance: 328882b4-d1af-4036-b313-ecada7d53899] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 679.844945] env[61839]: ERROR nova.compute.manager [instance: 328882b4-d1af-4036-b313-ecada7d53899] self.force_reraise() [ 679.844945] env[61839]: ERROR nova.compute.manager [instance: 328882b4-d1af-4036-b313-ecada7d53899] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 679.844945] env[61839]: ERROR nova.compute.manager [instance: 328882b4-d1af-4036-b313-ecada7d53899] raise self.value [ 679.844945] env[61839]: ERROR nova.compute.manager [instance: 328882b4-d1af-4036-b313-ecada7d53899] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 679.844945] env[61839]: ERROR nova.compute.manager [instance: 328882b4-d1af-4036-b313-ecada7d53899] updated_port = self._update_port( [ 679.844945] env[61839]: ERROR nova.compute.manager [instance: 328882b4-d1af-4036-b313-ecada7d53899] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 679.844945] env[61839]: ERROR nova.compute.manager [instance: 328882b4-d1af-4036-b313-ecada7d53899] _ensure_no_port_binding_failure(port) [ 679.844945] env[61839]: ERROR nova.compute.manager [instance: 328882b4-d1af-4036-b313-ecada7d53899] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 679.844945] env[61839]: ERROR nova.compute.manager [instance: 328882b4-d1af-4036-b313-ecada7d53899] raise exception.PortBindingFailed(port_id=port['id']) [ 679.845258] env[61839]: ERROR nova.compute.manager [instance: 328882b4-d1af-4036-b313-ecada7d53899] nova.exception.PortBindingFailed: Binding failed for port 6ebbd250-cd9f-40d5-a18c-b3210da6abe9, please check neutron logs for more information. [ 679.845258] env[61839]: ERROR nova.compute.manager [instance: 328882b4-d1af-4036-b313-ecada7d53899] [ 679.845258] env[61839]: DEBUG nova.compute.utils [None req-76059687-6f91-475a-bfaf-52787dc520ba tempest-AttachInterfacesUnderV243Test-1125158786 tempest-AttachInterfacesUnderV243Test-1125158786-project-member] [instance: 328882b4-d1af-4036-b313-ecada7d53899] Binding failed for port 6ebbd250-cd9f-40d5-a18c-b3210da6abe9, please check neutron logs for more information. {{(pid=61839) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 679.845375] env[61839]: DEBUG oslo_concurrency.lockutils [None req-176fdd3e-e9a1-456d-89bc-e1597ce200e4 tempest-ImagesOneServerTestJSON-1580027351 tempest-ImagesOneServerTestJSON-1580027351-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.992s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 679.846910] env[61839]: INFO nova.compute.claims [None req-176fdd3e-e9a1-456d-89bc-e1597ce200e4 tempest-ImagesOneServerTestJSON-1580027351 tempest-ImagesOneServerTestJSON-1580027351-project-member] [instance: ad32bc49-5e52-468a-9d93-390c8649dcae] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 679.849480] env[61839]: DEBUG nova.compute.manager [None req-76059687-6f91-475a-bfaf-52787dc520ba tempest-AttachInterfacesUnderV243Test-1125158786 tempest-AttachInterfacesUnderV243Test-1125158786-project-member] [instance: 328882b4-d1af-4036-b313-ecada7d53899] Build of instance 328882b4-d1af-4036-b313-ecada7d53899 was re-scheduled: Binding failed for port 6ebbd250-cd9f-40d5-a18c-b3210da6abe9, please check neutron logs for more information. 
{{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 679.849881] env[61839]: DEBUG nova.compute.manager [None req-76059687-6f91-475a-bfaf-52787dc520ba tempest-AttachInterfacesUnderV243Test-1125158786 tempest-AttachInterfacesUnderV243Test-1125158786-project-member] [instance: 328882b4-d1af-4036-b313-ecada7d53899] Unplugging VIFs for instance {{(pid=61839) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 679.850097] env[61839]: DEBUG oslo_concurrency.lockutils [None req-76059687-6f91-475a-bfaf-52787dc520ba tempest-AttachInterfacesUnderV243Test-1125158786 tempest-AttachInterfacesUnderV243Test-1125158786-project-member] Acquiring lock "refresh_cache-328882b4-d1af-4036-b313-ecada7d53899" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 679.850410] env[61839]: DEBUG oslo_concurrency.lockutils [None req-76059687-6f91-475a-bfaf-52787dc520ba tempest-AttachInterfacesUnderV243Test-1125158786 tempest-AttachInterfacesUnderV243Test-1125158786-project-member] Acquired lock "refresh_cache-328882b4-d1af-4036-b313-ecada7d53899" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 679.850410] env[61839]: DEBUG nova.network.neutron [None req-76059687-6f91-475a-bfaf-52787dc520ba tempest-AttachInterfacesUnderV243Test-1125158786 tempest-AttachInterfacesUnderV243Test-1125158786-project-member] [instance: 328882b4-d1af-4036-b313-ecada7d53899] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 679.888769] env[61839]: DEBUG nova.compute.manager [None req-66411a82-44a4-4e09-b7e5-387750763c23 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 3ea7af26-14b2-4371-a4f4-48afc190d4bc] Start spawning the instance on the hypervisor. 
{{(pid=61839) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 679.920056] env[61839]: DEBUG nova.virt.hardware [None req-66411a82-44a4-4e09-b7e5-387750763c23 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-18T16:51:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-18T16:50:57Z,direct_url=,disk_format='vmdk',id=e497cc62-282a-4a70-9770-22d80d8a1013,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a98e034276e44534a9feace637762da7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-18T16:50:58Z,virtual_size=,visibility=), allow threads: False {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 679.920394] env[61839]: DEBUG nova.virt.hardware [None req-66411a82-44a4-4e09-b7e5-387750763c23 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Flavor limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 679.920631] env[61839]: DEBUG nova.virt.hardware [None req-66411a82-44a4-4e09-b7e5-387750763c23 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Image limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 679.921028] env[61839]: DEBUG nova.virt.hardware [None req-66411a82-44a4-4e09-b7e5-387750763c23 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Flavor pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 679.921166] env[61839]: DEBUG nova.virt.hardware [None req-66411a82-44a4-4e09-b7e5-387750763c23 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Image pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 679.921377] env[61839]: DEBUG nova.virt.hardware [None req-66411a82-44a4-4e09-b7e5-387750763c23 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 679.921643] env[61839]: DEBUG nova.virt.hardware [None req-66411a82-44a4-4e09-b7e5-387750763c23 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 679.921859] env[61839]: DEBUG nova.virt.hardware [None req-66411a82-44a4-4e09-b7e5-387750763c23 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 679.922093] env[61839]: DEBUG 
nova.virt.hardware [None req-66411a82-44a4-4e09-b7e5-387750763c23 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Got 1 possible topologies {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 679.922319] env[61839]: DEBUG nova.virt.hardware [None req-66411a82-44a4-4e09-b7e5-387750763c23 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 679.922550] env[61839]: DEBUG nova.virt.hardware [None req-66411a82-44a4-4e09-b7e5-387750763c23 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 679.923465] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a82d86bb-1331-4893-9078-9661e2b33a9d {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.932176] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c9507f5-d7cf-47f4-ac6a-3e40d3790785 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.948882] env[61839]: INFO nova.compute.manager [-] [instance: 0ab450ab-6416-464d-8140-a8c320abb69c] Took 1.04 seconds to deallocate network for instance. [ 679.953428] env[61839]: DEBUG nova.compute.claims [None req-7cd3e466-5165-49d9-934b-74ecfbe9a0b9 tempest-ServersV294TestFqdnHostnames-1206786293 tempest-ServersV294TestFqdnHostnames-1206786293-project-member] [instance: 0ab450ab-6416-464d-8140-a8c320abb69c] Aborting claim: {{(pid=61839) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 679.953705] env[61839]: DEBUG oslo_concurrency.lockutils [None req-7cd3e466-5165-49d9-934b-74ecfbe9a0b9 tempest-ServersV294TestFqdnHostnames-1206786293 tempest-ServersV294TestFqdnHostnames-1206786293-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 680.297013] env[61839]: DEBUG oslo_concurrency.lockutils [req-08274a9e-33fc-40a7-be18-04d2ee915305 req-9d88ba02-f367-4086-99cc-2d287946e572 service nova] Releasing lock "refresh_cache-0ab450ab-6416-464d-8140-a8c320abb69c" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 680.297316] env[61839]: DEBUG nova.compute.manager [req-08274a9e-33fc-40a7-be18-04d2ee915305 req-9d88ba02-f367-4086-99cc-2d287946e572 service nova] [instance: 0ab450ab-6416-464d-8140-a8c320abb69c] Received event network-vif-deleted-848a80b9-d429-4435-98ee-44663831bb58 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 680.310256] env[61839]: ERROR nova.compute.manager [None req-66411a82-44a4-4e09-b7e5-387750763c23 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 7edfbbec-8cb7-4e20-ac8f-419464c8aac9, please check neutron logs for more information. 
[ 680.310256] env[61839]: ERROR nova.compute.manager Traceback (most recent call last): [ 680.310256] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 680.310256] env[61839]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 680.310256] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 680.310256] env[61839]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 680.310256] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 680.310256] env[61839]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 680.310256] env[61839]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 680.310256] env[61839]: ERROR nova.compute.manager self.force_reraise() [ 680.310256] env[61839]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 680.310256] env[61839]: ERROR nova.compute.manager raise self.value [ 680.310256] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 680.310256] env[61839]: ERROR nova.compute.manager updated_port = self._update_port( [ 680.310256] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 680.310256] env[61839]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 680.310775] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 680.310775] env[61839]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 680.310775] env[61839]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 7edfbbec-8cb7-4e20-ac8f-419464c8aac9, please check neutron logs for more information. 
[ 680.310775] env[61839]: ERROR nova.compute.manager [ 680.310775] env[61839]: Traceback (most recent call last): [ 680.310775] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 680.310775] env[61839]: listener.cb(fileno) [ 680.310775] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 680.310775] env[61839]: result = function(*args, **kwargs) [ 680.310775] env[61839]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 680.310775] env[61839]: return func(*args, **kwargs) [ 680.310775] env[61839]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 680.310775] env[61839]: raise e [ 680.310775] env[61839]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 680.310775] env[61839]: nwinfo = self.network_api.allocate_for_instance( [ 680.310775] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 680.310775] env[61839]: created_port_ids = self._update_ports_for_instance( [ 680.310775] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 680.310775] env[61839]: with excutils.save_and_reraise_exception(): [ 680.310775] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 680.310775] env[61839]: self.force_reraise() [ 680.310775] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 680.310775] env[61839]: raise self.value [ 680.310775] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 680.310775] env[61839]: updated_port = self._update_port( [ 680.310775] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 680.310775] env[61839]: _ensure_no_port_binding_failure(port) [ 680.310775] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 680.310775] env[61839]: raise exception.PortBindingFailed(port_id=port['id']) [ 680.311658] env[61839]: nova.exception.PortBindingFailed: Binding failed for port 7edfbbec-8cb7-4e20-ac8f-419464c8aac9, please check neutron logs for more information. [ 680.311658] env[61839]: Removing descriptor: 17 [ 680.311658] env[61839]: ERROR nova.compute.manager [None req-66411a82-44a4-4e09-b7e5-387750763c23 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 3ea7af26-14b2-4371-a4f4-48afc190d4bc] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 7edfbbec-8cb7-4e20-ac8f-419464c8aac9, please check neutron logs for more information. 
[ 680.311658] env[61839]: ERROR nova.compute.manager [instance: 3ea7af26-14b2-4371-a4f4-48afc190d4bc] Traceback (most recent call last): [ 680.311658] env[61839]: ERROR nova.compute.manager [instance: 3ea7af26-14b2-4371-a4f4-48afc190d4bc] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 680.311658] env[61839]: ERROR nova.compute.manager [instance: 3ea7af26-14b2-4371-a4f4-48afc190d4bc] yield resources [ 680.311658] env[61839]: ERROR nova.compute.manager [instance: 3ea7af26-14b2-4371-a4f4-48afc190d4bc] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 680.311658] env[61839]: ERROR nova.compute.manager [instance: 3ea7af26-14b2-4371-a4f4-48afc190d4bc] self.driver.spawn(context, instance, image_meta, [ 680.311658] env[61839]: ERROR nova.compute.manager [instance: 3ea7af26-14b2-4371-a4f4-48afc190d4bc] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 680.311658] env[61839]: ERROR nova.compute.manager [instance: 3ea7af26-14b2-4371-a4f4-48afc190d4bc] self._vmops.spawn(context, instance, image_meta, injected_files, [ 680.311658] env[61839]: ERROR nova.compute.manager [instance: 3ea7af26-14b2-4371-a4f4-48afc190d4bc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 680.311658] env[61839]: ERROR nova.compute.manager [instance: 3ea7af26-14b2-4371-a4f4-48afc190d4bc] vm_ref = self.build_virtual_machine(instance, [ 680.312025] env[61839]: ERROR nova.compute.manager [instance: 3ea7af26-14b2-4371-a4f4-48afc190d4bc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 680.312025] env[61839]: ERROR nova.compute.manager [instance: 3ea7af26-14b2-4371-a4f4-48afc190d4bc] vif_infos = vmwarevif.get_vif_info(self._session, [ 680.312025] env[61839]: ERROR nova.compute.manager [instance: 3ea7af26-14b2-4371-a4f4-48afc190d4bc] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 680.312025] env[61839]: ERROR nova.compute.manager [instance: 3ea7af26-14b2-4371-a4f4-48afc190d4bc] for vif in network_info: [ 680.312025] env[61839]: ERROR nova.compute.manager [instance: 3ea7af26-14b2-4371-a4f4-48afc190d4bc] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 680.312025] env[61839]: ERROR nova.compute.manager [instance: 3ea7af26-14b2-4371-a4f4-48afc190d4bc] return self._sync_wrapper(fn, *args, **kwargs) [ 680.312025] env[61839]: ERROR nova.compute.manager [instance: 3ea7af26-14b2-4371-a4f4-48afc190d4bc] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 680.312025] env[61839]: ERROR nova.compute.manager [instance: 3ea7af26-14b2-4371-a4f4-48afc190d4bc] self.wait() [ 680.312025] env[61839]: ERROR nova.compute.manager [instance: 3ea7af26-14b2-4371-a4f4-48afc190d4bc] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 680.312025] env[61839]: ERROR nova.compute.manager [instance: 3ea7af26-14b2-4371-a4f4-48afc190d4bc] self[:] = self._gt.wait() [ 680.312025] env[61839]: ERROR nova.compute.manager [instance: 3ea7af26-14b2-4371-a4f4-48afc190d4bc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 680.312025] env[61839]: ERROR nova.compute.manager [instance: 3ea7af26-14b2-4371-a4f4-48afc190d4bc] return self._exit_event.wait() [ 680.312025] env[61839]: ERROR nova.compute.manager [instance: 3ea7af26-14b2-4371-a4f4-48afc190d4bc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 680.312490] env[61839]: ERROR 
nova.compute.manager [instance: 3ea7af26-14b2-4371-a4f4-48afc190d4bc] result = hub.switch() [ 680.312490] env[61839]: ERROR nova.compute.manager [instance: 3ea7af26-14b2-4371-a4f4-48afc190d4bc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 680.312490] env[61839]: ERROR nova.compute.manager [instance: 3ea7af26-14b2-4371-a4f4-48afc190d4bc] return self.greenlet.switch() [ 680.312490] env[61839]: ERROR nova.compute.manager [instance: 3ea7af26-14b2-4371-a4f4-48afc190d4bc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 680.312490] env[61839]: ERROR nova.compute.manager [instance: 3ea7af26-14b2-4371-a4f4-48afc190d4bc] result = function(*args, **kwargs) [ 680.312490] env[61839]: ERROR nova.compute.manager [instance: 3ea7af26-14b2-4371-a4f4-48afc190d4bc] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 680.312490] env[61839]: ERROR nova.compute.manager [instance: 3ea7af26-14b2-4371-a4f4-48afc190d4bc] return func(*args, **kwargs) [ 680.312490] env[61839]: ERROR nova.compute.manager [instance: 3ea7af26-14b2-4371-a4f4-48afc190d4bc] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 680.312490] env[61839]: ERROR nova.compute.manager [instance: 3ea7af26-14b2-4371-a4f4-48afc190d4bc] raise e [ 680.312490] env[61839]: ERROR nova.compute.manager [instance: 3ea7af26-14b2-4371-a4f4-48afc190d4bc] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 680.312490] env[61839]: ERROR nova.compute.manager [instance: 3ea7af26-14b2-4371-a4f4-48afc190d4bc] nwinfo = self.network_api.allocate_for_instance( [ 680.312490] env[61839]: ERROR nova.compute.manager [instance: 3ea7af26-14b2-4371-a4f4-48afc190d4bc] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 680.312490] env[61839]: ERROR nova.compute.manager [instance: 3ea7af26-14b2-4371-a4f4-48afc190d4bc] created_port_ids = self._update_ports_for_instance( [ 680.312902] env[61839]: ERROR nova.compute.manager [instance: 3ea7af26-14b2-4371-a4f4-48afc190d4bc] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 680.312902] env[61839]: ERROR nova.compute.manager [instance: 3ea7af26-14b2-4371-a4f4-48afc190d4bc] with excutils.save_and_reraise_exception(): [ 680.312902] env[61839]: ERROR nova.compute.manager [instance: 3ea7af26-14b2-4371-a4f4-48afc190d4bc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 680.312902] env[61839]: ERROR nova.compute.manager [instance: 3ea7af26-14b2-4371-a4f4-48afc190d4bc] self.force_reraise() [ 680.312902] env[61839]: ERROR nova.compute.manager [instance: 3ea7af26-14b2-4371-a4f4-48afc190d4bc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 680.312902] env[61839]: ERROR nova.compute.manager [instance: 3ea7af26-14b2-4371-a4f4-48afc190d4bc] raise self.value [ 680.312902] env[61839]: ERROR nova.compute.manager [instance: 3ea7af26-14b2-4371-a4f4-48afc190d4bc] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 680.312902] env[61839]: ERROR nova.compute.manager [instance: 3ea7af26-14b2-4371-a4f4-48afc190d4bc] updated_port = self._update_port( [ 680.312902] env[61839]: ERROR nova.compute.manager [instance: 3ea7af26-14b2-4371-a4f4-48afc190d4bc] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 680.312902] 
env[61839]: ERROR nova.compute.manager [instance: 3ea7af26-14b2-4371-a4f4-48afc190d4bc] _ensure_no_port_binding_failure(port) [ 680.312902] env[61839]: ERROR nova.compute.manager [instance: 3ea7af26-14b2-4371-a4f4-48afc190d4bc] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 680.312902] env[61839]: ERROR nova.compute.manager [instance: 3ea7af26-14b2-4371-a4f4-48afc190d4bc] raise exception.PortBindingFailed(port_id=port['id']) [ 680.313264] env[61839]: ERROR nova.compute.manager [instance: 3ea7af26-14b2-4371-a4f4-48afc190d4bc] nova.exception.PortBindingFailed: Binding failed for port 7edfbbec-8cb7-4e20-ac8f-419464c8aac9, please check neutron logs for more information. [ 680.313264] env[61839]: ERROR nova.compute.manager [instance: 3ea7af26-14b2-4371-a4f4-48afc190d4bc] [ 680.313264] env[61839]: INFO nova.compute.manager [None req-66411a82-44a4-4e09-b7e5-387750763c23 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 3ea7af26-14b2-4371-a4f4-48afc190d4bc] Terminating instance [ 680.313363] env[61839]: DEBUG oslo_concurrency.lockutils [None req-66411a82-44a4-4e09-b7e5-387750763c23 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Acquiring lock "refresh_cache-3ea7af26-14b2-4371-a4f4-48afc190d4bc" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 680.313519] env[61839]: DEBUG oslo_concurrency.lockutils [None req-66411a82-44a4-4e09-b7e5-387750763c23 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Acquired lock "refresh_cache-3ea7af26-14b2-4371-a4f4-48afc190d4bc" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 680.313696] env[61839]: DEBUG nova.network.neutron [None req-66411a82-44a4-4e09-b7e5-387750763c23 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 3ea7af26-14b2-4371-a4f4-48afc190d4bc] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 680.373916] env[61839]: DEBUG nova.network.neutron [None req-76059687-6f91-475a-bfaf-52787dc520ba tempest-AttachInterfacesUnderV243Test-1125158786 tempest-AttachInterfacesUnderV243Test-1125158786-project-member] [instance: 328882b4-d1af-4036-b313-ecada7d53899] Instance cache missing network info. 
{{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 680.440206] env[61839]: DEBUG nova.network.neutron [None req-76059687-6f91-475a-bfaf-52787dc520ba tempest-AttachInterfacesUnderV243Test-1125158786 tempest-AttachInterfacesUnderV243Test-1125158786-project-member] [instance: 328882b4-d1af-4036-b313-ecada7d53899] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 680.662039] env[61839]: DEBUG nova.virt.hardware [None req-bf14e7b3-0ebe-4036-b698-ec3c92db8125 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-18T16:51:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-18T16:50:57Z,direct_url=,disk_format='vmdk',id=e497cc62-282a-4a70-9770-22d80d8a1013,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a98e034276e44534a9feace637762da7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-18T16:50:58Z,virtual_size=,visibility=), allow threads: False {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 680.662224] env[61839]: DEBUG nova.virt.hardware [None req-bf14e7b3-0ebe-4036-b698-ec3c92db8125 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] Flavor limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 680.662382] env[61839]: DEBUG nova.virt.hardware [None req-bf14e7b3-0ebe-4036-b698-ec3c92db8125 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] Image limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 680.662560] env[61839]: DEBUG nova.virt.hardware [None req-bf14e7b3-0ebe-4036-b698-ec3c92db8125 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] Flavor pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 680.662747] env[61839]: DEBUG nova.virt.hardware [None req-bf14e7b3-0ebe-4036-b698-ec3c92db8125 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] Image pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 680.662904] env[61839]: DEBUG nova.virt.hardware [None req-bf14e7b3-0ebe-4036-b698-ec3c92db8125 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 680.663128] env[61839]: DEBUG nova.virt.hardware [None req-bf14e7b3-0ebe-4036-b698-ec3c92db8125 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 680.663286] 
env[61839]: DEBUG nova.virt.hardware [None req-bf14e7b3-0ebe-4036-b698-ec3c92db8125 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 680.663451] env[61839]: DEBUG nova.virt.hardware [None req-bf14e7b3-0ebe-4036-b698-ec3c92db8125 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] Got 1 possible topologies {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 680.663612] env[61839]: DEBUG nova.virt.hardware [None req-bf14e7b3-0ebe-4036-b698-ec3c92db8125 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 680.663818] env[61839]: DEBUG nova.virt.hardware [None req-bf14e7b3-0ebe-4036-b698-ec3c92db8125 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 680.664680] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e5d4df9-b543-4cc3-a8be-be550730b231 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.672367] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b805d279-b17e-4032-b81a-9f769cf50368 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.685431] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-bf14e7b3-0ebe-4036-b698-ec3c92db8125 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] [instance: 81ba4888-4b21-410f-ab86-a3068995836f] Instance VIF info [] {{(pid=61839) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 680.690880] env[61839]: DEBUG oslo.service.loopingcall [None req-bf14e7b3-0ebe-4036-b698-ec3c92db8125 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 680.691113] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 81ba4888-4b21-410f-ab86-a3068995836f] Creating VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 680.691316] env[61839]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-090ebbd0-dc13-4813-9636-c699a200c139 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.707688] env[61839]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 680.707688] env[61839]: value = "task-1314302" [ 680.707688] env[61839]: _type = "Task" [ 680.707688] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 680.714812] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314302, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 680.839977] env[61839]: DEBUG nova.network.neutron [None req-66411a82-44a4-4e09-b7e5-387750763c23 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 3ea7af26-14b2-4371-a4f4-48afc190d4bc] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 680.943435] env[61839]: DEBUG oslo_concurrency.lockutils [None req-76059687-6f91-475a-bfaf-52787dc520ba tempest-AttachInterfacesUnderV243Test-1125158786 tempest-AttachInterfacesUnderV243Test-1125158786-project-member] Releasing lock "refresh_cache-328882b4-d1af-4036-b313-ecada7d53899" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 680.943695] env[61839]: DEBUG nova.compute.manager [None req-76059687-6f91-475a-bfaf-52787dc520ba tempest-AttachInterfacesUnderV243Test-1125158786 tempest-AttachInterfacesUnderV243Test-1125158786-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61839) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 680.943899] env[61839]: DEBUG nova.compute.manager [None req-76059687-6f91-475a-bfaf-52787dc520ba tempest-AttachInterfacesUnderV243Test-1125158786 tempest-AttachInterfacesUnderV243Test-1125158786-project-member] [instance: 328882b4-d1af-4036-b313-ecada7d53899] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 680.944084] env[61839]: DEBUG nova.network.neutron [None req-76059687-6f91-475a-bfaf-52787dc520ba tempest-AttachInterfacesUnderV243Test-1125158786 tempest-AttachInterfacesUnderV243Test-1125158786-project-member] [instance: 328882b4-d1af-4036-b313-ecada7d53899] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 680.947221] env[61839]: DEBUG nova.network.neutron [None req-66411a82-44a4-4e09-b7e5-387750763c23 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 3ea7af26-14b2-4371-a4f4-48afc190d4bc] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 680.971680] env[61839]: DEBUG nova.network.neutron [None req-76059687-6f91-475a-bfaf-52787dc520ba tempest-AttachInterfacesUnderV243Test-1125158786 tempest-AttachInterfacesUnderV243Test-1125158786-project-member] [instance: 328882b4-d1af-4036-b313-ecada7d53899] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 681.149951] env[61839]: DEBUG nova.compute.manager [req-15d959de-cfe2-473b-8d4b-b60e2a028e9f req-8e70d2c3-6def-4cfa-b8af-610a474c1cb7 service nova] [instance: 3ea7af26-14b2-4371-a4f4-48afc190d4bc] Received event network-changed-7edfbbec-8cb7-4e20-ac8f-419464c8aac9 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 681.150228] env[61839]: DEBUG nova.compute.manager [req-15d959de-cfe2-473b-8d4b-b60e2a028e9f req-8e70d2c3-6def-4cfa-b8af-610a474c1cb7 service nova] [instance: 3ea7af26-14b2-4371-a4f4-48afc190d4bc] Refreshing instance network info cache due to event network-changed-7edfbbec-8cb7-4e20-ac8f-419464c8aac9. 
{{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 681.150372] env[61839]: DEBUG oslo_concurrency.lockutils [req-15d959de-cfe2-473b-8d4b-b60e2a028e9f req-8e70d2c3-6def-4cfa-b8af-610a474c1cb7 service nova] Acquiring lock "refresh_cache-3ea7af26-14b2-4371-a4f4-48afc190d4bc" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 681.220550] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314302, 'name': CreateVM_Task} progress is 99%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 681.247487] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44759a1f-6dcd-4bc5-a26e-dfb09c85bfde {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.255310] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd9f5b9b-c9a8-48a4-ae3b-439e20cdd808 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.286257] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-968d3c72-3421-4a9f-b560-ff329def631a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.293626] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f864f4a-f9e7-47fd-b3b7-6fe55347895c {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.306544] env[61839]: DEBUG nova.compute.provider_tree [None req-176fdd3e-e9a1-456d-89bc-e1597ce200e4 tempest-ImagesOneServerTestJSON-1580027351 tempest-ImagesOneServerTestJSON-1580027351-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 681.451915] env[61839]: DEBUG oslo_concurrency.lockutils [None req-66411a82-44a4-4e09-b7e5-387750763c23 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Releasing lock "refresh_cache-3ea7af26-14b2-4371-a4f4-48afc190d4bc" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 681.452362] env[61839]: DEBUG nova.compute.manager [None req-66411a82-44a4-4e09-b7e5-387750763c23 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 3ea7af26-14b2-4371-a4f4-48afc190d4bc] Start destroying the instance on the hypervisor. 
{{(pid=61839) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 681.452552] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-66411a82-44a4-4e09-b7e5-387750763c23 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 3ea7af26-14b2-4371-a4f4-48afc190d4bc] Destroying instance {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 681.453028] env[61839]: DEBUG oslo_concurrency.lockutils [req-15d959de-cfe2-473b-8d4b-b60e2a028e9f req-8e70d2c3-6def-4cfa-b8af-610a474c1cb7 service nova] Acquired lock "refresh_cache-3ea7af26-14b2-4371-a4f4-48afc190d4bc" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 681.453226] env[61839]: DEBUG nova.network.neutron [req-15d959de-cfe2-473b-8d4b-b60e2a028e9f req-8e70d2c3-6def-4cfa-b8af-610a474c1cb7 service nova] [instance: 3ea7af26-14b2-4371-a4f4-48afc190d4bc] Refreshing network info cache for port 7edfbbec-8cb7-4e20-ac8f-419464c8aac9 {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 681.454576] env[61839]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-750e7be2-1b93-431e-8458-e9358f3a3337 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.463770] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8aba7270-e7cc-49aa-9c11-277b88f2b814 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.473954] env[61839]: DEBUG nova.network.neutron [None req-76059687-6f91-475a-bfaf-52787dc520ba tempest-AttachInterfacesUnderV243Test-1125158786 tempest-AttachInterfacesUnderV243Test-1125158786-project-member] [instance: 328882b4-d1af-4036-b313-ecada7d53899] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 681.485607] env[61839]: WARNING nova.virt.vmwareapi.vmops [None req-66411a82-44a4-4e09-b7e5-387750763c23 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 3ea7af26-14b2-4371-a4f4-48afc190d4bc] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 3ea7af26-14b2-4371-a4f4-48afc190d4bc could not be found. [ 681.485828] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-66411a82-44a4-4e09-b7e5-387750763c23 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 3ea7af26-14b2-4371-a4f4-48afc190d4bc] Instance destroyed {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 681.486014] env[61839]: INFO nova.compute.manager [None req-66411a82-44a4-4e09-b7e5-387750763c23 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 3ea7af26-14b2-4371-a4f4-48afc190d4bc] Took 0.03 seconds to destroy the instance on the hypervisor. [ 681.486250] env[61839]: DEBUG oslo.service.loopingcall [None req-66411a82-44a4-4e09-b7e5-387750763c23 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 681.486866] env[61839]: DEBUG nova.compute.manager [-] [instance: 3ea7af26-14b2-4371-a4f4-48afc190d4bc] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 681.486958] env[61839]: DEBUG nova.network.neutron [-] [instance: 3ea7af26-14b2-4371-a4f4-48afc190d4bc] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 681.500644] env[61839]: DEBUG nova.network.neutron [-] [instance: 3ea7af26-14b2-4371-a4f4-48afc190d4bc] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 681.719438] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314302, 'name': CreateVM_Task} progress is 99%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 681.809248] env[61839]: DEBUG nova.scheduler.client.report [None req-176fdd3e-e9a1-456d-89bc-e1597ce200e4 tempest-ImagesOneServerTestJSON-1580027351 tempest-ImagesOneServerTestJSON-1580027351-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 681.975903] env[61839]: INFO nova.compute.manager [None req-76059687-6f91-475a-bfaf-52787dc520ba tempest-AttachInterfacesUnderV243Test-1125158786 tempest-AttachInterfacesUnderV243Test-1125158786-project-member] [instance: 328882b4-d1af-4036-b313-ecada7d53899] Took 1.03 seconds to deallocate network for instance. [ 681.980135] env[61839]: DEBUG nova.network.neutron [req-15d959de-cfe2-473b-8d4b-b60e2a028e9f req-8e70d2c3-6def-4cfa-b8af-610a474c1cb7 service nova] [instance: 3ea7af26-14b2-4371-a4f4-48afc190d4bc] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 682.003296] env[61839]: DEBUG nova.network.neutron [-] [instance: 3ea7af26-14b2-4371-a4f4-48afc190d4bc] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 682.071392] env[61839]: DEBUG nova.network.neutron [req-15d959de-cfe2-473b-8d4b-b60e2a028e9f req-8e70d2c3-6def-4cfa-b8af-610a474c1cb7 service nova] [instance: 3ea7af26-14b2-4371-a4f4-48afc190d4bc] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 682.221239] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314302, 'name': CreateVM_Task, 'duration_secs': 1.256144} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 682.221349] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 81ba4888-4b21-410f-ab86-a3068995836f] Created VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 682.221667] env[61839]: DEBUG oslo_concurrency.lockutils [None req-bf14e7b3-0ebe-4036-b698-ec3c92db8125 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 682.221824] env[61839]: DEBUG oslo_concurrency.lockutils [None req-bf14e7b3-0ebe-4036-b698-ec3c92db8125 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 682.222156] env[61839]: DEBUG oslo_concurrency.lockutils [None req-bf14e7b3-0ebe-4036-b698-ec3c92db8125 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 682.222402] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-46821b57-303f-4808-9519-4255ad81b7a9 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.227130] env[61839]: DEBUG oslo_vmware.api [None req-bf14e7b3-0ebe-4036-b698-ec3c92db8125 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] Waiting for the task: (returnval){ [ 682.227130] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]521b9917-c733-e87e-b598-5105387cd872" [ 682.227130] env[61839]: _type = "Task" [ 682.227130] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 682.236426] env[61839]: DEBUG oslo_vmware.api [None req-bf14e7b3-0ebe-4036-b698-ec3c92db8125 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]521b9917-c733-e87e-b598-5105387cd872, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 682.314097] env[61839]: DEBUG oslo_concurrency.lockutils [None req-176fdd3e-e9a1-456d-89bc-e1597ce200e4 tempest-ImagesOneServerTestJSON-1580027351 tempest-ImagesOneServerTestJSON-1580027351-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.469s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 682.314630] env[61839]: DEBUG nova.compute.manager [None req-176fdd3e-e9a1-456d-89bc-e1597ce200e4 tempest-ImagesOneServerTestJSON-1580027351 tempest-ImagesOneServerTestJSON-1580027351-project-member] [instance: ad32bc49-5e52-468a-9d93-390c8649dcae] Start building networks asynchronously for instance. 
{{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 682.317236] env[61839]: DEBUG oslo_concurrency.lockutils [None req-b7e16cf6-cab6-4bac-b016-9355722d13b1 tempest-ServerPasswordTestJSON-1977854233 tempest-ServerPasswordTestJSON-1977854233-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 19.727s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 682.506936] env[61839]: INFO nova.compute.manager [-] [instance: 3ea7af26-14b2-4371-a4f4-48afc190d4bc] Took 1.02 seconds to deallocate network for instance. [ 682.509254] env[61839]: DEBUG nova.compute.claims [None req-66411a82-44a4-4e09-b7e5-387750763c23 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 3ea7af26-14b2-4371-a4f4-48afc190d4bc] Aborting claim: {{(pid=61839) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 682.509438] env[61839]: DEBUG oslo_concurrency.lockutils [None req-66411a82-44a4-4e09-b7e5-387750763c23 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 682.573974] env[61839]: DEBUG oslo_concurrency.lockutils [req-15d959de-cfe2-473b-8d4b-b60e2a028e9f req-8e70d2c3-6def-4cfa-b8af-610a474c1cb7 service nova] Releasing lock "refresh_cache-3ea7af26-14b2-4371-a4f4-48afc190d4bc" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 682.574250] env[61839]: DEBUG nova.compute.manager [req-15d959de-cfe2-473b-8d4b-b60e2a028e9f req-8e70d2c3-6def-4cfa-b8af-610a474c1cb7 service nova] [instance: 3ea7af26-14b2-4371-a4f4-48afc190d4bc] Received event network-vif-deleted-7edfbbec-8cb7-4e20-ac8f-419464c8aac9 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 682.739548] env[61839]: DEBUG oslo_vmware.api [None req-bf14e7b3-0ebe-4036-b698-ec3c92db8125 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]521b9917-c733-e87e-b598-5105387cd872, 'name': SearchDatastore_Task, 'duration_secs': 0.009244} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 682.739855] env[61839]: DEBUG oslo_concurrency.lockutils [None req-bf14e7b3-0ebe-4036-b698-ec3c92db8125 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 682.740104] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-bf14e7b3-0ebe-4036-b698-ec3c92db8125 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] [instance: 81ba4888-4b21-410f-ab86-a3068995836f] Processing image e497cc62-282a-4a70-9770-22d80d8a1013 {{(pid=61839) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 682.740341] env[61839]: DEBUG oslo_concurrency.lockutils [None req-bf14e7b3-0ebe-4036-b698-ec3c92db8125 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 682.740500] env[61839]: DEBUG oslo_concurrency.lockutils [None req-bf14e7b3-0ebe-4036-b698-ec3c92db8125 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 682.740700] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-bf14e7b3-0ebe-4036-b698-ec3c92db8125 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 682.740959] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1e7311f3-2521-4ff7-ad82-b23135a44f3c {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.749279] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-bf14e7b3-0ebe-4036-b698-ec3c92db8125 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 682.749496] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-bf14e7b3-0ebe-4036-b698-ec3c92db8125 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61839) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 682.750326] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d9aad5d6-4855-42cf-ba19-d63f392c4abe {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.755474] env[61839]: DEBUG oslo_vmware.api [None req-bf14e7b3-0ebe-4036-b698-ec3c92db8125 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] Waiting for the task: (returnval){ [ 682.755474] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52af390d-a36a-50e0-ac35-08a585c27df1" [ 682.755474] env[61839]: _type = "Task" [ 682.755474] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 682.763917] env[61839]: DEBUG oslo_vmware.api [None req-bf14e7b3-0ebe-4036-b698-ec3c92db8125 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52af390d-a36a-50e0-ac35-08a585c27df1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 682.821897] env[61839]: DEBUG nova.compute.utils [None req-176fdd3e-e9a1-456d-89bc-e1597ce200e4 tempest-ImagesOneServerTestJSON-1580027351 tempest-ImagesOneServerTestJSON-1580027351-project-member] Using /dev/sd instead of None {{(pid=61839) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 682.826446] env[61839]: DEBUG nova.compute.manager [None req-176fdd3e-e9a1-456d-89bc-e1597ce200e4 tempest-ImagesOneServerTestJSON-1580027351 tempest-ImagesOneServerTestJSON-1580027351-project-member] [instance: ad32bc49-5e52-468a-9d93-390c8649dcae] Allocating IP information in the background. 
{{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 682.826541] env[61839]: DEBUG nova.network.neutron [None req-176fdd3e-e9a1-456d-89bc-e1597ce200e4 tempest-ImagesOneServerTestJSON-1580027351 tempest-ImagesOneServerTestJSON-1580027351-project-member] [instance: ad32bc49-5e52-468a-9d93-390c8649dcae] allocate_for_instance() {{(pid=61839) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 682.865588] env[61839]: DEBUG nova.policy [None req-176fdd3e-e9a1-456d-89bc-e1597ce200e4 tempest-ImagesOneServerTestJSON-1580027351 tempest-ImagesOneServerTestJSON-1580027351-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '083be6af909348d6948c76de46cea7fb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2635da1dc3bf4b31be0d80ce6e01dd0f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61839) authorize /opt/stack/nova/nova/policy.py:201}} [ 683.018091] env[61839]: INFO nova.scheduler.client.report [None req-76059687-6f91-475a-bfaf-52787dc520ba tempest-AttachInterfacesUnderV243Test-1125158786 tempest-AttachInterfacesUnderV243Test-1125158786-project-member] Deleted allocations for instance 328882b4-d1af-4036-b313-ecada7d53899 [ 683.256388] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e55b08b-31bc-4226-ad52-da535d162301 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.269969] env[61839]: DEBUG oslo_vmware.api [None req-bf14e7b3-0ebe-4036-b698-ec3c92db8125 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52af390d-a36a-50e0-ac35-08a585c27df1, 'name': SearchDatastore_Task, 'duration_secs': 0.00812} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 683.272294] env[61839]: DEBUG nova.network.neutron [None req-176fdd3e-e9a1-456d-89bc-e1597ce200e4 tempest-ImagesOneServerTestJSON-1580027351 tempest-ImagesOneServerTestJSON-1580027351-project-member] [instance: ad32bc49-5e52-468a-9d93-390c8649dcae] Successfully created port: 06269c90-ada4-4d3c-9956-7781981d3128 {{(pid=61839) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 683.274663] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0d6c6fa3-afc0-4c9f-9a39-f1fe863a5534 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.277438] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bd1b0ce-42fe-4d76-81a7-a73caa5bdbdc {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.285193] env[61839]: DEBUG oslo_vmware.api [None req-bf14e7b3-0ebe-4036-b698-ec3c92db8125 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] Waiting for the task: (returnval){ [ 683.285193] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52d77716-6281-dd7c-b75f-c4229b72ab8f" [ 683.285193] env[61839]: _type = "Task" [ 683.285193] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 683.316030] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1282faf-c55e-4c6a-ad4f-d992fc997b7a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.323930] env[61839]: DEBUG oslo_vmware.api [None req-bf14e7b3-0ebe-4036-b698-ec3c92db8125 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52d77716-6281-dd7c-b75f-c4229b72ab8f, 'name': SearchDatastore_Task, 'duration_secs': 0.009744} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 683.326032] env[61839]: DEBUG oslo_concurrency.lockutils [None req-bf14e7b3-0ebe-4036-b698-ec3c92db8125 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 683.326317] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf14e7b3-0ebe-4036-b698-ec3c92db8125 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk to [datastore1] 81ba4888-4b21-410f-ab86-a3068995836f/81ba4888-4b21-410f-ab86-a3068995836f.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 683.326833] env[61839]: DEBUG nova.compute.manager [None req-176fdd3e-e9a1-456d-89bc-e1597ce200e4 tempest-ImagesOneServerTestJSON-1580027351 tempest-ImagesOneServerTestJSON-1580027351-project-member] [instance: ad32bc49-5e52-468a-9d93-390c8649dcae] Start building block device mappings for instance. {{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 683.329366] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3b553036-bf76-4dfe-a038-9d1d584fadcd {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.332060] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68f04943-7061-45bc-9572-cd8a026677a9 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.346106] env[61839]: DEBUG nova.compute.provider_tree [None req-b7e16cf6-cab6-4bac-b016-9355722d13b1 tempest-ServerPasswordTestJSON-1977854233 tempest-ServerPasswordTestJSON-1977854233-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 683.348715] env[61839]: DEBUG oslo_vmware.api [None req-bf14e7b3-0ebe-4036-b698-ec3c92db8125 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] Waiting for the task: (returnval){ [ 683.348715] env[61839]: value = "task-1314303" [ 683.348715] env[61839]: _type = "Task" [ 683.348715] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 683.356243] env[61839]: DEBUG oslo_vmware.api [None req-bf14e7b3-0ebe-4036-b698-ec3c92db8125 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] Task: {'id': task-1314303, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 683.531442] env[61839]: DEBUG oslo_concurrency.lockutils [None req-76059687-6f91-475a-bfaf-52787dc520ba tempest-AttachInterfacesUnderV243Test-1125158786 tempest-AttachInterfacesUnderV243Test-1125158786-project-member] Lock "328882b4-d1af-4036-b313-ecada7d53899" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 116.544s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 683.849869] env[61839]: DEBUG nova.scheduler.client.report [None req-b7e16cf6-cab6-4bac-b016-9355722d13b1 tempest-ServerPasswordTestJSON-1977854233 tempest-ServerPasswordTestJSON-1977854233-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 683.863652] env[61839]: DEBUG oslo_vmware.api [None req-bf14e7b3-0ebe-4036-b698-ec3c92db8125 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] Task: {'id': task-1314303, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.47867} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 683.863652] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf14e7b3-0ebe-4036-b698-ec3c92db8125 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk to [datastore1] 81ba4888-4b21-410f-ab86-a3068995836f/81ba4888-4b21-410f-ab86-a3068995836f.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 683.863652] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-bf14e7b3-0ebe-4036-b698-ec3c92db8125 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] [instance: 81ba4888-4b21-410f-ab86-a3068995836f] Extending root virtual disk to 1048576 {{(pid=61839) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 683.863652] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7ecdcd8d-3595-4483-8ddb-4a30fa102113 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.870191] env[61839]: DEBUG oslo_vmware.api [None req-bf14e7b3-0ebe-4036-b698-ec3c92db8125 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] Waiting for the task: (returnval){ [ 683.870191] env[61839]: value = "task-1314304" [ 683.870191] env[61839]: _type = "Task" [ 683.870191] env[61839]: } to complete. 
{{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 683.881067] env[61839]: DEBUG oslo_vmware.api [None req-bf14e7b3-0ebe-4036-b698-ec3c92db8125 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] Task: {'id': task-1314304, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 684.032046] env[61839]: DEBUG nova.compute.manager [None req-d67a25ce-8bd3-48bd-87b7-725457f40be1 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] [instance: 97560b6e-0f50-4cc0-b620-305c82938390] Starting instance... {{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 684.334481] env[61839]: DEBUG nova.compute.manager [req-c9c37774-e588-4627-9b53-293a062493fa req-2c675fc2-c552-4c93-ac11-26907aff5e17 service nova] [instance: ad32bc49-5e52-468a-9d93-390c8649dcae] Received event network-changed-06269c90-ada4-4d3c-9956-7781981d3128 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 684.337944] env[61839]: DEBUG nova.compute.manager [req-c9c37774-e588-4627-9b53-293a062493fa req-2c675fc2-c552-4c93-ac11-26907aff5e17 service nova] [instance: ad32bc49-5e52-468a-9d93-390c8649dcae] Refreshing instance network info cache due to event network-changed-06269c90-ada4-4d3c-9956-7781981d3128. {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 684.337944] env[61839]: DEBUG oslo_concurrency.lockutils [req-c9c37774-e588-4627-9b53-293a062493fa req-2c675fc2-c552-4c93-ac11-26907aff5e17 service nova] Acquiring lock "refresh_cache-ad32bc49-5e52-468a-9d93-390c8649dcae" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 684.337944] env[61839]: DEBUG oslo_concurrency.lockutils [req-c9c37774-e588-4627-9b53-293a062493fa req-2c675fc2-c552-4c93-ac11-26907aff5e17 service nova] Acquired lock "refresh_cache-ad32bc49-5e52-468a-9d93-390c8649dcae" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 684.337944] env[61839]: DEBUG nova.network.neutron [req-c9c37774-e588-4627-9b53-293a062493fa req-2c675fc2-c552-4c93-ac11-26907aff5e17 service nova] [instance: ad32bc49-5e52-468a-9d93-390c8649dcae] Refreshing network info cache for port 06269c90-ada4-4d3c-9956-7781981d3128 {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 684.341213] env[61839]: DEBUG nova.compute.manager [None req-176fdd3e-e9a1-456d-89bc-e1597ce200e4 tempest-ImagesOneServerTestJSON-1580027351 tempest-ImagesOneServerTestJSON-1580027351-project-member] [instance: ad32bc49-5e52-468a-9d93-390c8649dcae] Start spawning the instance on the hypervisor. 
{{(pid=61839) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 684.357919] env[61839]: DEBUG oslo_concurrency.lockutils [None req-b7e16cf6-cab6-4bac-b016-9355722d13b1 tempest-ServerPasswordTestJSON-1977854233 tempest-ServerPasswordTestJSON-1977854233-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.041s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 684.358599] env[61839]: ERROR nova.compute.manager [None req-b7e16cf6-cab6-4bac-b016-9355722d13b1 tempest-ServerPasswordTestJSON-1977854233 tempest-ServerPasswordTestJSON-1977854233-project-member] [instance: 2670f16e-4c44-4b88-937e-9e491f599acb] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 64db97a0-6fce-45d1-904c-ce34410e38b0, please check neutron logs for more information. [ 684.358599] env[61839]: ERROR nova.compute.manager [instance: 2670f16e-4c44-4b88-937e-9e491f599acb] Traceback (most recent call last): [ 684.358599] env[61839]: ERROR nova.compute.manager [instance: 2670f16e-4c44-4b88-937e-9e491f599acb] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 684.358599] env[61839]: ERROR nova.compute.manager [instance: 2670f16e-4c44-4b88-937e-9e491f599acb] self.driver.spawn(context, instance, image_meta, [ 684.358599] env[61839]: ERROR nova.compute.manager [instance: 2670f16e-4c44-4b88-937e-9e491f599acb] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 684.358599] env[61839]: ERROR nova.compute.manager [instance: 2670f16e-4c44-4b88-937e-9e491f599acb] self._vmops.spawn(context, instance, image_meta, injected_files, [ 684.358599] env[61839]: ERROR nova.compute.manager [instance: 2670f16e-4c44-4b88-937e-9e491f599acb] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 684.358599] env[61839]: ERROR nova.compute.manager [instance: 2670f16e-4c44-4b88-937e-9e491f599acb] vm_ref = self.build_virtual_machine(instance, [ 684.358599] env[61839]: ERROR nova.compute.manager [instance: 2670f16e-4c44-4b88-937e-9e491f599acb] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 684.358599] env[61839]: ERROR nova.compute.manager [instance: 2670f16e-4c44-4b88-937e-9e491f599acb] vif_infos = vmwarevif.get_vif_info(self._session, [ 684.358599] env[61839]: ERROR nova.compute.manager [instance: 2670f16e-4c44-4b88-937e-9e491f599acb] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 684.358956] env[61839]: ERROR nova.compute.manager [instance: 2670f16e-4c44-4b88-937e-9e491f599acb] for vif in network_info: [ 684.358956] env[61839]: ERROR nova.compute.manager [instance: 2670f16e-4c44-4b88-937e-9e491f599acb] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 684.358956] env[61839]: ERROR nova.compute.manager [instance: 2670f16e-4c44-4b88-937e-9e491f599acb] return self._sync_wrapper(fn, *args, **kwargs) [ 684.358956] env[61839]: ERROR nova.compute.manager [instance: 2670f16e-4c44-4b88-937e-9e491f599acb] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 684.358956] env[61839]: ERROR nova.compute.manager [instance: 2670f16e-4c44-4b88-937e-9e491f599acb] self.wait() [ 684.358956] env[61839]: ERROR nova.compute.manager [instance: 2670f16e-4c44-4b88-937e-9e491f599acb] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 684.358956] env[61839]: ERROR 
nova.compute.manager [instance: 2670f16e-4c44-4b88-937e-9e491f599acb] self[:] = self._gt.wait() [ 684.358956] env[61839]: ERROR nova.compute.manager [instance: 2670f16e-4c44-4b88-937e-9e491f599acb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 684.358956] env[61839]: ERROR nova.compute.manager [instance: 2670f16e-4c44-4b88-937e-9e491f599acb] return self._exit_event.wait() [ 684.358956] env[61839]: ERROR nova.compute.manager [instance: 2670f16e-4c44-4b88-937e-9e491f599acb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 684.358956] env[61839]: ERROR nova.compute.manager [instance: 2670f16e-4c44-4b88-937e-9e491f599acb] result = hub.switch() [ 684.358956] env[61839]: ERROR nova.compute.manager [instance: 2670f16e-4c44-4b88-937e-9e491f599acb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 684.358956] env[61839]: ERROR nova.compute.manager [instance: 2670f16e-4c44-4b88-937e-9e491f599acb] return self.greenlet.switch() [ 684.359280] env[61839]: ERROR nova.compute.manager [instance: 2670f16e-4c44-4b88-937e-9e491f599acb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 684.359280] env[61839]: ERROR nova.compute.manager [instance: 2670f16e-4c44-4b88-937e-9e491f599acb] result = function(*args, **kwargs) [ 684.359280] env[61839]: ERROR nova.compute.manager [instance: 2670f16e-4c44-4b88-937e-9e491f599acb] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 684.359280] env[61839]: ERROR nova.compute.manager [instance: 2670f16e-4c44-4b88-937e-9e491f599acb] return func(*args, **kwargs) [ 684.359280] env[61839]: ERROR nova.compute.manager [instance: 2670f16e-4c44-4b88-937e-9e491f599acb] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 684.359280] env[61839]: ERROR nova.compute.manager [instance: 2670f16e-4c44-4b88-937e-9e491f599acb] raise e [ 684.359280] env[61839]: ERROR nova.compute.manager [instance: 2670f16e-4c44-4b88-937e-9e491f599acb] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 684.359280] env[61839]: ERROR nova.compute.manager [instance: 2670f16e-4c44-4b88-937e-9e491f599acb] nwinfo = self.network_api.allocate_for_instance( [ 684.359280] env[61839]: ERROR nova.compute.manager [instance: 2670f16e-4c44-4b88-937e-9e491f599acb] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 684.359280] env[61839]: ERROR nova.compute.manager [instance: 2670f16e-4c44-4b88-937e-9e491f599acb] created_port_ids = self._update_ports_for_instance( [ 684.359280] env[61839]: ERROR nova.compute.manager [instance: 2670f16e-4c44-4b88-937e-9e491f599acb] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 684.359280] env[61839]: ERROR nova.compute.manager [instance: 2670f16e-4c44-4b88-937e-9e491f599acb] with excutils.save_and_reraise_exception(): [ 684.359280] env[61839]: ERROR nova.compute.manager [instance: 2670f16e-4c44-4b88-937e-9e491f599acb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 684.359651] env[61839]: ERROR nova.compute.manager [instance: 2670f16e-4c44-4b88-937e-9e491f599acb] self.force_reraise() [ 684.359651] env[61839]: ERROR nova.compute.manager [instance: 2670f16e-4c44-4b88-937e-9e491f599acb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 
200, in force_reraise [ 684.359651] env[61839]: ERROR nova.compute.manager [instance: 2670f16e-4c44-4b88-937e-9e491f599acb] raise self.value [ 684.359651] env[61839]: ERROR nova.compute.manager [instance: 2670f16e-4c44-4b88-937e-9e491f599acb] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 684.359651] env[61839]: ERROR nova.compute.manager [instance: 2670f16e-4c44-4b88-937e-9e491f599acb] updated_port = self._update_port( [ 684.359651] env[61839]: ERROR nova.compute.manager [instance: 2670f16e-4c44-4b88-937e-9e491f599acb] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 684.359651] env[61839]: ERROR nova.compute.manager [instance: 2670f16e-4c44-4b88-937e-9e491f599acb] _ensure_no_port_binding_failure(port) [ 684.359651] env[61839]: ERROR nova.compute.manager [instance: 2670f16e-4c44-4b88-937e-9e491f599acb] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 684.359651] env[61839]: ERROR nova.compute.manager [instance: 2670f16e-4c44-4b88-937e-9e491f599acb] raise exception.PortBindingFailed(port_id=port['id']) [ 684.359651] env[61839]: ERROR nova.compute.manager [instance: 2670f16e-4c44-4b88-937e-9e491f599acb] nova.exception.PortBindingFailed: Binding failed for port 64db97a0-6fce-45d1-904c-ce34410e38b0, please check neutron logs for more information. [ 684.359651] env[61839]: ERROR nova.compute.manager [instance: 2670f16e-4c44-4b88-937e-9e491f599acb] [ 684.359971] env[61839]: DEBUG nova.compute.utils [None req-b7e16cf6-cab6-4bac-b016-9355722d13b1 tempest-ServerPasswordTestJSON-1977854233 tempest-ServerPasswordTestJSON-1977854233-project-member] [instance: 2670f16e-4c44-4b88-937e-9e491f599acb] Binding failed for port 64db97a0-6fce-45d1-904c-ce34410e38b0, please check neutron logs for more information. {{(pid=61839) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 684.361264] env[61839]: DEBUG oslo_concurrency.lockutils [None req-fcabcba5-2e8f-494e-bc5c-79e52812344e tempest-ServerActionsTestJSON-1845148809 tempest-ServerActionsTestJSON-1845148809-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 18.785s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 684.367257] env[61839]: DEBUG nova.compute.manager [None req-b7e16cf6-cab6-4bac-b016-9355722d13b1 tempest-ServerPasswordTestJSON-1977854233 tempest-ServerPasswordTestJSON-1977854233-project-member] [instance: 2670f16e-4c44-4b88-937e-9e491f599acb] Build of instance 2670f16e-4c44-4b88-937e-9e491f599acb was re-scheduled: Binding failed for port 64db97a0-6fce-45d1-904c-ce34410e38b0, please check neutron logs for more information. 
{{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 684.367734] env[61839]: DEBUG nova.compute.manager [None req-b7e16cf6-cab6-4bac-b016-9355722d13b1 tempest-ServerPasswordTestJSON-1977854233 tempest-ServerPasswordTestJSON-1977854233-project-member] [instance: 2670f16e-4c44-4b88-937e-9e491f599acb] Unplugging VIFs for instance {{(pid=61839) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 684.368089] env[61839]: DEBUG oslo_concurrency.lockutils [None req-b7e16cf6-cab6-4bac-b016-9355722d13b1 tempest-ServerPasswordTestJSON-1977854233 tempest-ServerPasswordTestJSON-1977854233-project-member] Acquiring lock "refresh_cache-2670f16e-4c44-4b88-937e-9e491f599acb" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 684.368997] env[61839]: DEBUG oslo_concurrency.lockutils [None req-b7e16cf6-cab6-4bac-b016-9355722d13b1 tempest-ServerPasswordTestJSON-1977854233 tempest-ServerPasswordTestJSON-1977854233-project-member] Acquired lock "refresh_cache-2670f16e-4c44-4b88-937e-9e491f599acb" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 684.369226] env[61839]: DEBUG nova.network.neutron [None req-b7e16cf6-cab6-4bac-b016-9355722d13b1 tempest-ServerPasswordTestJSON-1977854233 tempest-ServerPasswordTestJSON-1977854233-project-member] [instance: 2670f16e-4c44-4b88-937e-9e491f599acb] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 684.384435] env[61839]: DEBUG oslo_vmware.api [None req-bf14e7b3-0ebe-4036-b698-ec3c92db8125 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] Task: {'id': task-1314304, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06817} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 684.387355] env[61839]: DEBUG nova.virt.hardware [None req-176fdd3e-e9a1-456d-89bc-e1597ce200e4 tempest-ImagesOneServerTestJSON-1580027351 tempest-ImagesOneServerTestJSON-1580027351-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-18T16:51:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-18T16:50:57Z,direct_url=,disk_format='vmdk',id=e497cc62-282a-4a70-9770-22d80d8a1013,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a98e034276e44534a9feace637762da7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-18T16:50:58Z,virtual_size=,visibility=), allow threads: False {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 684.387355] env[61839]: DEBUG nova.virt.hardware [None req-176fdd3e-e9a1-456d-89bc-e1597ce200e4 tempest-ImagesOneServerTestJSON-1580027351 tempest-ImagesOneServerTestJSON-1580027351-project-member] Flavor limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 684.387355] env[61839]: DEBUG nova.virt.hardware [None req-176fdd3e-e9a1-456d-89bc-e1597ce200e4 tempest-ImagesOneServerTestJSON-1580027351 tempest-ImagesOneServerTestJSON-1580027351-project-member] Image limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 684.388687] env[61839]: DEBUG nova.virt.hardware [None req-176fdd3e-e9a1-456d-89bc-e1597ce200e4 tempest-ImagesOneServerTestJSON-1580027351 tempest-ImagesOneServerTestJSON-1580027351-project-member] Flavor pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 684.388845] env[61839]: DEBUG nova.virt.hardware [None req-176fdd3e-e9a1-456d-89bc-e1597ce200e4 tempest-ImagesOneServerTestJSON-1580027351 tempest-ImagesOneServerTestJSON-1580027351-project-member] Image pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 684.388999] env[61839]: DEBUG nova.virt.hardware [None req-176fdd3e-e9a1-456d-89bc-e1597ce200e4 tempest-ImagesOneServerTestJSON-1580027351 tempest-ImagesOneServerTestJSON-1580027351-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 684.389233] env[61839]: DEBUG nova.virt.hardware [None req-176fdd3e-e9a1-456d-89bc-e1597ce200e4 tempest-ImagesOneServerTestJSON-1580027351 tempest-ImagesOneServerTestJSON-1580027351-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 684.389395] env[61839]: DEBUG nova.virt.hardware [None req-176fdd3e-e9a1-456d-89bc-e1597ce200e4 tempest-ImagesOneServerTestJSON-1580027351 tempest-ImagesOneServerTestJSON-1580027351-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 684.389558] env[61839]: 
DEBUG nova.virt.hardware [None req-176fdd3e-e9a1-456d-89bc-e1597ce200e4 tempest-ImagesOneServerTestJSON-1580027351 tempest-ImagesOneServerTestJSON-1580027351-project-member] Got 1 possible topologies {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 684.389716] env[61839]: DEBUG nova.virt.hardware [None req-176fdd3e-e9a1-456d-89bc-e1597ce200e4 tempest-ImagesOneServerTestJSON-1580027351 tempest-ImagesOneServerTestJSON-1580027351-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 684.389886] env[61839]: DEBUG nova.virt.hardware [None req-176fdd3e-e9a1-456d-89bc-e1597ce200e4 tempest-ImagesOneServerTestJSON-1580027351 tempest-ImagesOneServerTestJSON-1580027351-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 684.390201] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-bf14e7b3-0ebe-4036-b698-ec3c92db8125 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] [instance: 81ba4888-4b21-410f-ab86-a3068995836f] Extended root virtual disk {{(pid=61839) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 684.391243] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9d95a22-bf32-4876-8d15-cf5107e8ff7e {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.394461] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-280b7ab7-e615-414d-87b5-000cb39cde4d {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.411447] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-014c6120-da7d-4612-b9eb-1f583cb63cbc {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.425947] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-bf14e7b3-0ebe-4036-b698-ec3c92db8125 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] [instance: 81ba4888-4b21-410f-ab86-a3068995836f] Reconfiguring VM instance instance-00000020 to attach disk [datastore1] 81ba4888-4b21-410f-ab86-a3068995836f/81ba4888-4b21-410f-ab86-a3068995836f.vmdk or device None with type sparse {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 684.425947] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-041c44c1-9c43-4bca-96b0-88b352dfe091 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.451173] env[61839]: DEBUG oslo_vmware.api [None req-bf14e7b3-0ebe-4036-b698-ec3c92db8125 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] Waiting for the task: (returnval){ [ 684.451173] env[61839]: value = "task-1314305" [ 684.451173] env[61839]: _type = "Task" [ 684.451173] env[61839]: } to complete. 
{{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 684.458713] env[61839]: DEBUG oslo_vmware.api [None req-bf14e7b3-0ebe-4036-b698-ec3c92db8125 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] Task: {'id': task-1314305, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 684.500855] env[61839]: ERROR nova.compute.manager [None req-176fdd3e-e9a1-456d-89bc-e1597ce200e4 tempest-ImagesOneServerTestJSON-1580027351 tempest-ImagesOneServerTestJSON-1580027351-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 06269c90-ada4-4d3c-9956-7781981d3128, please check neutron logs for more information. [ 684.500855] env[61839]: ERROR nova.compute.manager Traceback (most recent call last): [ 684.500855] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 684.500855] env[61839]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 684.500855] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 684.500855] env[61839]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 684.500855] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 684.500855] env[61839]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 684.500855] env[61839]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 684.500855] env[61839]: ERROR nova.compute.manager self.force_reraise() [ 684.500855] env[61839]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 684.500855] env[61839]: ERROR nova.compute.manager raise self.value [ 684.500855] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 684.500855] env[61839]: ERROR nova.compute.manager updated_port = self._update_port( [ 684.500855] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 684.500855] env[61839]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 684.501450] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 684.501450] env[61839]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 684.501450] env[61839]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 06269c90-ada4-4d3c-9956-7781981d3128, please check neutron logs for more information. 
[ 684.501450] env[61839]: ERROR nova.compute.manager [ 684.501450] env[61839]: Traceback (most recent call last): [ 684.501450] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 684.501450] env[61839]: listener.cb(fileno) [ 684.501450] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 684.501450] env[61839]: result = function(*args, **kwargs) [ 684.501450] env[61839]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 684.501450] env[61839]: return func(*args, **kwargs) [ 684.501450] env[61839]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 684.501450] env[61839]: raise e [ 684.501450] env[61839]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 684.501450] env[61839]: nwinfo = self.network_api.allocate_for_instance( [ 684.501450] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 684.501450] env[61839]: created_port_ids = self._update_ports_for_instance( [ 684.501450] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 684.501450] env[61839]: with excutils.save_and_reraise_exception(): [ 684.501450] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 684.501450] env[61839]: self.force_reraise() [ 684.501450] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 684.501450] env[61839]: raise self.value [ 684.501450] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 684.501450] env[61839]: updated_port = self._update_port( [ 684.501450] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 684.501450] env[61839]: _ensure_no_port_binding_failure(port) [ 684.501450] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 684.501450] env[61839]: raise exception.PortBindingFailed(port_id=port['id']) [ 684.502322] env[61839]: nova.exception.PortBindingFailed: Binding failed for port 06269c90-ada4-4d3c-9956-7781981d3128, please check neutron logs for more information. [ 684.502322] env[61839]: Removing descriptor: 21 [ 684.502322] env[61839]: ERROR nova.compute.manager [None req-176fdd3e-e9a1-456d-89bc-e1597ce200e4 tempest-ImagesOneServerTestJSON-1580027351 tempest-ImagesOneServerTestJSON-1580027351-project-member] [instance: ad32bc49-5e52-468a-9d93-390c8649dcae] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 06269c90-ada4-4d3c-9956-7781981d3128, please check neutron logs for more information. 
[ 684.502322] env[61839]: ERROR nova.compute.manager [instance: ad32bc49-5e52-468a-9d93-390c8649dcae] Traceback (most recent call last): [ 684.502322] env[61839]: ERROR nova.compute.manager [instance: ad32bc49-5e52-468a-9d93-390c8649dcae] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 684.502322] env[61839]: ERROR nova.compute.manager [instance: ad32bc49-5e52-468a-9d93-390c8649dcae] yield resources [ 684.502322] env[61839]: ERROR nova.compute.manager [instance: ad32bc49-5e52-468a-9d93-390c8649dcae] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 684.502322] env[61839]: ERROR nova.compute.manager [instance: ad32bc49-5e52-468a-9d93-390c8649dcae] self.driver.spawn(context, instance, image_meta, [ 684.502322] env[61839]: ERROR nova.compute.manager [instance: ad32bc49-5e52-468a-9d93-390c8649dcae] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 684.502322] env[61839]: ERROR nova.compute.manager [instance: ad32bc49-5e52-468a-9d93-390c8649dcae] self._vmops.spawn(context, instance, image_meta, injected_files, [ 684.502322] env[61839]: ERROR nova.compute.manager [instance: ad32bc49-5e52-468a-9d93-390c8649dcae] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 684.502322] env[61839]: ERROR nova.compute.manager [instance: ad32bc49-5e52-468a-9d93-390c8649dcae] vm_ref = self.build_virtual_machine(instance, [ 684.502706] env[61839]: ERROR nova.compute.manager [instance: ad32bc49-5e52-468a-9d93-390c8649dcae] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 684.502706] env[61839]: ERROR nova.compute.manager [instance: ad32bc49-5e52-468a-9d93-390c8649dcae] vif_infos = vmwarevif.get_vif_info(self._session, [ 684.502706] env[61839]: ERROR nova.compute.manager [instance: ad32bc49-5e52-468a-9d93-390c8649dcae] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 684.502706] env[61839]: ERROR nova.compute.manager [instance: ad32bc49-5e52-468a-9d93-390c8649dcae] for vif in network_info: [ 684.502706] env[61839]: ERROR nova.compute.manager [instance: ad32bc49-5e52-468a-9d93-390c8649dcae] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 684.502706] env[61839]: ERROR nova.compute.manager [instance: ad32bc49-5e52-468a-9d93-390c8649dcae] return self._sync_wrapper(fn, *args, **kwargs) [ 684.502706] env[61839]: ERROR nova.compute.manager [instance: ad32bc49-5e52-468a-9d93-390c8649dcae] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 684.502706] env[61839]: ERROR nova.compute.manager [instance: ad32bc49-5e52-468a-9d93-390c8649dcae] self.wait() [ 684.502706] env[61839]: ERROR nova.compute.manager [instance: ad32bc49-5e52-468a-9d93-390c8649dcae] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 684.502706] env[61839]: ERROR nova.compute.manager [instance: ad32bc49-5e52-468a-9d93-390c8649dcae] self[:] = self._gt.wait() [ 684.502706] env[61839]: ERROR nova.compute.manager [instance: ad32bc49-5e52-468a-9d93-390c8649dcae] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 684.502706] env[61839]: ERROR nova.compute.manager [instance: ad32bc49-5e52-468a-9d93-390c8649dcae] return self._exit_event.wait() [ 684.502706] env[61839]: ERROR nova.compute.manager [instance: ad32bc49-5e52-468a-9d93-390c8649dcae] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 684.503116] env[61839]: ERROR 
nova.compute.manager [instance: ad32bc49-5e52-468a-9d93-390c8649dcae] result = hub.switch() [ 684.503116] env[61839]: ERROR nova.compute.manager [instance: ad32bc49-5e52-468a-9d93-390c8649dcae] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 684.503116] env[61839]: ERROR nova.compute.manager [instance: ad32bc49-5e52-468a-9d93-390c8649dcae] return self.greenlet.switch() [ 684.503116] env[61839]: ERROR nova.compute.manager [instance: ad32bc49-5e52-468a-9d93-390c8649dcae] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 684.503116] env[61839]: ERROR nova.compute.manager [instance: ad32bc49-5e52-468a-9d93-390c8649dcae] result = function(*args, **kwargs) [ 684.503116] env[61839]: ERROR nova.compute.manager [instance: ad32bc49-5e52-468a-9d93-390c8649dcae] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 684.503116] env[61839]: ERROR nova.compute.manager [instance: ad32bc49-5e52-468a-9d93-390c8649dcae] return func(*args, **kwargs) [ 684.503116] env[61839]: ERROR nova.compute.manager [instance: ad32bc49-5e52-468a-9d93-390c8649dcae] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 684.503116] env[61839]: ERROR nova.compute.manager [instance: ad32bc49-5e52-468a-9d93-390c8649dcae] raise e [ 684.503116] env[61839]: ERROR nova.compute.manager [instance: ad32bc49-5e52-468a-9d93-390c8649dcae] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 684.503116] env[61839]: ERROR nova.compute.manager [instance: ad32bc49-5e52-468a-9d93-390c8649dcae] nwinfo = self.network_api.allocate_for_instance( [ 684.503116] env[61839]: ERROR nova.compute.manager [instance: ad32bc49-5e52-468a-9d93-390c8649dcae] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 684.503116] env[61839]: ERROR nova.compute.manager [instance: ad32bc49-5e52-468a-9d93-390c8649dcae] created_port_ids = self._update_ports_for_instance( [ 684.503574] env[61839]: ERROR nova.compute.manager [instance: ad32bc49-5e52-468a-9d93-390c8649dcae] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 684.503574] env[61839]: ERROR nova.compute.manager [instance: ad32bc49-5e52-468a-9d93-390c8649dcae] with excutils.save_and_reraise_exception(): [ 684.503574] env[61839]: ERROR nova.compute.manager [instance: ad32bc49-5e52-468a-9d93-390c8649dcae] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 684.503574] env[61839]: ERROR nova.compute.manager [instance: ad32bc49-5e52-468a-9d93-390c8649dcae] self.force_reraise() [ 684.503574] env[61839]: ERROR nova.compute.manager [instance: ad32bc49-5e52-468a-9d93-390c8649dcae] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 684.503574] env[61839]: ERROR nova.compute.manager [instance: ad32bc49-5e52-468a-9d93-390c8649dcae] raise self.value [ 684.503574] env[61839]: ERROR nova.compute.manager [instance: ad32bc49-5e52-468a-9d93-390c8649dcae] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 684.503574] env[61839]: ERROR nova.compute.manager [instance: ad32bc49-5e52-468a-9d93-390c8649dcae] updated_port = self._update_port( [ 684.503574] env[61839]: ERROR nova.compute.manager [instance: ad32bc49-5e52-468a-9d93-390c8649dcae] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 684.503574] 
env[61839]: ERROR nova.compute.manager [instance: ad32bc49-5e52-468a-9d93-390c8649dcae] _ensure_no_port_binding_failure(port) [ 684.503574] env[61839]: ERROR nova.compute.manager [instance: ad32bc49-5e52-468a-9d93-390c8649dcae] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 684.503574] env[61839]: ERROR nova.compute.manager [instance: ad32bc49-5e52-468a-9d93-390c8649dcae] raise exception.PortBindingFailed(port_id=port['id']) [ 684.504283] env[61839]: ERROR nova.compute.manager [instance: ad32bc49-5e52-468a-9d93-390c8649dcae] nova.exception.PortBindingFailed: Binding failed for port 06269c90-ada4-4d3c-9956-7781981d3128, please check neutron logs for more information. [ 684.504283] env[61839]: ERROR nova.compute.manager [instance: ad32bc49-5e52-468a-9d93-390c8649dcae] [ 684.504283] env[61839]: INFO nova.compute.manager [None req-176fdd3e-e9a1-456d-89bc-e1597ce200e4 tempest-ImagesOneServerTestJSON-1580027351 tempest-ImagesOneServerTestJSON-1580027351-project-member] [instance: ad32bc49-5e52-468a-9d93-390c8649dcae] Terminating instance [ 684.504283] env[61839]: DEBUG oslo_concurrency.lockutils [None req-176fdd3e-e9a1-456d-89bc-e1597ce200e4 tempest-ImagesOneServerTestJSON-1580027351 tempest-ImagesOneServerTestJSON-1580027351-project-member] Acquiring lock "refresh_cache-ad32bc49-5e52-468a-9d93-390c8649dcae" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 684.557841] env[61839]: DEBUG oslo_concurrency.lockutils [None req-d67a25ce-8bd3-48bd-87b7-725457f40be1 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 684.868118] env[61839]: DEBUG nova.network.neutron [req-c9c37774-e588-4627-9b53-293a062493fa req-2c675fc2-c552-4c93-ac11-26907aff5e17 service nova] [instance: ad32bc49-5e52-468a-9d93-390c8649dcae] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 684.918401] env[61839]: DEBUG nova.network.neutron [None req-b7e16cf6-cab6-4bac-b016-9355722d13b1 tempest-ServerPasswordTestJSON-1977854233 tempest-ServerPasswordTestJSON-1977854233-project-member] [instance: 2670f16e-4c44-4b88-937e-9e491f599acb] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 684.960924] env[61839]: DEBUG oslo_vmware.api [None req-bf14e7b3-0ebe-4036-b698-ec3c92db8125 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] Task: {'id': task-1314305, 'name': ReconfigVM_Task, 'duration_secs': 0.278863} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 684.963828] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-bf14e7b3-0ebe-4036-b698-ec3c92db8125 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] [instance: 81ba4888-4b21-410f-ab86-a3068995836f] Reconfigured VM instance instance-00000020 to attach disk [datastore1] 81ba4888-4b21-410f-ab86-a3068995836f/81ba4888-4b21-410f-ab86-a3068995836f.vmdk or device None with type sparse {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 684.964668] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c9d2fb6b-76ca-490b-bef5-b0d513755f6f {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.970861] env[61839]: DEBUG oslo_vmware.api [None req-bf14e7b3-0ebe-4036-b698-ec3c92db8125 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] Waiting for the task: (returnval){ [ 684.970861] env[61839]: value = "task-1314306" [ 684.970861] env[61839]: _type = "Task" [ 684.970861] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 684.980891] env[61839]: DEBUG oslo_vmware.api [None req-bf14e7b3-0ebe-4036-b698-ec3c92db8125 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] Task: {'id': task-1314306, 'name': Rename_Task} progress is 5%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 685.251260] env[61839]: DEBUG nova.network.neutron [req-c9c37774-e588-4627-9b53-293a062493fa req-2c675fc2-c552-4c93-ac11-26907aff5e17 service nova] [instance: ad32bc49-5e52-468a-9d93-390c8649dcae] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 685.278482] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d2f3c62-6639-4256-8afc-3fa9ca010ec7 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.286908] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2382242b-7c67-4106-a9e9-826b97c94cc6 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.291780] env[61839]: DEBUG nova.network.neutron [None req-b7e16cf6-cab6-4bac-b016-9355722d13b1 tempest-ServerPasswordTestJSON-1977854233 tempest-ServerPasswordTestJSON-1977854233-project-member] [instance: 2670f16e-4c44-4b88-937e-9e491f599acb] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 685.325781] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ad2f20f-9d12-47bc-9a29-aff3df6aacc6 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.333543] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d5bf32d-4b1b-4c3a-a0ff-2d98528fe65a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.346893] env[61839]: DEBUG 
nova.compute.provider_tree [None req-fcabcba5-2e8f-494e-bc5c-79e52812344e tempest-ServerActionsTestJSON-1845148809 tempest-ServerActionsTestJSON-1845148809-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 685.480864] env[61839]: DEBUG oslo_vmware.api [None req-bf14e7b3-0ebe-4036-b698-ec3c92db8125 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] Task: {'id': task-1314306, 'name': Rename_Task, 'duration_secs': 0.130717} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 685.481337] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf14e7b3-0ebe-4036-b698-ec3c92db8125 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] [instance: 81ba4888-4b21-410f-ab86-a3068995836f] Powering on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}}
[ 685.481692] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-78be4e38-3799-49bb-ab0b-f8b19aef55ff {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 685.488140] env[61839]: DEBUG oslo_vmware.api [None req-bf14e7b3-0ebe-4036-b698-ec3c92db8125 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] Waiting for the task: (returnval){
[ 685.488140] env[61839]: value = "task-1314307"
[ 685.488140] env[61839]: _type = "Task"
[ 685.488140] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 685.495796] env[61839]: DEBUG oslo_vmware.api [None req-bf14e7b3-0ebe-4036-b698-ec3c92db8125 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] Task: {'id': task-1314307, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 685.756362] env[61839]: DEBUG oslo_concurrency.lockutils [req-c9c37774-e588-4627-9b53-293a062493fa req-2c675fc2-c552-4c93-ac11-26907aff5e17 service nova] Releasing lock "refresh_cache-ad32bc49-5e52-468a-9d93-390c8649dcae" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 685.756850] env[61839]: DEBUG oslo_concurrency.lockutils [None req-176fdd3e-e9a1-456d-89bc-e1597ce200e4 tempest-ImagesOneServerTestJSON-1580027351 tempest-ImagesOneServerTestJSON-1580027351-project-member] Acquired lock "refresh_cache-ad32bc49-5e52-468a-9d93-390c8649dcae" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 685.757094] env[61839]: DEBUG nova.network.neutron [None req-176fdd3e-e9a1-456d-89bc-e1597ce200e4 tempest-ImagesOneServerTestJSON-1580027351 tempest-ImagesOneServerTestJSON-1580027351-project-member] [instance: ad32bc49-5e52-468a-9d93-390c8649dcae] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}}
[ 685.795014] env[61839]: DEBUG oslo_concurrency.lockutils [None req-b7e16cf6-cab6-4bac-b016-9355722d13b1 tempest-ServerPasswordTestJSON-1977854233 tempest-ServerPasswordTestJSON-1977854233-project-member] Releasing lock "refresh_cache-2670f16e-4c44-4b88-937e-9e491f599acb" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 685.795319] env[61839]: DEBUG nova.compute.manager [None req-b7e16cf6-cab6-4bac-b016-9355722d13b1 tempest-ServerPasswordTestJSON-1977854233 tempest-ServerPasswordTestJSON-1977854233-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61839) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}}
[ 685.795509] env[61839]: DEBUG nova.compute.manager [None req-b7e16cf6-cab6-4bac-b016-9355722d13b1 tempest-ServerPasswordTestJSON-1977854233 tempest-ServerPasswordTestJSON-1977854233-project-member] [instance: 2670f16e-4c44-4b88-937e-9e491f599acb] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}}
[ 685.795675] env[61839]: DEBUG nova.network.neutron [None req-b7e16cf6-cab6-4bac-b016-9355722d13b1 tempest-ServerPasswordTestJSON-1977854233 tempest-ServerPasswordTestJSON-1977854233-project-member] [instance: 2670f16e-4c44-4b88-937e-9e491f599acb] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}}
[ 685.816229] env[61839]: DEBUG nova.network.neutron [None req-b7e16cf6-cab6-4bac-b016-9355722d13b1 tempest-ServerPasswordTestJSON-1977854233 tempest-ServerPasswordTestJSON-1977854233-project-member] [instance: 2670f16e-4c44-4b88-937e-9e491f599acb] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 685.851207] env[61839]: DEBUG nova.scheduler.client.report [None req-fcabcba5-2e8f-494e-bc5c-79e52812344e tempest-ServerActionsTestJSON-1845148809 tempest-ServerActionsTestJSON-1845148809-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 685.999402] env[61839]: DEBUG oslo_vmware.api [None req-bf14e7b3-0ebe-4036-b698-ec3c92db8125 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] Task: {'id': task-1314307, 'name': PowerOnVM_Task, 'duration_secs': 0.447448} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 685.999658] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf14e7b3-0ebe-4036-b698-ec3c92db8125 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] [instance: 81ba4888-4b21-410f-ab86-a3068995836f] Powered on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}}
[ 685.999861] env[61839]: DEBUG nova.compute.manager [None req-bf14e7b3-0ebe-4036-b698-ec3c92db8125 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] [instance: 81ba4888-4b21-410f-ab86-a3068995836f] Checking state {{(pid=61839) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}}
[ 686.000623] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5dd72624-9484-4a4f-9833-895e05ea3e75 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 686.290271] env[61839]: DEBUG nova.network.neutron [None req-176fdd3e-e9a1-456d-89bc-e1597ce200e4 tempest-ImagesOneServerTestJSON-1580027351 tempest-ImagesOneServerTestJSON-1580027351-project-member] [instance: ad32bc49-5e52-468a-9d93-390c8649dcae] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 686.318738] env[61839]: DEBUG nova.network.neutron [None req-b7e16cf6-cab6-4bac-b016-9355722d13b1 tempest-ServerPasswordTestJSON-1977854233 tempest-ServerPasswordTestJSON-1977854233-project-member] [instance: 2670f16e-4c44-4b88-937e-9e491f599acb] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 686.356991] env[61839]: DEBUG oslo_concurrency.lockutils [None req-fcabcba5-2e8f-494e-bc5c-79e52812344e tempest-ServerActionsTestJSON-1845148809 tempest-ServerActionsTestJSON-1845148809-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.996s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 686.357657] env[61839]: ERROR nova.compute.manager [None req-fcabcba5-2e8f-494e-bc5c-79e52812344e tempest-ServerActionsTestJSON-1845148809 tempest-ServerActionsTestJSON-1845148809-project-member] [instance: 2432a14e-ec45-452c-9592-de690dbc102e] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port a1a17fb4-0f77-408c-9dd1-324dedc031a7, please check neutron logs for more information.
[ 686.357657] env[61839]: ERROR nova.compute.manager [instance: 2432a14e-ec45-452c-9592-de690dbc102e] Traceback (most recent call last):
[ 686.357657] env[61839]: ERROR nova.compute.manager [instance: 2432a14e-ec45-452c-9592-de690dbc102e]   File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance
[ 686.357657] env[61839]: ERROR nova.compute.manager [instance: 2432a14e-ec45-452c-9592-de690dbc102e]     self.driver.spawn(context, instance, image_meta,
[ 686.357657] env[61839]: ERROR nova.compute.manager [instance: 2432a14e-ec45-452c-9592-de690dbc102e]   File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn
[ 686.357657] env[61839]: ERROR nova.compute.manager [instance: 2432a14e-ec45-452c-9592-de690dbc102e]     self._vmops.spawn(context, instance, image_meta, injected_files,
[ 686.357657] env[61839]: ERROR nova.compute.manager [instance: 2432a14e-ec45-452c-9592-de690dbc102e]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn
[ 686.357657] env[61839]: ERROR nova.compute.manager [instance: 2432a14e-ec45-452c-9592-de690dbc102e]     vm_ref = self.build_virtual_machine(instance,
[ 686.357657] env[61839]: ERROR nova.compute.manager [instance: 2432a14e-ec45-452c-9592-de690dbc102e]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine
[ 686.357657] env[61839]: ERROR nova.compute.manager [instance: 2432a14e-ec45-452c-9592-de690dbc102e]     vif_infos = vmwarevif.get_vif_info(self._session,
[ 686.357657] env[61839]: ERROR nova.compute.manager [instance: 2432a14e-ec45-452c-9592-de690dbc102e]   File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info
[ 686.358030] env[61839]: ERROR nova.compute.manager [instance: 2432a14e-ec45-452c-9592-de690dbc102e]     for vif in network_info:
[ 686.358030] env[61839]: ERROR nova.compute.manager [instance: 2432a14e-ec45-452c-9592-de690dbc102e]   File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__
[ 686.358030] env[61839]: ERROR nova.compute.manager [instance: 2432a14e-ec45-452c-9592-de690dbc102e]     return self._sync_wrapper(fn, *args, **kwargs)
[ 686.358030] env[61839]: ERROR nova.compute.manager [instance: 2432a14e-ec45-452c-9592-de690dbc102e]   File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper
[ 686.358030] env[61839]: ERROR nova.compute.manager [instance: 2432a14e-ec45-452c-9592-de690dbc102e]     self.wait()
[ 686.358030] env[61839]: ERROR nova.compute.manager [instance: 2432a14e-ec45-452c-9592-de690dbc102e]   File "/opt/stack/nova/nova/network/model.py", line 635, in wait
[ 686.358030] env[61839]: ERROR nova.compute.manager [instance: 2432a14e-ec45-452c-9592-de690dbc102e]     self[:] = self._gt.wait()
[ 686.358030] env[61839]: ERROR nova.compute.manager [instance: 2432a14e-ec45-452c-9592-de690dbc102e]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait
[ 686.358030] env[61839]: ERROR nova.compute.manager [instance: 2432a14e-ec45-452c-9592-de690dbc102e]     return self._exit_event.wait()
[ 686.358030] env[61839]: ERROR nova.compute.manager [instance: 2432a14e-ec45-452c-9592-de690dbc102e]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 686.358030] env[61839]: ERROR nova.compute.manager [instance: 2432a14e-ec45-452c-9592-de690dbc102e]     result = hub.switch()
[ 686.358030] env[61839]: ERROR nova.compute.manager [instance: 2432a14e-ec45-452c-9592-de690dbc102e]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 686.358030] env[61839]: ERROR nova.compute.manager [instance: 2432a14e-ec45-452c-9592-de690dbc102e]     return self.greenlet.switch()
[ 686.358347] env[61839]: ERROR nova.compute.manager [instance: 2432a14e-ec45-452c-9592-de690dbc102e]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 686.358347] env[61839]: ERROR nova.compute.manager [instance: 2432a14e-ec45-452c-9592-de690dbc102e]     result = function(*args, **kwargs)
[ 686.358347] env[61839]: ERROR nova.compute.manager [instance: 2432a14e-ec45-452c-9592-de690dbc102e]   File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 686.358347] env[61839]: ERROR nova.compute.manager [instance: 2432a14e-ec45-452c-9592-de690dbc102e]     return func(*args, **kwargs)
[ 686.358347] env[61839]: ERROR nova.compute.manager [instance: 2432a14e-ec45-452c-9592-de690dbc102e]   File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async
[ 686.358347] env[61839]: ERROR nova.compute.manager [instance: 2432a14e-ec45-452c-9592-de690dbc102e]     raise e
[ 686.358347] env[61839]: ERROR nova.compute.manager [instance: 2432a14e-ec45-452c-9592-de690dbc102e]   File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async
[ 686.358347] env[61839]: ERROR nova.compute.manager [instance: 2432a14e-ec45-452c-9592-de690dbc102e]     nwinfo = self.network_api.allocate_for_instance(
[ 686.358347] env[61839]: ERROR nova.compute.manager [instance: 2432a14e-ec45-452c-9592-de690dbc102e]   File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance
[ 686.358347] env[61839]: ERROR nova.compute.manager [instance: 2432a14e-ec45-452c-9592-de690dbc102e]     created_port_ids = self._update_ports_for_instance(
[ 686.358347] env[61839]: ERROR nova.compute.manager [instance: 2432a14e-ec45-452c-9592-de690dbc102e]   File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance
[ 686.358347] env[61839]: ERROR nova.compute.manager [instance: 2432a14e-ec45-452c-9592-de690dbc102e]     with excutils.save_and_reraise_exception():
[ 686.358347] env[61839]: ERROR nova.compute.manager [instance: 2432a14e-ec45-452c-9592-de690dbc102e]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 686.358671] env[61839]: ERROR nova.compute.manager [instance: 2432a14e-ec45-452c-9592-de690dbc102e]     self.force_reraise()
[ 686.358671] env[61839]: ERROR nova.compute.manager [instance: 2432a14e-ec45-452c-9592-de690dbc102e]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 686.358671] env[61839]: ERROR nova.compute.manager [instance: 2432a14e-ec45-452c-9592-de690dbc102e]     raise self.value
[ 686.358671] env[61839]: ERROR nova.compute.manager [instance: 2432a14e-ec45-452c-9592-de690dbc102e]   File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance
[ 686.358671] env[61839]: ERROR nova.compute.manager [instance: 2432a14e-ec45-452c-9592-de690dbc102e]     updated_port = self._update_port(
[ 686.358671] env[61839]: ERROR nova.compute.manager [instance: 2432a14e-ec45-452c-9592-de690dbc102e]   File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port
[ 686.358671] env[61839]: ERROR nova.compute.manager [instance: 2432a14e-ec45-452c-9592-de690dbc102e]     _ensure_no_port_binding_failure(port)
[ 686.358671] env[61839]: ERROR nova.compute.manager [instance: 2432a14e-ec45-452c-9592-de690dbc102e]   File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure
[ 686.358671] env[61839]: ERROR nova.compute.manager [instance: 2432a14e-ec45-452c-9592-de690dbc102e]     raise exception.PortBindingFailed(port_id=port['id'])
[ 686.358671] env[61839]: ERROR nova.compute.manager [instance: 2432a14e-ec45-452c-9592-de690dbc102e] nova.exception.PortBindingFailed: Binding failed for port a1a17fb4-0f77-408c-9dd1-324dedc031a7, please check neutron logs for more information.
[ 686.358671] env[61839]: ERROR nova.compute.manager [instance: 2432a14e-ec45-452c-9592-de690dbc102e]
[ 686.358980] env[61839]: DEBUG nova.compute.utils [None req-fcabcba5-2e8f-494e-bc5c-79e52812344e tempest-ServerActionsTestJSON-1845148809 tempest-ServerActionsTestJSON-1845148809-project-member] [instance: 2432a14e-ec45-452c-9592-de690dbc102e] Binding failed for port a1a17fb4-0f77-408c-9dd1-324dedc031a7, please check neutron logs for more information. {{(pid=61839) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}}
[ 686.360053] env[61839]: DEBUG oslo_concurrency.lockutils [None req-92400e08-1071-41df-9108-be5fb5ffb9e6 tempest-MigrationsAdminTest-1877210458 tempest-MigrationsAdminTest-1877210458-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.332s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 686.361169] env[61839]: INFO nova.compute.claims [None req-92400e08-1071-41df-9108-be5fb5ffb9e6 tempest-MigrationsAdminTest-1877210458 tempest-MigrationsAdminTest-1877210458-project-member] [instance: 83270007-2cbd-49a5-b3a1-1ad58ea2a66c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 686.363743] env[61839]: DEBUG nova.compute.manager [None req-fcabcba5-2e8f-494e-bc5c-79e52812344e tempest-ServerActionsTestJSON-1845148809 tempest-ServerActionsTestJSON-1845148809-project-member] [instance: 2432a14e-ec45-452c-9592-de690dbc102e] Build of instance 2432a14e-ec45-452c-9592-de690dbc102e was re-scheduled: Binding failed for port a1a17fb4-0f77-408c-9dd1-324dedc031a7, please check neutron logs for more information. {{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}}
[ 686.364335] env[61839]: DEBUG nova.compute.manager [None req-fcabcba5-2e8f-494e-bc5c-79e52812344e tempest-ServerActionsTestJSON-1845148809 tempest-ServerActionsTestJSON-1845148809-project-member] [instance: 2432a14e-ec45-452c-9592-de690dbc102e] Unplugging VIFs for instance {{(pid=61839) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}}
[ 686.364559] env[61839]: DEBUG oslo_concurrency.lockutils [None req-fcabcba5-2e8f-494e-bc5c-79e52812344e tempest-ServerActionsTestJSON-1845148809 tempest-ServerActionsTestJSON-1845148809-project-member] Acquiring lock "refresh_cache-2432a14e-ec45-452c-9592-de690dbc102e" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 686.364705] env[61839]: DEBUG oslo_concurrency.lockutils [None req-fcabcba5-2e8f-494e-bc5c-79e52812344e tempest-ServerActionsTestJSON-1845148809 tempest-ServerActionsTestJSON-1845148809-project-member] Acquired lock "refresh_cache-2432a14e-ec45-452c-9592-de690dbc102e" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 686.364861] env[61839]: DEBUG nova.network.neutron [None req-fcabcba5-2e8f-494e-bc5c-79e52812344e tempest-ServerActionsTestJSON-1845148809 tempest-ServerActionsTestJSON-1845148809-project-member] [instance: 2432a14e-ec45-452c-9592-de690dbc102e] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}}
[ 686.403774] env[61839]: DEBUG nova.compute.manager [req-fa2c70d8-4d50-41e0-a9a6-a0409dee4638 req-0b5a4e28-0a4a-4cff-96e0-213252d0cae6 service nova] [instance: ad32bc49-5e52-468a-9d93-390c8649dcae] Received event network-vif-deleted-06269c90-ada4-4d3c-9956-7781981d3128 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}}
[ 686.407052] env[61839]: DEBUG nova.network.neutron [None req-176fdd3e-e9a1-456d-89bc-e1597ce200e4 tempest-ImagesOneServerTestJSON-1580027351 tempest-ImagesOneServerTestJSON-1580027351-project-member] [instance: ad32bc49-5e52-468a-9d93-390c8649dcae] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 686.515956] env[61839]: DEBUG oslo_concurrency.lockutils [None req-bf14e7b3-0ebe-4036-b698-ec3c92db8125 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 686.821894] env[61839]: INFO nova.compute.manager [None req-b7e16cf6-cab6-4bac-b016-9355722d13b1 tempest-ServerPasswordTestJSON-1977854233 tempest-ServerPasswordTestJSON-1977854233-project-member] [instance: 2670f16e-4c44-4b88-937e-9e491f599acb] Took 1.03 seconds to deallocate network for instance.
[ 686.909124] env[61839]: DEBUG nova.network.neutron [None req-fcabcba5-2e8f-494e-bc5c-79e52812344e tempest-ServerActionsTestJSON-1845148809 tempest-ServerActionsTestJSON-1845148809-project-member] [instance: 2432a14e-ec45-452c-9592-de690dbc102e] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 686.911290] env[61839]: DEBUG oslo_concurrency.lockutils [None req-176fdd3e-e9a1-456d-89bc-e1597ce200e4 tempest-ImagesOneServerTestJSON-1580027351 tempest-ImagesOneServerTestJSON-1580027351-project-member] Releasing lock "refresh_cache-ad32bc49-5e52-468a-9d93-390c8649dcae" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 686.911705] env[61839]: DEBUG nova.compute.manager [None req-176fdd3e-e9a1-456d-89bc-e1597ce200e4 tempest-ImagesOneServerTestJSON-1580027351 tempest-ImagesOneServerTestJSON-1580027351-project-member] [instance: ad32bc49-5e52-468a-9d93-390c8649dcae] Start destroying the instance on the hypervisor. {{(pid=61839) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}}
[ 686.911909] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-176fdd3e-e9a1-456d-89bc-e1597ce200e4 tempest-ImagesOneServerTestJSON-1580027351 tempest-ImagesOneServerTestJSON-1580027351-project-member] [instance: ad32bc49-5e52-468a-9d93-390c8649dcae] Destroying instance {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}}
[ 686.912499] env[61839]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e1b63c05-5b0f-4e0c-a4bf-1fd0fcd37695 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 686.921658] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-811d4432-ec08-4124-b01e-e6b4eda5b6f1 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 686.944822] env[61839]: WARNING nova.virt.vmwareapi.vmops [None req-176fdd3e-e9a1-456d-89bc-e1597ce200e4 tempest-ImagesOneServerTestJSON-1580027351 tempest-ImagesOneServerTestJSON-1580027351-project-member] [instance: ad32bc49-5e52-468a-9d93-390c8649dcae] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance ad32bc49-5e52-468a-9d93-390c8649dcae could not be found.
[ 686.945060] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-176fdd3e-e9a1-456d-89bc-e1597ce200e4 tempest-ImagesOneServerTestJSON-1580027351 tempest-ImagesOneServerTestJSON-1580027351-project-member] [instance: ad32bc49-5e52-468a-9d93-390c8649dcae] Instance destroyed {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}}
[ 686.945251] env[61839]: INFO nova.compute.manager [None req-176fdd3e-e9a1-456d-89bc-e1597ce200e4 tempest-ImagesOneServerTestJSON-1580027351 tempest-ImagesOneServerTestJSON-1580027351-project-member] [instance: ad32bc49-5e52-468a-9d93-390c8649dcae] Took 0.03 seconds to destroy the instance on the hypervisor.
[ 686.945502] env[61839]: DEBUG oslo.service.loopingcall [None req-176fdd3e-e9a1-456d-89bc-e1597ce200e4 tempest-ImagesOneServerTestJSON-1580027351 tempest-ImagesOneServerTestJSON-1580027351-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 686.946064] env[61839]: DEBUG nova.compute.manager [-] [instance: ad32bc49-5e52-468a-9d93-390c8649dcae] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}}
[ 686.946168] env[61839]: DEBUG nova.network.neutron [-] [instance: ad32bc49-5e52-468a-9d93-390c8649dcae] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}}
[ 686.985883] env[61839]: DEBUG nova.network.neutron [-] [instance: ad32bc49-5e52-468a-9d93-390c8649dcae] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 687.001179] env[61839]: DEBUG oslo_concurrency.lockutils [None req-e5615962-6551-40fe-a767-15a9fd175176 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] Acquiring lock "81ba4888-4b21-410f-ab86-a3068995836f" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 687.001179] env[61839]: DEBUG oslo_concurrency.lockutils [None req-e5615962-6551-40fe-a767-15a9fd175176 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] Lock "81ba4888-4b21-410f-ab86-a3068995836f" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 687.001179] env[61839]: DEBUG oslo_concurrency.lockutils [None req-e5615962-6551-40fe-a767-15a9fd175176 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] Acquiring lock "81ba4888-4b21-410f-ab86-a3068995836f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 687.001347] env[61839]: DEBUG oslo_concurrency.lockutils [None req-e5615962-6551-40fe-a767-15a9fd175176 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] Lock "81ba4888-4b21-410f-ab86-a3068995836f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 687.001482] env[61839]: DEBUG oslo_concurrency.lockutils [None req-e5615962-6551-40fe-a767-15a9fd175176 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] Lock "81ba4888-4b21-410f-ab86-a3068995836f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 687.003923] env[61839]: INFO nova.compute.manager [None req-e5615962-6551-40fe-a767-15a9fd175176 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] [instance: 81ba4888-4b21-410f-ab86-a3068995836f] Terminating instance
[ 687.005517] env[61839]: DEBUG oslo_concurrency.lockutils [None req-e5615962-6551-40fe-a767-15a9fd175176 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] Acquiring lock "refresh_cache-81ba4888-4b21-410f-ab86-a3068995836f" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 687.005670] env[61839]: DEBUG oslo_concurrency.lockutils [None req-e5615962-6551-40fe-a767-15a9fd175176 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] Acquired lock "refresh_cache-81ba4888-4b21-410f-ab86-a3068995836f" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 687.005832] env[61839]: DEBUG nova.network.neutron [None req-e5615962-6551-40fe-a767-15a9fd175176 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] [instance: 81ba4888-4b21-410f-ab86-a3068995836f] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}}
[ 687.102898] env[61839]: DEBUG nova.network.neutron [None req-fcabcba5-2e8f-494e-bc5c-79e52812344e tempest-ServerActionsTestJSON-1845148809 tempest-ServerActionsTestJSON-1845148809-project-member] [instance: 2432a14e-ec45-452c-9592-de690dbc102e] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 687.488413] env[61839]: DEBUG nova.network.neutron [-] [instance: ad32bc49-5e52-468a-9d93-390c8649dcae] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 687.528101] env[61839]: DEBUG nova.network.neutron [None req-e5615962-6551-40fe-a767-15a9fd175176 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] [instance: 81ba4888-4b21-410f-ab86-a3068995836f] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 687.606560] env[61839]: DEBUG oslo_concurrency.lockutils [None req-fcabcba5-2e8f-494e-bc5c-79e52812344e tempest-ServerActionsTestJSON-1845148809 tempest-ServerActionsTestJSON-1845148809-project-member] Releasing lock "refresh_cache-2432a14e-ec45-452c-9592-de690dbc102e" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 687.606791] env[61839]: DEBUG nova.compute.manager [None req-fcabcba5-2e8f-494e-bc5c-79e52812344e tempest-ServerActionsTestJSON-1845148809 tempest-ServerActionsTestJSON-1845148809-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61839) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}}
[ 687.606972] env[61839]: DEBUG nova.compute.manager [None req-fcabcba5-2e8f-494e-bc5c-79e52812344e tempest-ServerActionsTestJSON-1845148809 tempest-ServerActionsTestJSON-1845148809-project-member] [instance: 2432a14e-ec45-452c-9592-de690dbc102e] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}}
[ 687.607153] env[61839]: DEBUG nova.network.neutron [None req-fcabcba5-2e8f-494e-bc5c-79e52812344e tempest-ServerActionsTestJSON-1845148809 tempest-ServerActionsTestJSON-1845148809-project-member] [instance: 2432a14e-ec45-452c-9592-de690dbc102e] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}}
[ 687.648152] env[61839]: DEBUG nova.network.neutron [None req-fcabcba5-2e8f-494e-bc5c-79e52812344e tempest-ServerActionsTestJSON-1845148809 tempest-ServerActionsTestJSON-1845148809-project-member] [instance: 2432a14e-ec45-452c-9592-de690dbc102e] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 687.656637] env[61839]: DEBUG nova.network.neutron [None req-e5615962-6551-40fe-a767-15a9fd175176 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] [instance: 81ba4888-4b21-410f-ab86-a3068995836f] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 687.806726] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b31ed41-4236-4d1d-ba3c-3fb23a18b806 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 687.814609] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2dc5cb38-d5fa-472a-9c15-20f75b32af9c {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 687.850666] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eca19560-ae3e-4efd-9abb-c0c3a4062b35 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 687.858511] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e51850a4-c4c1-4390-9237-906beb09b7bd {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 687.871401] env[61839]: DEBUG nova.compute.provider_tree [None req-92400e08-1071-41df-9108-be5fb5ffb9e6 tempest-MigrationsAdminTest-1877210458 tempest-MigrationsAdminTest-1877210458-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 687.873425] env[61839]: INFO nova.scheduler.client.report [None req-b7e16cf6-cab6-4bac-b016-9355722d13b1 tempest-ServerPasswordTestJSON-1977854233 tempest-ServerPasswordTestJSON-1977854233-project-member] Deleted allocations for instance 2670f16e-4c44-4b88-937e-9e491f599acb
[ 687.991774] env[61839]: INFO nova.compute.manager [-] [instance: ad32bc49-5e52-468a-9d93-390c8649dcae] Took 1.04 seconds to deallocate network for instance.
[ 687.994172] env[61839]: DEBUG nova.compute.claims [None req-176fdd3e-e9a1-456d-89bc-e1597ce200e4 tempest-ImagesOneServerTestJSON-1580027351 tempest-ImagesOneServerTestJSON-1580027351-project-member] [instance: ad32bc49-5e52-468a-9d93-390c8649dcae] Aborting claim: {{(pid=61839) abort /opt/stack/nova/nova/compute/claims.py:85}}
[ 687.994450] env[61839]: DEBUG oslo_concurrency.lockutils [None req-176fdd3e-e9a1-456d-89bc-e1597ce200e4 tempest-ImagesOneServerTestJSON-1580027351 tempest-ImagesOneServerTestJSON-1580027351-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 688.148722] env[61839]: DEBUG nova.network.neutron [None req-fcabcba5-2e8f-494e-bc5c-79e52812344e tempest-ServerActionsTestJSON-1845148809 tempest-ServerActionsTestJSON-1845148809-project-member] [instance: 2432a14e-ec45-452c-9592-de690dbc102e] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 688.163222] env[61839]: DEBUG oslo_concurrency.lockutils [None req-e5615962-6551-40fe-a767-15a9fd175176 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] Releasing lock "refresh_cache-81ba4888-4b21-410f-ab86-a3068995836f" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 688.163652] env[61839]: DEBUG nova.compute.manager [None req-e5615962-6551-40fe-a767-15a9fd175176 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] [instance: 81ba4888-4b21-410f-ab86-a3068995836f] Start destroying the instance on the hypervisor. {{(pid=61839) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}}
[ 688.163840] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-e5615962-6551-40fe-a767-15a9fd175176 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] [instance: 81ba4888-4b21-410f-ab86-a3068995836f] Destroying instance {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}}
[ 688.164990] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7f3d15d-435d-45af-a893-67894e935c3e {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 688.177839] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-e5615962-6551-40fe-a767-15a9fd175176 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] [instance: 81ba4888-4b21-410f-ab86-a3068995836f] Powering off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}}
[ 688.178906] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9bf566a6-ff50-4059-8284-a9dddfe2f19c {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 688.186542] env[61839]: DEBUG oslo_vmware.api [None req-e5615962-6551-40fe-a767-15a9fd175176 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] Waiting for the task: (returnval){
[ 688.186542] env[61839]: value = "task-1314308"
[ 688.186542] env[61839]: _type = "Task"
[ 688.186542] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 688.195656] env[61839]: DEBUG oslo_vmware.api [None req-e5615962-6551-40fe-a767-15a9fd175176 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] Task: {'id': task-1314308, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 688.380294] env[61839]: DEBUG nova.scheduler.client.report [None req-92400e08-1071-41df-9108-be5fb5ffb9e6 tempest-MigrationsAdminTest-1877210458 tempest-MigrationsAdminTest-1877210458-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 688.383941] env[61839]: DEBUG oslo_concurrency.lockutils [None req-b7e16cf6-cab6-4bac-b016-9355722d13b1 tempest-ServerPasswordTestJSON-1977854233 tempest-ServerPasswordTestJSON-1977854233-project-member] Lock "2670f16e-4c44-4b88-937e-9e491f599acb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 120.701s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 688.654610] env[61839]: INFO nova.compute.manager [None req-fcabcba5-2e8f-494e-bc5c-79e52812344e tempest-ServerActionsTestJSON-1845148809 tempest-ServerActionsTestJSON-1845148809-project-member] [instance: 2432a14e-ec45-452c-9592-de690dbc102e] Took 1.05 seconds to deallocate network for instance.
[ 688.700546] env[61839]: DEBUG oslo_vmware.api [None req-e5615962-6551-40fe-a767-15a9fd175176 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] Task: {'id': task-1314308, 'name': PowerOffVM_Task, 'duration_secs': 0.115604} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 688.700867] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-e5615962-6551-40fe-a767-15a9fd175176 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] [instance: 81ba4888-4b21-410f-ab86-a3068995836f] Powered off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}}
[ 688.701149] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-e5615962-6551-40fe-a767-15a9fd175176 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] [instance: 81ba4888-4b21-410f-ab86-a3068995836f] Unregistering the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}}
[ 688.701310] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d774b445-d54e-4ea6-b23c-719ba3bbfb73 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 688.728578] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-e5615962-6551-40fe-a767-15a9fd175176 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] [instance: 81ba4888-4b21-410f-ab86-a3068995836f] Unregistered the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}}
[ 688.728819] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-e5615962-6551-40fe-a767-15a9fd175176 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] [instance: 81ba4888-4b21-410f-ab86-a3068995836f] Deleting contents of the VM from datastore datastore1 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}}
[ 688.729008] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-e5615962-6551-40fe-a767-15a9fd175176 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] Deleting the datastore file [datastore1] 81ba4888-4b21-410f-ab86-a3068995836f {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}}
[ 688.729382] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-04ccceb5-cb5c-4e37-9739-ef6e9ee8233d {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 688.736022] env[61839]: DEBUG oslo_vmware.api [None req-e5615962-6551-40fe-a767-15a9fd175176 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] Waiting for the task: (returnval){
[ 688.736022] env[61839]: value = "task-1314310"
[ 688.736022] env[61839]: _type = "Task"
[ 688.736022] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 688.744440] env[61839]: DEBUG oslo_vmware.api [None req-e5615962-6551-40fe-a767-15a9fd175176 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] Task: {'id': task-1314310, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 688.885817] env[61839]: DEBUG oslo_concurrency.lockutils [None req-92400e08-1071-41df-9108-be5fb5ffb9e6 tempest-MigrationsAdminTest-1877210458 tempest-MigrationsAdminTest-1877210458-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.526s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 688.886434] env[61839]: DEBUG nova.compute.manager [None req-92400e08-1071-41df-9108-be5fb5ffb9e6 tempest-MigrationsAdminTest-1877210458 tempest-MigrationsAdminTest-1877210458-project-member] [instance: 83270007-2cbd-49a5-b3a1-1ad58ea2a66c] Start building networks asynchronously for instance. {{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}}
[ 688.888929] env[61839]: DEBUG oslo_concurrency.lockutils [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 18.498s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 688.890929] env[61839]: DEBUG nova.compute.manager [None req-f3a9d5df-26df-4d8c-b25e-3cb525f9f793 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] [instance: d608405b-20d9-42ab-97e3-e129f9c1448b] Starting instance... {{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}}
[ 689.253364] env[61839]: DEBUG oslo_vmware.api [None req-e5615962-6551-40fe-a767-15a9fd175176 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] Task: {'id': task-1314310, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.091073} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 689.253364] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-e5615962-6551-40fe-a767-15a9fd175176 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] Deleted the datastore file {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}}
[ 689.253364] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-e5615962-6551-40fe-a767-15a9fd175176 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] [instance: 81ba4888-4b21-410f-ab86-a3068995836f] Deleted contents of the VM from datastore datastore1 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}}
[ 689.253364] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-e5615962-6551-40fe-a767-15a9fd175176 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] [instance: 81ba4888-4b21-410f-ab86-a3068995836f] Instance destroyed {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}}
[ 689.253364] env[61839]: INFO nova.compute.manager [None req-e5615962-6551-40fe-a767-15a9fd175176 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] [instance: 81ba4888-4b21-410f-ab86-a3068995836f] Took 1.09 seconds to destroy the instance on the hypervisor.
[ 689.253862] env[61839]: DEBUG oslo.service.loopingcall [None req-e5615962-6551-40fe-a767-15a9fd175176 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 689.254220] env[61839]: DEBUG nova.compute.manager [-] [instance: 81ba4888-4b21-410f-ab86-a3068995836f] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}}
[ 689.254453] env[61839]: DEBUG nova.network.neutron [-] [instance: 81ba4888-4b21-410f-ab86-a3068995836f] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}}
[ 689.277019] env[61839]: DEBUG nova.network.neutron [-] [instance: 81ba4888-4b21-410f-ab86-a3068995836f] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 689.394283] env[61839]: DEBUG nova.compute.utils [None req-92400e08-1071-41df-9108-be5fb5ffb9e6 tempest-MigrationsAdminTest-1877210458 tempest-MigrationsAdminTest-1877210458-project-member] Using /dev/sd instead of None {{(pid=61839) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}}
[ 689.402730] env[61839]: DEBUG nova.compute.manager [None req-92400e08-1071-41df-9108-be5fb5ffb9e6 tempest-MigrationsAdminTest-1877210458 tempest-MigrationsAdminTest-1877210458-project-member] [instance: 83270007-2cbd-49a5-b3a1-1ad58ea2a66c] Allocating IP information in the background. {{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}}
[ 689.402730] env[61839]: DEBUG nova.network.neutron [None req-92400e08-1071-41df-9108-be5fb5ffb9e6 tempest-MigrationsAdminTest-1877210458 tempest-MigrationsAdminTest-1877210458-project-member] [instance: 83270007-2cbd-49a5-b3a1-1ad58ea2a66c] allocate_for_instance() {{(pid=61839) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}}
[ 689.429791] env[61839]: DEBUG oslo_concurrency.lockutils [None req-f3a9d5df-26df-4d8c-b25e-3cb525f9f793 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 689.472152] env[61839]: DEBUG nova.policy [None req-92400e08-1071-41df-9108-be5fb5ffb9e6 tempest-MigrationsAdminTest-1877210458 tempest-MigrationsAdminTest-1877210458-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '83647c05134346c3a025d32237bbc0ff', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'cd21bf55ac954e8ca9e24a6eb3069e91', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61839) authorize /opt/stack/nova/nova/policy.py:201}}
[ 689.697592] env[61839]: INFO nova.scheduler.client.report [None req-fcabcba5-2e8f-494e-bc5c-79e52812344e tempest-ServerActionsTestJSON-1845148809 tempest-ServerActionsTestJSON-1845148809-project-member] Deleted allocations for instance 2432a14e-ec45-452c-9592-de690dbc102e
[ 689.784454] env[61839]: DEBUG nova.network.neutron [-] [instance: 81ba4888-4b21-410f-ab86-a3068995836f] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 689.901078] env[61839]: DEBUG nova.compute.manager [None req-92400e08-1071-41df-9108-be5fb5ffb9e6 tempest-MigrationsAdminTest-1877210458 tempest-MigrationsAdminTest-1877210458-project-member] [instance: 83270007-2cbd-49a5-b3a1-1ad58ea2a66c] Start building block device mappings for instance. {{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}}
[ 689.933597] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 689.933751] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance 81ba4888-4b21-410f-ab86-a3068995836f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 689.933956] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance 2619b21e-084f-4003-af13-80382bfb1e2f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 689.934106] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance 0ab450ab-6416-464d-8140-a8c320abb69c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 689.934201] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance 3ea7af26-14b2-4371-a4f4-48afc190d4bc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 689.934313] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance ad32bc49-5e52-468a-9d93-390c8649dcae actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 689.934423] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance 83270007-2cbd-49a5-b3a1-1ad58ea2a66c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 690.040657] env[61839]: DEBUG nova.network.neutron [None req-92400e08-1071-41df-9108-be5fb5ffb9e6 tempest-MigrationsAdminTest-1877210458 tempest-MigrationsAdminTest-1877210458-project-member] [instance: 83270007-2cbd-49a5-b3a1-1ad58ea2a66c] Successfully created port: 12769a9a-e673-4b4b-98ff-f989dadd3ac8 {{(pid=61839) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}}
[ 690.090866] env[61839]: DEBUG oslo_concurrency.lockutils [None req-67f27bd5-5058-4162-a21b-0d02d096f4b9 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Acquiring lock "a661cc10-5c4e-421b-b70b-189f0a613e8a" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 690.090866] env[61839]: DEBUG oslo_concurrency.lockutils [None req-67f27bd5-5058-4162-a21b-0d02d096f4b9 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Lock "a661cc10-5c4e-421b-b70b-189f0a613e8a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 690.207939] env[61839]: DEBUG oslo_concurrency.lockutils [None req-fcabcba5-2e8f-494e-bc5c-79e52812344e tempest-ServerActionsTestJSON-1845148809 tempest-ServerActionsTestJSON-1845148809-project-member] Lock "2432a14e-ec45-452c-9592-de690dbc102e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 121.649s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 690.287234] env[61839]: INFO nova.compute.manager [-] [instance: 81ba4888-4b21-410f-ab86-a3068995836f] Took 1.03 seconds to deallocate network for instance.
[ 690.439775] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance 5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 690.710600] env[61839]: DEBUG nova.compute.manager [None req-91f7d267-6fad-4c7d-b66a-41e5344a2deb tempest-ServersTestBootFromVolume-874886378 tempest-ServersTestBootFromVolume-874886378-project-member] [instance: eca07795-319e-401d-8f05-41a29bab2689] Starting instance... {{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}}
[ 690.798693] env[61839]: DEBUG oslo_concurrency.lockutils [None req-e5615962-6551-40fe-a767-15a9fd175176 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 690.914944] env[61839]: DEBUG nova.compute.manager [None req-92400e08-1071-41df-9108-be5fb5ffb9e6 tempest-MigrationsAdminTest-1877210458 tempest-MigrationsAdminTest-1877210458-project-member] [instance: 83270007-2cbd-49a5-b3a1-1ad58ea2a66c] Start spawning the instance on the hypervisor. {{(pid=61839) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}}
[ 690.942017] env[61839]: DEBUG nova.virt.hardware [None req-92400e08-1071-41df-9108-be5fb5ffb9e6 tempest-MigrationsAdminTest-1877210458 tempest-MigrationsAdminTest-1877210458-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-18T16:54:01Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='bacc30f3-b05e-4087-9342-e4b401b20ba0',id=38,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-1983055949',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-18T16:50:57Z,direct_url=,disk_format='vmdk',id=e497cc62-282a-4a70-9770-22d80d8a1013,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a98e034276e44534a9feace637762da7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-18T16:50:58Z,virtual_size=,visibility=), allow threads: False {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}}
[ 690.942284] env[61839]: DEBUG nova.virt.hardware [None req-92400e08-1071-41df-9108-be5fb5ffb9e6 tempest-MigrationsAdminTest-1877210458 tempest-MigrationsAdminTest-1877210458-project-member] Flavor limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}}
[ 690.942445] env[61839]: DEBUG nova.virt.hardware [None req-92400e08-1071-41df-9108-be5fb5ffb9e6 tempest-MigrationsAdminTest-1877210458 tempest-MigrationsAdminTest-1877210458-project-member] Image limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 690.942626] env[61839]: DEBUG nova.virt.hardware [None req-92400e08-1071-41df-9108-be5fb5ffb9e6 tempest-MigrationsAdminTest-1877210458 tempest-MigrationsAdminTest-1877210458-project-member] Flavor pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}}
[ 690.942813] env[61839]: DEBUG nova.virt.hardware [None req-92400e08-1071-41df-9108-be5fb5ffb9e6 tempest-MigrationsAdminTest-1877210458 tempest-MigrationsAdminTest-1877210458-project-member] Image pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 690.942945] env[61839]: DEBUG nova.virt.hardware [None req-92400e08-1071-41df-9108-be5fb5ffb9e6 tempest-MigrationsAdminTest-1877210458 tempest-MigrationsAdminTest-1877210458-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}}
[ 690.943499] env[61839]: DEBUG nova.virt.hardware [None req-92400e08-1071-41df-9108-be5fb5ffb9e6 tempest-MigrationsAdminTest-1877210458 tempest-MigrationsAdminTest-1877210458-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}}
[ 690.943736] env[61839]: DEBUG nova.virt.hardware [None req-92400e08-1071-41df-9108-be5fb5ffb9e6 tempest-MigrationsAdminTest-1877210458 tempest-MigrationsAdminTest-1877210458-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}}
[ 690.943936] env[61839]: DEBUG nova.virt.hardware [None req-92400e08-1071-41df-9108-be5fb5ffb9e6 tempest-MigrationsAdminTest-1877210458 tempest-MigrationsAdminTest-1877210458-project-member] Got 1 possible topologies {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}}
[ 690.944134] env[61839]: DEBUG nova.virt.hardware [None req-92400e08-1071-41df-9108-be5fb5ffb9e6 tempest-MigrationsAdminTest-1877210458 tempest-MigrationsAdminTest-1877210458-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}}
[ 690.944313] env[61839]: DEBUG nova.virt.hardware [None req-92400e08-1071-41df-9108-be5fb5ffb9e6 tempest-MigrationsAdminTest-1877210458 tempest-MigrationsAdminTest-1877210458-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
[ 690.945016] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance 97560b6e-0f50-4cc0-b620-305c82938390 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 690.948246] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea71ae39-42c5-45ed-aba4-5fbd08048afa {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 690.956906] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d7535f2-1d4a-49b7-8fe6-6d9474ad4f7c {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 691.237465] env[61839]: DEBUG oslo_concurrency.lockutils [None req-91f7d267-6fad-4c7d-b66a-41e5344a2deb tempest-ServersTestBootFromVolume-874886378 tempest-ServersTestBootFromVolume-874886378-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 691.455876] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance d608405b-20d9-42ab-97e3-e129f9c1448b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 691.602177] env[61839]: DEBUG nova.compute.manager [req-851743bf-5d28-49a0-a609-6571e1828026 req-6295c294-a640-46ba-b1b7-3b746ab8888d service nova] [instance: 83270007-2cbd-49a5-b3a1-1ad58ea2a66c] Received event network-changed-12769a9a-e673-4b4b-98ff-f989dadd3ac8 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}}
[ 691.602177] env[61839]: DEBUG nova.compute.manager [req-851743bf-5d28-49a0-a609-6571e1828026 req-6295c294-a640-46ba-b1b7-3b746ab8888d service nova] [instance: 83270007-2cbd-49a5-b3a1-1ad58ea2a66c] Refreshing instance network info cache due to event network-changed-12769a9a-e673-4b4b-98ff-f989dadd3ac8. {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}}
[ 691.602177] env[61839]: DEBUG oslo_concurrency.lockutils [req-851743bf-5d28-49a0-a609-6571e1828026 req-6295c294-a640-46ba-b1b7-3b746ab8888d service nova] Acquiring lock "refresh_cache-83270007-2cbd-49a5-b3a1-1ad58ea2a66c" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 691.602177] env[61839]: DEBUG oslo_concurrency.lockutils [req-851743bf-5d28-49a0-a609-6571e1828026 req-6295c294-a640-46ba-b1b7-3b746ab8888d service nova] Acquired lock "refresh_cache-83270007-2cbd-49a5-b3a1-1ad58ea2a66c" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 691.602177] env[61839]: DEBUG nova.network.neutron [req-851743bf-5d28-49a0-a609-6571e1828026 req-6295c294-a640-46ba-b1b7-3b746ab8888d service nova] [instance: 83270007-2cbd-49a5-b3a1-1ad58ea2a66c] Refreshing network info cache for port 12769a9a-e673-4b4b-98ff-f989dadd3ac8 {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}}
[ 691.948579] env[61839]: ERROR nova.compute.manager [None req-92400e08-1071-41df-9108-be5fb5ffb9e6 tempest-MigrationsAdminTest-1877210458 tempest-MigrationsAdminTest-1877210458-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 12769a9a-e673-4b4b-98ff-f989dadd3ac8, please check neutron logs for more information.
[ 691.948579] env[61839]: ERROR nova.compute.manager Traceback (most recent call last): [ 691.948579] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 691.948579] env[61839]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 691.948579] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 691.948579] env[61839]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 691.948579] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 691.948579] env[61839]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 691.948579] env[61839]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 691.948579] env[61839]: ERROR nova.compute.manager self.force_reraise() [ 691.948579] env[61839]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 691.948579] env[61839]: ERROR nova.compute.manager raise self.value [ 691.948579] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 691.948579] env[61839]: ERROR nova.compute.manager updated_port = self._update_port( [ 691.948579] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 691.948579] env[61839]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 691.949985] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 691.949985] env[61839]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 691.949985] env[61839]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 12769a9a-e673-4b4b-98ff-f989dadd3ac8, please check neutron logs for more information. 
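The frames above end in _ensure_no_port_binding_failure (nova/network/neutron.py:294), which is what converts a failed Neutron binding into the PortBindingFailed that dominates the rest of this run. A minimal stand-alone sketch of that guard, assuming the port dict carries Neutron's standard 'binding:vif_type' field and that 'binding_failed' is the failure sentinel (both inferred from the traceback rather than quoted from Nova source):

    # Sketch only: reconstruction of the guard named in the traceback
    # above. The 'binding_failed' sentinel value is an assumption.
    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(
                "Binding failed for port %s, please check neutron logs "
                "for more information." % port_id)

    def _ensure_no_port_binding_failure(port):
        # Neutron reports bind failures via binding:vif_type; any other
        # value (ovs, bridge, ...) means the port bound successfully.
        if port.get('binding:vif_type') == 'binding_failed':
            raise PortBindingFailed(port_id=port['id'])

    try:
        _ensure_no_port_binding_failure(
            {'id': '12769a9a-e673-4b4b-98ff-f989dadd3ac8',
             'binding:vif_type': 'binding_failed'})
    except PortBindingFailed as exc:
        print(exc)  # reproduces the error text logged above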
[ 691.949985] env[61839]: ERROR nova.compute.manager [ 691.949985] env[61839]: Traceback (most recent call last): [ 691.949985] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 691.949985] env[61839]: listener.cb(fileno) [ 691.949985] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 691.949985] env[61839]: result = function(*args, **kwargs) [ 691.949985] env[61839]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 691.949985] env[61839]: return func(*args, **kwargs) [ 691.949985] env[61839]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 691.949985] env[61839]: raise e [ 691.949985] env[61839]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 691.949985] env[61839]: nwinfo = self.network_api.allocate_for_instance( [ 691.949985] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 691.949985] env[61839]: created_port_ids = self._update_ports_for_instance( [ 691.949985] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 691.949985] env[61839]: with excutils.save_and_reraise_exception(): [ 691.949985] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 691.949985] env[61839]: self.force_reraise() [ 691.949985] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 691.949985] env[61839]: raise self.value [ 691.949985] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 691.949985] env[61839]: updated_port = self._update_port( [ 691.949985] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 691.949985] env[61839]: _ensure_no_port_binding_failure(port) [ 691.949985] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 691.949985] env[61839]: raise exception.PortBindingFailed(port_id=port['id']) [ 691.950793] env[61839]: nova.exception.PortBindingFailed: Binding failed for port 12769a9a-e673-4b4b-98ff-f989dadd3ac8, please check neutron logs for more information. [ 691.950793] env[61839]: Removing descriptor: 17 [ 691.950793] env[61839]: ERROR nova.compute.manager [None req-92400e08-1071-41df-9108-be5fb5ffb9e6 tempest-MigrationsAdminTest-1877210458 tempest-MigrationsAdminTest-1877210458-project-member] [instance: 83270007-2cbd-49a5-b3a1-1ad58ea2a66c] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 12769a9a-e673-4b4b-98ff-f989dadd3ac8, please check neutron logs for more information. 
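The __exit__ / force_reraise / "raise self.value" frames in both copies of this traceback come from oslo_utils.excutils.save_and_reraise_exception, a context manager that lets cleanup run inside an except block and then re-raises the original exception; the per-instance traceback that follows below repeats the same frames. A small usage sketch (the _rollback_created_ports helper is hypothetical, standing in for whatever _update_ports_for_instance undoes on failure):

    from oslo_utils import excutils

    def _rollback_created_ports():
        # Hypothetical cleanup hook, not Nova's actual helper.
        print("rolling back created ports")

    def _update_ports_for_instance():
        try:
            raise RuntimeError("port update failed")
        except Exception:
            # Cleanup runs in the with-block; on __exit__ the manager
            # calls force_reraise(), i.e. `raise self.value`, producing
            # exactly the frame pair visible in the tracebacks above.
            with excutils.save_and_reraise_exception():
                _rollback_created_ports()

    try:
        _update_ports_for_instance()
    except RuntimeError as exc:
        print("re-raised after cleanup:", exc)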
[ 691.950793] env[61839]: ERROR nova.compute.manager [instance: 83270007-2cbd-49a5-b3a1-1ad58ea2a66c] Traceback (most recent call last): [ 691.950793] env[61839]: ERROR nova.compute.manager [instance: 83270007-2cbd-49a5-b3a1-1ad58ea2a66c] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 691.950793] env[61839]: ERROR nova.compute.manager [instance: 83270007-2cbd-49a5-b3a1-1ad58ea2a66c] yield resources [ 691.950793] env[61839]: ERROR nova.compute.manager [instance: 83270007-2cbd-49a5-b3a1-1ad58ea2a66c] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 691.950793] env[61839]: ERROR nova.compute.manager [instance: 83270007-2cbd-49a5-b3a1-1ad58ea2a66c] self.driver.spawn(context, instance, image_meta, [ 691.950793] env[61839]: ERROR nova.compute.manager [instance: 83270007-2cbd-49a5-b3a1-1ad58ea2a66c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 691.950793] env[61839]: ERROR nova.compute.manager [instance: 83270007-2cbd-49a5-b3a1-1ad58ea2a66c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 691.950793] env[61839]: ERROR nova.compute.manager [instance: 83270007-2cbd-49a5-b3a1-1ad58ea2a66c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 691.950793] env[61839]: ERROR nova.compute.manager [instance: 83270007-2cbd-49a5-b3a1-1ad58ea2a66c] vm_ref = self.build_virtual_machine(instance, [ 691.953179] env[61839]: ERROR nova.compute.manager [instance: 83270007-2cbd-49a5-b3a1-1ad58ea2a66c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 691.953179] env[61839]: ERROR nova.compute.manager [instance: 83270007-2cbd-49a5-b3a1-1ad58ea2a66c] vif_infos = vmwarevif.get_vif_info(self._session, [ 691.953179] env[61839]: ERROR nova.compute.manager [instance: 83270007-2cbd-49a5-b3a1-1ad58ea2a66c] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 691.953179] env[61839]: ERROR nova.compute.manager [instance: 83270007-2cbd-49a5-b3a1-1ad58ea2a66c] for vif in network_info: [ 691.953179] env[61839]: ERROR nova.compute.manager [instance: 83270007-2cbd-49a5-b3a1-1ad58ea2a66c] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 691.953179] env[61839]: ERROR nova.compute.manager [instance: 83270007-2cbd-49a5-b3a1-1ad58ea2a66c] return self._sync_wrapper(fn, *args, **kwargs) [ 691.953179] env[61839]: ERROR nova.compute.manager [instance: 83270007-2cbd-49a5-b3a1-1ad58ea2a66c] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 691.953179] env[61839]: ERROR nova.compute.manager [instance: 83270007-2cbd-49a5-b3a1-1ad58ea2a66c] self.wait() [ 691.953179] env[61839]: ERROR nova.compute.manager [instance: 83270007-2cbd-49a5-b3a1-1ad58ea2a66c] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 691.953179] env[61839]: ERROR nova.compute.manager [instance: 83270007-2cbd-49a5-b3a1-1ad58ea2a66c] self[:] = self._gt.wait() [ 691.953179] env[61839]: ERROR nova.compute.manager [instance: 83270007-2cbd-49a5-b3a1-1ad58ea2a66c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 691.953179] env[61839]: ERROR nova.compute.manager [instance: 83270007-2cbd-49a5-b3a1-1ad58ea2a66c] return self._exit_event.wait() [ 691.953179] env[61839]: ERROR nova.compute.manager [instance: 83270007-2cbd-49a5-b3a1-1ad58ea2a66c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 691.953546] env[61839]: ERROR 
nova.compute.manager [instance: 83270007-2cbd-49a5-b3a1-1ad58ea2a66c] result = hub.switch() [ 691.953546] env[61839]: ERROR nova.compute.manager [instance: 83270007-2cbd-49a5-b3a1-1ad58ea2a66c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 691.953546] env[61839]: ERROR nova.compute.manager [instance: 83270007-2cbd-49a5-b3a1-1ad58ea2a66c] return self.greenlet.switch() [ 691.953546] env[61839]: ERROR nova.compute.manager [instance: 83270007-2cbd-49a5-b3a1-1ad58ea2a66c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 691.953546] env[61839]: ERROR nova.compute.manager [instance: 83270007-2cbd-49a5-b3a1-1ad58ea2a66c] result = function(*args, **kwargs) [ 691.953546] env[61839]: ERROR nova.compute.manager [instance: 83270007-2cbd-49a5-b3a1-1ad58ea2a66c] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 691.953546] env[61839]: ERROR nova.compute.manager [instance: 83270007-2cbd-49a5-b3a1-1ad58ea2a66c] return func(*args, **kwargs) [ 691.953546] env[61839]: ERROR nova.compute.manager [instance: 83270007-2cbd-49a5-b3a1-1ad58ea2a66c] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 691.953546] env[61839]: ERROR nova.compute.manager [instance: 83270007-2cbd-49a5-b3a1-1ad58ea2a66c] raise e [ 691.953546] env[61839]: ERROR nova.compute.manager [instance: 83270007-2cbd-49a5-b3a1-1ad58ea2a66c] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 691.953546] env[61839]: ERROR nova.compute.manager [instance: 83270007-2cbd-49a5-b3a1-1ad58ea2a66c] nwinfo = self.network_api.allocate_for_instance( [ 691.953546] env[61839]: ERROR nova.compute.manager [instance: 83270007-2cbd-49a5-b3a1-1ad58ea2a66c] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 691.953546] env[61839]: ERROR nova.compute.manager [instance: 83270007-2cbd-49a5-b3a1-1ad58ea2a66c] created_port_ids = self._update_ports_for_instance( [ 691.953920] env[61839]: ERROR nova.compute.manager [instance: 83270007-2cbd-49a5-b3a1-1ad58ea2a66c] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 691.953920] env[61839]: ERROR nova.compute.manager [instance: 83270007-2cbd-49a5-b3a1-1ad58ea2a66c] with excutils.save_and_reraise_exception(): [ 691.953920] env[61839]: ERROR nova.compute.manager [instance: 83270007-2cbd-49a5-b3a1-1ad58ea2a66c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 691.953920] env[61839]: ERROR nova.compute.manager [instance: 83270007-2cbd-49a5-b3a1-1ad58ea2a66c] self.force_reraise() [ 691.953920] env[61839]: ERROR nova.compute.manager [instance: 83270007-2cbd-49a5-b3a1-1ad58ea2a66c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 691.953920] env[61839]: ERROR nova.compute.manager [instance: 83270007-2cbd-49a5-b3a1-1ad58ea2a66c] raise self.value [ 691.953920] env[61839]: ERROR nova.compute.manager [instance: 83270007-2cbd-49a5-b3a1-1ad58ea2a66c] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 691.953920] env[61839]: ERROR nova.compute.manager [instance: 83270007-2cbd-49a5-b3a1-1ad58ea2a66c] updated_port = self._update_port( [ 691.953920] env[61839]: ERROR nova.compute.manager [instance: 83270007-2cbd-49a5-b3a1-1ad58ea2a66c] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 691.953920] 
env[61839]: ERROR nova.compute.manager [instance: 83270007-2cbd-49a5-b3a1-1ad58ea2a66c] _ensure_no_port_binding_failure(port) [ 691.953920] env[61839]: ERROR nova.compute.manager [instance: 83270007-2cbd-49a5-b3a1-1ad58ea2a66c] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 691.953920] env[61839]: ERROR nova.compute.manager [instance: 83270007-2cbd-49a5-b3a1-1ad58ea2a66c] raise exception.PortBindingFailed(port_id=port['id']) [ 691.954254] env[61839]: ERROR nova.compute.manager [instance: 83270007-2cbd-49a5-b3a1-1ad58ea2a66c] nova.exception.PortBindingFailed: Binding failed for port 12769a9a-e673-4b4b-98ff-f989dadd3ac8, please check neutron logs for more information. [ 691.954254] env[61839]: ERROR nova.compute.manager [instance: 83270007-2cbd-49a5-b3a1-1ad58ea2a66c] [ 691.954254] env[61839]: INFO nova.compute.manager [None req-92400e08-1071-41df-9108-be5fb5ffb9e6 tempest-MigrationsAdminTest-1877210458 tempest-MigrationsAdminTest-1877210458-project-member] [instance: 83270007-2cbd-49a5-b3a1-1ad58ea2a66c] Terminating instance [ 691.954254] env[61839]: DEBUG oslo_concurrency.lockutils [None req-92400e08-1071-41df-9108-be5fb5ffb9e6 tempest-MigrationsAdminTest-1877210458 tempest-MigrationsAdminTest-1877210458-project-member] Acquiring lock "refresh_cache-83270007-2cbd-49a5-b3a1-1ad58ea2a66c" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 691.960251] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance eca07795-319e-401d-8f05-41a29bab2689 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 692.135893] env[61839]: DEBUG nova.network.neutron [req-851743bf-5d28-49a0-a609-6571e1828026 req-6295c294-a640-46ba-b1b7-3b746ab8888d service nova] [instance: 83270007-2cbd-49a5-b3a1-1ad58ea2a66c] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 692.265178] env[61839]: DEBUG nova.network.neutron [req-851743bf-5d28-49a0-a609-6571e1828026 req-6295c294-a640-46ba-b1b7-3b746ab8888d service nova] [instance: 83270007-2cbd-49a5-b3a1-1ad58ea2a66c] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 692.464909] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 692.768777] env[61839]: DEBUG oslo_concurrency.lockutils [req-851743bf-5d28-49a0-a609-6571e1828026 req-6295c294-a640-46ba-b1b7-3b746ab8888d service nova] Releasing lock "refresh_cache-83270007-2cbd-49a5-b3a1-1ad58ea2a66c" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 692.768777] env[61839]: DEBUG oslo_concurrency.lockutils [None req-92400e08-1071-41df-9108-be5fb5ffb9e6 tempest-MigrationsAdminTest-1877210458 tempest-MigrationsAdminTest-1877210458-project-member] Acquired lock "refresh_cache-83270007-2cbd-49a5-b3a1-1ad58ea2a66c" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 692.768777] env[61839]: DEBUG nova.network.neutron [None req-92400e08-1071-41df-9108-be5fb5ffb9e6 tempest-MigrationsAdminTest-1877210458 tempest-MigrationsAdminTest-1877210458-project-member] [instance: 83270007-2cbd-49a5-b3a1-1ad58ea2a66c] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 692.968056] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance 4b7ef74e-4018-4c6e-b540-d65c986d1ff2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 693.291528] env[61839]: DEBUG nova.network.neutron [None req-92400e08-1071-41df-9108-be5fb5ffb9e6 tempest-MigrationsAdminTest-1877210458 tempest-MigrationsAdminTest-1877210458-project-member] [instance: 83270007-2cbd-49a5-b3a1-1ad58ea2a66c] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 693.337983] env[61839]: DEBUG nova.network.neutron [None req-92400e08-1071-41df-9108-be5fb5ffb9e6 tempest-MigrationsAdminTest-1877210458 tempest-MigrationsAdminTest-1877210458-project-member] [instance: 83270007-2cbd-49a5-b3a1-1ad58ea2a66c] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 693.471142] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance 56993a6d-de55-4648-9fd9-31d06a57f300 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 693.626971] env[61839]: DEBUG nova.compute.manager [req-007791b6-1754-4bf3-98c3-d22bb65b6306 req-3a5adcdf-6f94-4fcf-b4f4-2d9fee72b297 service nova] [instance: 83270007-2cbd-49a5-b3a1-1ad58ea2a66c] Received event network-vif-deleted-12769a9a-e673-4b4b-98ff-f989dadd3ac8 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 693.841029] env[61839]: DEBUG oslo_concurrency.lockutils [None req-92400e08-1071-41df-9108-be5fb5ffb9e6 tempest-MigrationsAdminTest-1877210458 tempest-MigrationsAdminTest-1877210458-project-member] Releasing lock "refresh_cache-83270007-2cbd-49a5-b3a1-1ad58ea2a66c" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 693.841363] env[61839]: DEBUG nova.compute.manager [None req-92400e08-1071-41df-9108-be5fb5ffb9e6 tempest-MigrationsAdminTest-1877210458 tempest-MigrationsAdminTest-1877210458-project-member] [instance: 83270007-2cbd-49a5-b3a1-1ad58ea2a66c] Start destroying the instance on the hypervisor. {{(pid=61839) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 693.841563] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-92400e08-1071-41df-9108-be5fb5ffb9e6 tempest-MigrationsAdminTest-1877210458 tempest-MigrationsAdminTest-1877210458-project-member] [instance: 83270007-2cbd-49a5-b3a1-1ad58ea2a66c] Destroying instance {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 693.841879] env[61839]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-534fe1d8-936d-4cbd-9117-328c372b981c {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.852982] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d2ecaaa-ee4c-4d58-bf11-6a66e7a86978 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.875120] env[61839]: WARNING nova.virt.vmwareapi.vmops [None req-92400e08-1071-41df-9108-be5fb5ffb9e6 tempest-MigrationsAdminTest-1877210458 tempest-MigrationsAdminTest-1877210458-project-member] [instance: 83270007-2cbd-49a5-b3a1-1ad58ea2a66c] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 83270007-2cbd-49a5-b3a1-1ad58ea2a66c could not be found. [ 693.875351] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-92400e08-1071-41df-9108-be5fb5ffb9e6 tempest-MigrationsAdminTest-1877210458 tempest-MigrationsAdminTest-1877210458-project-member] [instance: 83270007-2cbd-49a5-b3a1-1ad58ea2a66c] Instance destroyed {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 693.875527] env[61839]: INFO nova.compute.manager [None req-92400e08-1071-41df-9108-be5fb5ffb9e6 tempest-MigrationsAdminTest-1877210458 tempest-MigrationsAdminTest-1877210458-project-member] [instance: 83270007-2cbd-49a5-b3a1-1ad58ea2a66c] Took 0.03 seconds to destroy the instance on the hypervisor. [ 693.875772] env[61839]: DEBUG oslo.service.loopingcall [None req-92400e08-1071-41df-9108-be5fb5ffb9e6 tempest-MigrationsAdminTest-1877210458 tempest-MigrationsAdminTest-1877210458-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.

{{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 693.875977] env[61839]: DEBUG nova.compute.manager [-] [instance: 83270007-2cbd-49a5-b3a1-1ad58ea2a66c] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 693.876079] env[61839]: DEBUG nova.network.neutron [-] [instance: 83270007-2cbd-49a5-b3a1-1ad58ea2a66c] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 693.889892] env[61839]: DEBUG nova.network.neutron [-] [instance: 83270007-2cbd-49a5-b3a1-1ad58ea2a66c] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 693.974468] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance 50140f35-6282-41dc-a66c-f041f33769d7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 694.396437] env[61839]: DEBUG nova.network.neutron [-] [instance: 83270007-2cbd-49a5-b3a1-1ad58ea2a66c] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 694.477708] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance a89e30e6-b727-440f-a1e8-9c86d19c796d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 694.899520] env[61839]: INFO nova.compute.manager [-] [instance: 83270007-2cbd-49a5-b3a1-1ad58ea2a66c] Took 1.02 seconds to deallocate network for instance. [ 694.901921] env[61839]: DEBUG nova.compute.claims [None req-92400e08-1071-41df-9108-be5fb5ffb9e6 tempest-MigrationsAdminTest-1877210458 tempest-MigrationsAdminTest-1877210458-project-member] [instance: 83270007-2cbd-49a5-b3a1-1ad58ea2a66c] Aborting claim: {{(pid=61839) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 694.902116] env[61839]: DEBUG oslo_concurrency.lockutils [None req-92400e08-1071-41df-9108-be5fb5ffb9e6 tempest-MigrationsAdminTest-1877210458 tempest-MigrationsAdminTest-1877210458-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 694.982378] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance 337b31e7-a6c9-4f35-9936-62cff06fe2a1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 695.485306] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance 59ea60d5-7296-480c-ac03-ec0a7c021300 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 695.990441] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance 619ec15b-463a-4daa-bffe-7d7a6022b962 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 696.493242] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance 821b784d-dc69-4c54-bccf-76693c34e19d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 696.859216] env[61839]: DEBUG oslo_concurrency.lockutils [None req-4445949b-c57d-48df-ad34-d533bc3c88d3 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Acquiring lock "fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 696.859681] env[61839]: DEBUG oslo_concurrency.lockutils [None req-4445949b-c57d-48df-ad34-d533bc3c88d3 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Lock "fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 696.996495] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance e81bf730-9cf6-4728-aae4-4962115f8b6f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 697.499093] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance 697222e0-07e5-4a3d-adbe-d5d815cf4756 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}.
{{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 698.002452] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance 0bc0eefd-8a56-4cd6-a0b5-818cc437d917 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 698.505665] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance 687e6bf8-cf2a-4a9f-ad87-b42c4d0ced0a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 699.010780] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance e65da0fd-e877-4b25-a319-e4d65397056a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 699.514178] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance 5c29c188-a34b-4751-9f8b-166af7b15088 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 700.017145] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance 603191b6-a4b0-451b-b98b-f3dbfb684300 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 700.521149] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance 86525ea7-af75-4b10-85a1-c0fbab73ea5f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 701.024553] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance a661cc10-5c4e-421b-b70b-189f0a613e8a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 701.024861] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Total usable vcpus: 48, total allocated vcpus: 7 {{(pid=61839) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 701.024997] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1856MB phys_disk=200GB used_disk=7GB total_vcpus=48 used_vcpus=7 pci_stats=[] {{(pid=61839) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 701.357240] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a5c14a5-d715-47da-8d5f-ef05cb5c44fa {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.365470] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6226581-8dfd-4482-96f9-a421b75024f0 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.395163] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5ede678-926b-4ac3-8504-a3ffc0eabec2 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.402695] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90a85651-1265-40a2-ab57-a388baf6dcbf {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.417177] env[61839]: DEBUG nova.compute.provider_tree [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 701.920646] env[61839]: DEBUG nova.scheduler.client.report [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 702.428095] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61839) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 702.428095] env[61839]: DEBUG oslo_concurrency.lockutils [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 13.538s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 702.428095] env[61839]: DEBUG oslo_concurrency.lockutils [None 
req-dd05ef5d-2024-4ee1-8efc-49a773076423 tempest-ServerAddressesTestJSON-1080876972 tempest-ServerAddressesTestJSON-1080876972-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 29.052s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 703.235435] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c6649c3-4a5b-4c46-84d8-6f22c36ceb9d {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.243485] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ff24d96-83f4-4831-91a8-0d80ea575cee {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.272519] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41d8e6af-3709-4afb-83fd-a97c0dfcf5d4 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.280362] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b8420cb-fc01-42c5-9957-37d719d3d1b7 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.293918] env[61839]: DEBUG nova.compute.provider_tree [None req-dd05ef5d-2024-4ee1-8efc-49a773076423 tempest-ServerAddressesTestJSON-1080876972 tempest-ServerAddressesTestJSON-1080876972-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 703.797938] env[61839]: DEBUG nova.scheduler.client.report [None req-dd05ef5d-2024-4ee1-8efc-49a773076423 tempest-ServerAddressesTestJSON-1080876972 tempest-ServerAddressesTestJSON-1080876972-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 704.303502] env[61839]: DEBUG oslo_concurrency.lockutils [None req-dd05ef5d-2024-4ee1-8efc-49a773076423 tempest-ServerAddressesTestJSON-1080876972 tempest-ServerAddressesTestJSON-1080876972-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.876s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 704.304164] env[61839]: ERROR nova.compute.manager [None req-dd05ef5d-2024-4ee1-8efc-49a773076423 tempest-ServerAddressesTestJSON-1080876972 tempest-ServerAddressesTestJSON-1080876972-project-member] [instance: df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port a387e3fb-f832-426b-9aa3-cc499d5fb2cc, please check neutron logs for more information. 
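The "Acquiring lock ... / acquired ... waited Ns / released ... held Ns" bookkeeping around "compute_resources" (13.538s held, 29.052s waited just above) is emitted by oslo.concurrency's lock wrapper; the traceback for the PortBindingFailed reported above continues below. A sketch of the pattern, assuming plain lockutils.synchronized (Nova layers its own decorator on top, but the waited/held accounting shown here is lockutils' own debug logging):

    import time

    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def abort_instance_claim():
        # While one request holds this lock, every competing request's
        # debug log records how long it waited before acquiring it.
        time.sleep(0.1)

    abort_instance_claim()  # logs acquire/release at DEBUG level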
[ 704.304164] env[61839]: ERROR nova.compute.manager [instance: df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e] Traceback (most recent call last): [ 704.304164] env[61839]: ERROR nova.compute.manager [instance: df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 704.304164] env[61839]: ERROR nova.compute.manager [instance: df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e] self.driver.spawn(context, instance, image_meta, [ 704.304164] env[61839]: ERROR nova.compute.manager [instance: df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 704.304164] env[61839]: ERROR nova.compute.manager [instance: df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 704.304164] env[61839]: ERROR nova.compute.manager [instance: df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 704.304164] env[61839]: ERROR nova.compute.manager [instance: df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e] vm_ref = self.build_virtual_machine(instance, [ 704.304164] env[61839]: ERROR nova.compute.manager [instance: df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 704.304164] env[61839]: ERROR nova.compute.manager [instance: df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e] vif_infos = vmwarevif.get_vif_info(self._session, [ 704.304164] env[61839]: ERROR nova.compute.manager [instance: df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 704.304567] env[61839]: ERROR nova.compute.manager [instance: df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e] for vif in network_info: [ 704.304567] env[61839]: ERROR nova.compute.manager [instance: df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 704.304567] env[61839]: ERROR nova.compute.manager [instance: df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e] return self._sync_wrapper(fn, *args, **kwargs) [ 704.304567] env[61839]: ERROR nova.compute.manager [instance: df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 704.304567] env[61839]: ERROR nova.compute.manager [instance: df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e] self.wait() [ 704.304567] env[61839]: ERROR nova.compute.manager [instance: df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 704.304567] env[61839]: ERROR nova.compute.manager [instance: df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e] self[:] = self._gt.wait() [ 704.304567] env[61839]: ERROR nova.compute.manager [instance: df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 704.304567] env[61839]: ERROR nova.compute.manager [instance: df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e] return self._exit_event.wait() [ 704.304567] env[61839]: ERROR nova.compute.manager [instance: df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 704.304567] env[61839]: ERROR nova.compute.manager [instance: df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e] result = hub.switch() [ 704.304567] env[61839]: ERROR nova.compute.manager [instance: df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
704.304567] env[61839]: ERROR nova.compute.manager [instance: df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e] return self.greenlet.switch() [ 704.304933] env[61839]: ERROR nova.compute.manager [instance: df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 704.304933] env[61839]: ERROR nova.compute.manager [instance: df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e] result = function(*args, **kwargs) [ 704.304933] env[61839]: ERROR nova.compute.manager [instance: df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 704.304933] env[61839]: ERROR nova.compute.manager [instance: df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e] return func(*args, **kwargs) [ 704.304933] env[61839]: ERROR nova.compute.manager [instance: df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 704.304933] env[61839]: ERROR nova.compute.manager [instance: df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e] raise e [ 704.304933] env[61839]: ERROR nova.compute.manager [instance: df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 704.304933] env[61839]: ERROR nova.compute.manager [instance: df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e] nwinfo = self.network_api.allocate_for_instance( [ 704.304933] env[61839]: ERROR nova.compute.manager [instance: df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 704.304933] env[61839]: ERROR nova.compute.manager [instance: df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e] created_port_ids = self._update_ports_for_instance( [ 704.304933] env[61839]: ERROR nova.compute.manager [instance: df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 704.304933] env[61839]: ERROR nova.compute.manager [instance: df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e] with excutils.save_and_reraise_exception(): [ 704.304933] env[61839]: ERROR nova.compute.manager [instance: df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 704.305312] env[61839]: ERROR nova.compute.manager [instance: df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e] self.force_reraise() [ 704.305312] env[61839]: ERROR nova.compute.manager [instance: df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 704.305312] env[61839]: ERROR nova.compute.manager [instance: df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e] raise self.value [ 704.305312] env[61839]: ERROR nova.compute.manager [instance: df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 704.305312] env[61839]: ERROR nova.compute.manager [instance: df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e] updated_port = self._update_port( [ 704.305312] env[61839]: ERROR nova.compute.manager [instance: df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 704.305312] env[61839]: ERROR nova.compute.manager [instance: df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e] _ensure_no_port_binding_failure(port) [ 704.305312] env[61839]: ERROR nova.compute.manager [instance: df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 704.305312] env[61839]: ERROR nova.compute.manager [instance: df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e] raise exception.PortBindingFailed(port_id=port['id']) [ 704.305312] env[61839]: ERROR nova.compute.manager [instance: df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e] nova.exception.PortBindingFailed: Binding failed for port a387e3fb-f832-426b-9aa3-cc499d5fb2cc, please check neutron logs for more information. [ 704.305312] env[61839]: ERROR nova.compute.manager [instance: df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e] [ 704.305588] env[61839]: DEBUG nova.compute.utils [None req-dd05ef5d-2024-4ee1-8efc-49a773076423 tempest-ServerAddressesTestJSON-1080876972 tempest-ServerAddressesTestJSON-1080876972-project-member] [instance: df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e] Binding failed for port a387e3fb-f832-426b-9aa3-cc499d5fb2cc, please check neutron logs for more information. {{(pid=61839) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 704.306517] env[61839]: DEBUG oslo_concurrency.lockutils [None req-9e174940-155b-4c43-946d-348c70405c68 tempest-ServersTestJSON-188267051 tempest-ServersTestJSON-188267051-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 25.863s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 704.309879] env[61839]: DEBUG nova.compute.manager [None req-dd05ef5d-2024-4ee1-8efc-49a773076423 tempest-ServerAddressesTestJSON-1080876972 tempest-ServerAddressesTestJSON-1080876972-project-member] [instance: df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e] Build of instance df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e was re-scheduled: Binding failed for port a387e3fb-f832-426b-9aa3-cc499d5fb2cc, please check neutron logs for more information. 
{{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 704.310449] env[61839]: DEBUG nova.compute.manager [None req-dd05ef5d-2024-4ee1-8efc-49a773076423 tempest-ServerAddressesTestJSON-1080876972 tempest-ServerAddressesTestJSON-1080876972-project-member] [instance: df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e] Unplugging VIFs for instance {{(pid=61839) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 704.310738] env[61839]: DEBUG oslo_concurrency.lockutils [None req-dd05ef5d-2024-4ee1-8efc-49a773076423 tempest-ServerAddressesTestJSON-1080876972 tempest-ServerAddressesTestJSON-1080876972-project-member] Acquiring lock "refresh_cache-df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 704.310919] env[61839]: DEBUG oslo_concurrency.lockutils [None req-dd05ef5d-2024-4ee1-8efc-49a773076423 tempest-ServerAddressesTestJSON-1080876972 tempest-ServerAddressesTestJSON-1080876972-project-member] Acquired lock "refresh_cache-df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 704.311102] env[61839]: DEBUG nova.network.neutron [None req-dd05ef5d-2024-4ee1-8efc-49a773076423 tempest-ServerAddressesTestJSON-1080876972 tempest-ServerAddressesTestJSON-1080876972-project-member] [instance: df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 704.840801] env[61839]: DEBUG nova.network.neutron [None req-dd05ef5d-2024-4ee1-8efc-49a773076423 tempest-ServerAddressesTestJSON-1080876972 tempest-ServerAddressesTestJSON-1080876972-project-member] [instance: df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e] Instance cache missing network info. 
{{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 704.968707] env[61839]: DEBUG nova.network.neutron [None req-dd05ef5d-2024-4ee1-8efc-49a773076423 tempest-ServerAddressesTestJSON-1080876972 tempest-ServerAddressesTestJSON-1080876972-project-member] [instance: df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 705.153558] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6943c6c8-cdfe-4bfc-beff-fdf1c01c3b6d {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.160694] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65f49a5b-d556-4cd8-8ce6-fa713fa28f2c {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.189421] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c7373a6-6eda-4be1-b7b5-a029d74669be {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.196586] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30cca574-c87e-4573-a297-8ca973d786f0 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.209983] env[61839]: DEBUG nova.compute.provider_tree [None req-9e174940-155b-4c43-946d-348c70405c68 tempest-ServersTestJSON-188267051 tempest-ServersTestJSON-188267051-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 705.471282] env[61839]: DEBUG oslo_concurrency.lockutils [None req-dd05ef5d-2024-4ee1-8efc-49a773076423 tempest-ServerAddressesTestJSON-1080876972 tempest-ServerAddressesTestJSON-1080876972-project-member] Releasing lock "refresh_cache-df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 705.471528] env[61839]: DEBUG nova.compute.manager [None req-dd05ef5d-2024-4ee1-8efc-49a773076423 tempest-ServerAddressesTestJSON-1080876972 tempest-ServerAddressesTestJSON-1080876972-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged.
{{(pid=61839) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 705.471718] env[61839]: DEBUG nova.compute.manager [None req-dd05ef5d-2024-4ee1-8efc-49a773076423 tempest-ServerAddressesTestJSON-1080876972 tempest-ServerAddressesTestJSON-1080876972-project-member] [instance: df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 705.471889] env[61839]: DEBUG nova.network.neutron [None req-dd05ef5d-2024-4ee1-8efc-49a773076423 tempest-ServerAddressesTestJSON-1080876972 tempest-ServerAddressesTestJSON-1080876972-project-member] [instance: df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 705.487027] env[61839]: DEBUG nova.network.neutron [None req-dd05ef5d-2024-4ee1-8efc-49a773076423 tempest-ServerAddressesTestJSON-1080876972 tempest-ServerAddressesTestJSON-1080876972-project-member] [instance: df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 705.713070] env[61839]: DEBUG nova.scheduler.client.report [None req-9e174940-155b-4c43-946d-348c70405c68 tempest-ServersTestJSON-188267051 tempest-ServersTestJSON-188267051-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 705.989207] env[61839]: DEBUG nova.network.neutron [None req-dd05ef5d-2024-4ee1-8efc-49a773076423 tempest-ServerAddressesTestJSON-1080876972 tempest-ServerAddressesTestJSON-1080876972-project-member] [instance: df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 706.218048] env[61839]: DEBUG oslo_concurrency.lockutils [None req-9e174940-155b-4c43-946d-348c70405c68 tempest-ServersTestJSON-188267051 tempest-ServersTestJSON-188267051-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.911s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 706.218571] env[61839]: ERROR nova.compute.manager [None req-9e174940-155b-4c43-946d-348c70405c68 tempest-ServersTestJSON-188267051 tempest-ServersTestJSON-188267051-project-member] [instance: 2619b21e-084f-4003-af13-80382bfb1e2f] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 7ae9091f-df7a-4c82-90df-81b17f49b944, please check neutron logs for more information. 
[ 706.218571] env[61839]: ERROR nova.compute.manager [instance: 2619b21e-084f-4003-af13-80382bfb1e2f] Traceback (most recent call last): [ 706.218571] env[61839]: ERROR nova.compute.manager [instance: 2619b21e-084f-4003-af13-80382bfb1e2f] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 706.218571] env[61839]: ERROR nova.compute.manager [instance: 2619b21e-084f-4003-af13-80382bfb1e2f] self.driver.spawn(context, instance, image_meta, [ 706.218571] env[61839]: ERROR nova.compute.manager [instance: 2619b21e-084f-4003-af13-80382bfb1e2f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 706.218571] env[61839]: ERROR nova.compute.manager [instance: 2619b21e-084f-4003-af13-80382bfb1e2f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 706.218571] env[61839]: ERROR nova.compute.manager [instance: 2619b21e-084f-4003-af13-80382bfb1e2f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 706.218571] env[61839]: ERROR nova.compute.manager [instance: 2619b21e-084f-4003-af13-80382bfb1e2f] vm_ref = self.build_virtual_machine(instance, [ 706.218571] env[61839]: ERROR nova.compute.manager [instance: 2619b21e-084f-4003-af13-80382bfb1e2f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 706.218571] env[61839]: ERROR nova.compute.manager [instance: 2619b21e-084f-4003-af13-80382bfb1e2f] vif_infos = vmwarevif.get_vif_info(self._session, [ 706.218571] env[61839]: ERROR nova.compute.manager [instance: 2619b21e-084f-4003-af13-80382bfb1e2f] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 706.218937] env[61839]: ERROR nova.compute.manager [instance: 2619b21e-084f-4003-af13-80382bfb1e2f] for vif in network_info: [ 706.218937] env[61839]: ERROR nova.compute.manager [instance: 2619b21e-084f-4003-af13-80382bfb1e2f] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 706.218937] env[61839]: ERROR nova.compute.manager [instance: 2619b21e-084f-4003-af13-80382bfb1e2f] return self._sync_wrapper(fn, *args, **kwargs) [ 706.218937] env[61839]: ERROR nova.compute.manager [instance: 2619b21e-084f-4003-af13-80382bfb1e2f] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 706.218937] env[61839]: ERROR nova.compute.manager [instance: 2619b21e-084f-4003-af13-80382bfb1e2f] self.wait() [ 706.218937] env[61839]: ERROR nova.compute.manager [instance: 2619b21e-084f-4003-af13-80382bfb1e2f] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 706.218937] env[61839]: ERROR nova.compute.manager [instance: 2619b21e-084f-4003-af13-80382bfb1e2f] self[:] = self._gt.wait() [ 706.218937] env[61839]: ERROR nova.compute.manager [instance: 2619b21e-084f-4003-af13-80382bfb1e2f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 706.218937] env[61839]: ERROR nova.compute.manager [instance: 2619b21e-084f-4003-af13-80382bfb1e2f] return self._exit_event.wait() [ 706.218937] env[61839]: ERROR nova.compute.manager [instance: 2619b21e-084f-4003-af13-80382bfb1e2f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 706.218937] env[61839]: ERROR nova.compute.manager [instance: 2619b21e-084f-4003-af13-80382bfb1e2f] result = hub.switch() [ 706.218937] env[61839]: ERROR nova.compute.manager [instance: 2619b21e-084f-4003-af13-80382bfb1e2f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
706.218937] env[61839]: ERROR nova.compute.manager [instance: 2619b21e-084f-4003-af13-80382bfb1e2f] return self.greenlet.switch() [ 706.219502] env[61839]: ERROR nova.compute.manager [instance: 2619b21e-084f-4003-af13-80382bfb1e2f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 706.219502] env[61839]: ERROR nova.compute.manager [instance: 2619b21e-084f-4003-af13-80382bfb1e2f] result = function(*args, **kwargs) [ 706.219502] env[61839]: ERROR nova.compute.manager [instance: 2619b21e-084f-4003-af13-80382bfb1e2f] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 706.219502] env[61839]: ERROR nova.compute.manager [instance: 2619b21e-084f-4003-af13-80382bfb1e2f] return func(*args, **kwargs) [ 706.219502] env[61839]: ERROR nova.compute.manager [instance: 2619b21e-084f-4003-af13-80382bfb1e2f] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 706.219502] env[61839]: ERROR nova.compute.manager [instance: 2619b21e-084f-4003-af13-80382bfb1e2f] raise e [ 706.219502] env[61839]: ERROR nova.compute.manager [instance: 2619b21e-084f-4003-af13-80382bfb1e2f] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 706.219502] env[61839]: ERROR nova.compute.manager [instance: 2619b21e-084f-4003-af13-80382bfb1e2f] nwinfo = self.network_api.allocate_for_instance( [ 706.219502] env[61839]: ERROR nova.compute.manager [instance: 2619b21e-084f-4003-af13-80382bfb1e2f] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 706.219502] env[61839]: ERROR nova.compute.manager [instance: 2619b21e-084f-4003-af13-80382bfb1e2f] created_port_ids = self._update_ports_for_instance( [ 706.219502] env[61839]: ERROR nova.compute.manager [instance: 2619b21e-084f-4003-af13-80382bfb1e2f] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 706.219502] env[61839]: ERROR nova.compute.manager [instance: 2619b21e-084f-4003-af13-80382bfb1e2f] with excutils.save_and_reraise_exception(): [ 706.219502] env[61839]: ERROR nova.compute.manager [instance: 2619b21e-084f-4003-af13-80382bfb1e2f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 706.220059] env[61839]: ERROR nova.compute.manager [instance: 2619b21e-084f-4003-af13-80382bfb1e2f] self.force_reraise() [ 706.220059] env[61839]: ERROR nova.compute.manager [instance: 2619b21e-084f-4003-af13-80382bfb1e2f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 706.220059] env[61839]: ERROR nova.compute.manager [instance: 2619b21e-084f-4003-af13-80382bfb1e2f] raise self.value [ 706.220059] env[61839]: ERROR nova.compute.manager [instance: 2619b21e-084f-4003-af13-80382bfb1e2f] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 706.220059] env[61839]: ERROR nova.compute.manager [instance: 2619b21e-084f-4003-af13-80382bfb1e2f] updated_port = self._update_port( [ 706.220059] env[61839]: ERROR nova.compute.manager [instance: 2619b21e-084f-4003-af13-80382bfb1e2f] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 706.220059] env[61839]: ERROR nova.compute.manager [instance: 2619b21e-084f-4003-af13-80382bfb1e2f] _ensure_no_port_binding_failure(port) [ 706.220059] env[61839]: ERROR nova.compute.manager [instance: 2619b21e-084f-4003-af13-80382bfb1e2f] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 706.220059] env[61839]: ERROR nova.compute.manager [instance: 2619b21e-084f-4003-af13-80382bfb1e2f] raise exception.PortBindingFailed(port_id=port['id']) [ 706.220059] env[61839]: ERROR nova.compute.manager [instance: 2619b21e-084f-4003-af13-80382bfb1e2f] nova.exception.PortBindingFailed: Binding failed for port 7ae9091f-df7a-4c82-90df-81b17f49b944, please check neutron logs for more information. [ 706.220059] env[61839]: ERROR nova.compute.manager [instance: 2619b21e-084f-4003-af13-80382bfb1e2f] [ 706.220353] env[61839]: DEBUG nova.compute.utils [None req-9e174940-155b-4c43-946d-348c70405c68 tempest-ServersTestJSON-188267051 tempest-ServersTestJSON-188267051-project-member] [instance: 2619b21e-084f-4003-af13-80382bfb1e2f] Binding failed for port 7ae9091f-df7a-4c82-90df-81b17f49b944, please check neutron logs for more information. {{(pid=61839) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 706.220533] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a61be0be-1acd-4bdd-b910-e27219e7a065 tempest-ServersAdminNegativeTestJSON-965769092 tempest-ServersAdminNegativeTestJSON-965769092-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.039s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 706.221931] env[61839]: INFO nova.compute.claims [None req-a61be0be-1acd-4bdd-b910-e27219e7a065 tempest-ServersAdminNegativeTestJSON-965769092 tempest-ServersAdminNegativeTestJSON-965769092-project-member] [instance: 5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 706.224545] env[61839]: DEBUG nova.compute.manager [None req-9e174940-155b-4c43-946d-348c70405c68 tempest-ServersTestJSON-188267051 tempest-ServersTestJSON-188267051-project-member] [instance: 2619b21e-084f-4003-af13-80382bfb1e2f] Build of instance 2619b21e-084f-4003-af13-80382bfb1e2f was re-scheduled: Binding failed for port 7ae9091f-df7a-4c82-90df-81b17f49b944, please check neutron logs for more information. 
{{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 706.224970] env[61839]: DEBUG nova.compute.manager [None req-9e174940-155b-4c43-946d-348c70405c68 tempest-ServersTestJSON-188267051 tempest-ServersTestJSON-188267051-project-member] [instance: 2619b21e-084f-4003-af13-80382bfb1e2f] Unplugging VIFs for instance {{(pid=61839) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 706.225211] env[61839]: DEBUG oslo_concurrency.lockutils [None req-9e174940-155b-4c43-946d-348c70405c68 tempest-ServersTestJSON-188267051 tempest-ServersTestJSON-188267051-project-member] Acquiring lock "refresh_cache-2619b21e-084f-4003-af13-80382bfb1e2f" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 706.225355] env[61839]: DEBUG oslo_concurrency.lockutils [None req-9e174940-155b-4c43-946d-348c70405c68 tempest-ServersTestJSON-188267051 tempest-ServersTestJSON-188267051-project-member] Acquired lock "refresh_cache-2619b21e-084f-4003-af13-80382bfb1e2f" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 706.225508] env[61839]: DEBUG nova.network.neutron [None req-9e174940-155b-4c43-946d-348c70405c68 tempest-ServersTestJSON-188267051 tempest-ServersTestJSON-188267051-project-member] [instance: 2619b21e-084f-4003-af13-80382bfb1e2f] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 706.493382] env[61839]: INFO nova.compute.manager [None req-dd05ef5d-2024-4ee1-8efc-49a773076423 tempest-ServerAddressesTestJSON-1080876972 tempest-ServerAddressesTestJSON-1080876972-project-member] [instance: df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e] Took 1.02 seconds to deallocate network for instance. [ 706.744482] env[61839]: DEBUG nova.network.neutron [None req-9e174940-155b-4c43-946d-348c70405c68 tempest-ServersTestJSON-188267051 tempest-ServersTestJSON-188267051-project-member] [instance: 2619b21e-084f-4003-af13-80382bfb1e2f] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 706.808605] env[61839]: DEBUG nova.network.neutron [None req-9e174940-155b-4c43-946d-348c70405c68 tempest-ServersTestJSON-188267051 tempest-ServersTestJSON-188267051-project-member] [instance: 2619b21e-084f-4003-af13-80382bfb1e2f] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 707.310401] env[61839]: DEBUG oslo_concurrency.lockutils [None req-9e174940-155b-4c43-946d-348c70405c68 tempest-ServersTestJSON-188267051 tempest-ServersTestJSON-188267051-project-member] Releasing lock "refresh_cache-2619b21e-084f-4003-af13-80382bfb1e2f" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 707.310668] env[61839]: DEBUG nova.compute.manager [None req-9e174940-155b-4c43-946d-348c70405c68 tempest-ServersTestJSON-188267051 tempest-ServersTestJSON-188267051-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61839) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 707.310841] env[61839]: DEBUG nova.compute.manager [None req-9e174940-155b-4c43-946d-348c70405c68 tempest-ServersTestJSON-188267051 tempest-ServersTestJSON-188267051-project-member] [instance: 2619b21e-084f-4003-af13-80382bfb1e2f] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 707.311039] env[61839]: DEBUG nova.network.neutron [None req-9e174940-155b-4c43-946d-348c70405c68 tempest-ServersTestJSON-188267051 tempest-ServersTestJSON-188267051-project-member] [instance: 2619b21e-084f-4003-af13-80382bfb1e2f] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 707.325740] env[61839]: DEBUG nova.network.neutron [None req-9e174940-155b-4c43-946d-348c70405c68 tempest-ServersTestJSON-188267051 tempest-ServersTestJSON-188267051-project-member] [instance: 2619b21e-084f-4003-af13-80382bfb1e2f] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 707.522499] env[61839]: INFO nova.scheduler.client.report [None req-dd05ef5d-2024-4ee1-8efc-49a773076423 tempest-ServerAddressesTestJSON-1080876972 tempest-ServerAddressesTestJSON-1080876972-project-member] Deleted allocations for instance df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e [ 707.565857] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fac0b3e-f01b-4830-81c1-a2488bfd881c {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.573682] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1a4356a-cc12-41c8-ba5d-78556cc2ff31 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.605216] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2953368f-c68d-4b15-811c-39a31fd7b857 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.612307] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b09a9ee7-a698-40b8-ac38-9e5610d8d086 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.625942] env[61839]: DEBUG nova.compute.provider_tree [None req-a61be0be-1acd-4bdd-b910-e27219e7a065 tempest-ServersAdminNegativeTestJSON-965769092 tempest-ServersAdminNegativeTestJSON-965769092-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 707.828781] env[61839]: DEBUG nova.network.neutron [None req-9e174940-155b-4c43-946d-348c70405c68 tempest-ServersTestJSON-188267051 tempest-ServersTestJSON-188267051-project-member] [instance: 2619b21e-084f-4003-af13-80382bfb1e2f] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 708.032719] env[61839]: DEBUG oslo_concurrency.lockutils [None req-dd05ef5d-2024-4ee1-8efc-49a773076423 tempest-ServerAddressesTestJSON-1080876972 tempest-ServerAddressesTestJSON-1080876972-project-member] Lock "df40d0e5-aea4-4f4c-a940-cc2ad89b7e6e" "released" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 137.149s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 708.128589] env[61839]: DEBUG nova.scheduler.client.report [None req-a61be0be-1acd-4bdd-b910-e27219e7a065 tempest-ServersAdminNegativeTestJSON-965769092 tempest-ServersAdminNegativeTestJSON-965769092-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 708.332128] env[61839]: INFO nova.compute.manager [None req-9e174940-155b-4c43-946d-348c70405c68 tempest-ServersTestJSON-188267051 tempest-ServersTestJSON-188267051-project-member] [instance: 2619b21e-084f-4003-af13-80382bfb1e2f] Took 1.02 seconds to deallocate network for instance. [ 708.535586] env[61839]: DEBUG nova.compute.manager [None req-f0f367ff-b05e-4ad3-a843-4493b18ee6be tempest-ListImageFiltersTestJSON-1646419560 tempest-ListImageFiltersTestJSON-1646419560-project-member] [instance: fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1] Starting instance... {{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 708.633470] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a61be0be-1acd-4bdd-b910-e27219e7a065 tempest-ServersAdminNegativeTestJSON-965769092 tempest-ServersAdminNegativeTestJSON-965769092-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.413s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 708.633812] env[61839]: DEBUG nova.compute.manager [None req-a61be0be-1acd-4bdd-b910-e27219e7a065 tempest-ServersAdminNegativeTestJSON-965769092 tempest-ServersAdminNegativeTestJSON-965769092-project-member] [instance: 5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8] Start building networks asynchronously for instance. 
{{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 708.636312] env[61839]: DEBUG oslo_concurrency.lockutils [None req-7cd3e466-5165-49d9-934b-74ecfbe9a0b9 tempest-ServersV294TestFqdnHostnames-1206786293 tempest-ServersV294TestFqdnHostnames-1206786293-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 28.683s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 709.061239] env[61839]: DEBUG oslo_concurrency.lockutils [None req-f0f367ff-b05e-4ad3-a843-4493b18ee6be tempest-ListImageFiltersTestJSON-1646419560 tempest-ListImageFiltersTestJSON-1646419560-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 709.142042] env[61839]: DEBUG nova.compute.utils [None req-a61be0be-1acd-4bdd-b910-e27219e7a065 tempest-ServersAdminNegativeTestJSON-965769092 tempest-ServersAdminNegativeTestJSON-965769092-project-member] Using /dev/sd instead of None {{(pid=61839) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 709.143354] env[61839]: DEBUG nova.compute.manager [None req-a61be0be-1acd-4bdd-b910-e27219e7a065 tempest-ServersAdminNegativeTestJSON-965769092 tempest-ServersAdminNegativeTestJSON-965769092-project-member] [instance: 5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8] Allocating IP information in the background. {{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 709.143643] env[61839]: DEBUG nova.network.neutron [None req-a61be0be-1acd-4bdd-b910-e27219e7a065 tempest-ServersAdminNegativeTestJSON-965769092 tempest-ServersAdminNegativeTestJSON-965769092-project-member] [instance: 5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8] allocate_for_instance() {{(pid=61839) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 709.281187] env[61839]: DEBUG nova.policy [None req-a61be0be-1acd-4bdd-b910-e27219e7a065 tempest-ServersAdminNegativeTestJSON-965769092 tempest-ServersAdminNegativeTestJSON-965769092-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6576f33959664c029d847a84f37568c5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2d33af84d07d4d19a67bd42e3ddf7775', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61839) authorize /opt/stack/nova/nova/policy.py:201}} [ 709.364643] env[61839]: INFO nova.scheduler.client.report [None req-9e174940-155b-4c43-946d-348c70405c68 tempest-ServersTestJSON-188267051 tempest-ServersTestJSON-188267051-project-member] Deleted allocations for instance 2619b21e-084f-4003-af13-80382bfb1e2f [ 709.544018] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7e0eb68-af67-4996-b938-19e105b9607f {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.551215] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3441602-4481-4fb7-a989-99277ebea311 {{(pid=61839) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.585930] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3061526-e000-4872-a363-8d793e6cfd27 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.593269] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3604f363-5e02-47a9-ba89-b2c4c97e306a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.606649] env[61839]: DEBUG nova.compute.provider_tree [None req-7cd3e466-5165-49d9-934b-74ecfbe9a0b9 tempest-ServersV294TestFqdnHostnames-1206786293 tempest-ServersV294TestFqdnHostnames-1206786293-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 709.649068] env[61839]: DEBUG nova.compute.manager [None req-a61be0be-1acd-4bdd-b910-e27219e7a065 tempest-ServersAdminNegativeTestJSON-965769092 tempest-ServersAdminNegativeTestJSON-965769092-project-member] [instance: 5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8] Start building block device mappings for instance. {{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 709.884473] env[61839]: DEBUG oslo_concurrency.lockutils [None req-9e174940-155b-4c43-946d-348c70405c68 tempest-ServersTestJSON-188267051 tempest-ServersTestJSON-188267051-project-member] Lock "2619b21e-084f-4003-af13-80382bfb1e2f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 137.136s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 710.112027] env[61839]: DEBUG nova.scheduler.client.report [None req-7cd3e466-5165-49d9-934b-74ecfbe9a0b9 tempest-ServersV294TestFqdnHostnames-1206786293 tempest-ServersV294TestFqdnHostnames-1206786293-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 710.246489] env[61839]: DEBUG nova.network.neutron [None req-a61be0be-1acd-4bdd-b910-e27219e7a065 tempest-ServersAdminNegativeTestJSON-965769092 tempest-ServersAdminNegativeTestJSON-965769092-project-member] [instance: 5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8] Successfully created port: 9d6dd4ce-4303-47c2-b801-9e788af014ff {{(pid=61839) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 710.392619] env[61839]: DEBUG nova.compute.manager [None req-1fbb0b67-167b-4268-b2f4-5c0b4d6a4a9c tempest-ServersNegativeTestMultiTenantJSON-1778640072 tempest-ServersNegativeTestMultiTenantJSON-1778640072-project-member] [instance: 4b7ef74e-4018-4c6e-b540-d65c986d1ff2] Starting instance...
{{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 710.620025] env[61839]: DEBUG oslo_concurrency.lockutils [None req-7cd3e466-5165-49d9-934b-74ecfbe9a0b9 tempest-ServersV294TestFqdnHostnames-1206786293 tempest-ServersV294TestFqdnHostnames-1206786293-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.981s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 710.620025] env[61839]: ERROR nova.compute.manager [None req-7cd3e466-5165-49d9-934b-74ecfbe9a0b9 tempest-ServersV294TestFqdnHostnames-1206786293 tempest-ServersV294TestFqdnHostnames-1206786293-project-member] [instance: 0ab450ab-6416-464d-8140-a8c320abb69c] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 848a80b9-d429-4435-98ee-44663831bb58, please check neutron logs for more information. [ 710.620025] env[61839]: ERROR nova.compute.manager [instance: 0ab450ab-6416-464d-8140-a8c320abb69c] Traceback (most recent call last): [ 710.620025] env[61839]: ERROR nova.compute.manager [instance: 0ab450ab-6416-464d-8140-a8c320abb69c] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 710.620025] env[61839]: ERROR nova.compute.manager [instance: 0ab450ab-6416-464d-8140-a8c320abb69c] self.driver.spawn(context, instance, image_meta, [ 710.620025] env[61839]: ERROR nova.compute.manager [instance: 0ab450ab-6416-464d-8140-a8c320abb69c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 710.620025] env[61839]: ERROR nova.compute.manager [instance: 0ab450ab-6416-464d-8140-a8c320abb69c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 710.620025] env[61839]: ERROR nova.compute.manager [instance: 0ab450ab-6416-464d-8140-a8c320abb69c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 710.620025] env[61839]: ERROR nova.compute.manager [instance: 0ab450ab-6416-464d-8140-a8c320abb69c] vm_ref = self.build_virtual_machine(instance, [ 710.620399] env[61839]: ERROR nova.compute.manager [instance: 0ab450ab-6416-464d-8140-a8c320abb69c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 710.620399] env[61839]: ERROR nova.compute.manager [instance: 0ab450ab-6416-464d-8140-a8c320abb69c] vif_infos = vmwarevif.get_vif_info(self._session, [ 710.620399] env[61839]: ERROR nova.compute.manager [instance: 0ab450ab-6416-464d-8140-a8c320abb69c] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 710.620399] env[61839]: ERROR nova.compute.manager [instance: 0ab450ab-6416-464d-8140-a8c320abb69c] for vif in network_info: [ 710.620399] env[61839]: ERROR nova.compute.manager [instance: 0ab450ab-6416-464d-8140-a8c320abb69c] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 710.620399] env[61839]: ERROR nova.compute.manager [instance: 0ab450ab-6416-464d-8140-a8c320abb69c] return self._sync_wrapper(fn, *args, **kwargs) [ 710.620399] env[61839]: ERROR nova.compute.manager [instance: 0ab450ab-6416-464d-8140-a8c320abb69c] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 710.620399] env[61839]: ERROR nova.compute.manager [instance: 0ab450ab-6416-464d-8140-a8c320abb69c] self.wait() [ 710.620399] env[61839]: ERROR nova.compute.manager [instance: 0ab450ab-6416-464d-8140-a8c320abb69c] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 710.620399] 
env[61839]: ERROR nova.compute.manager [instance: 0ab450ab-6416-464d-8140-a8c320abb69c] self[:] = self._gt.wait() [ 710.620399] env[61839]: ERROR nova.compute.manager [instance: 0ab450ab-6416-464d-8140-a8c320abb69c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 710.620399] env[61839]: ERROR nova.compute.manager [instance: 0ab450ab-6416-464d-8140-a8c320abb69c] return self._exit_event.wait() [ 710.620399] env[61839]: ERROR nova.compute.manager [instance: 0ab450ab-6416-464d-8140-a8c320abb69c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 710.620800] env[61839]: ERROR nova.compute.manager [instance: 0ab450ab-6416-464d-8140-a8c320abb69c] result = hub.switch() [ 710.620800] env[61839]: ERROR nova.compute.manager [instance: 0ab450ab-6416-464d-8140-a8c320abb69c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 710.620800] env[61839]: ERROR nova.compute.manager [instance: 0ab450ab-6416-464d-8140-a8c320abb69c] return self.greenlet.switch() [ 710.620800] env[61839]: ERROR nova.compute.manager [instance: 0ab450ab-6416-464d-8140-a8c320abb69c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 710.620800] env[61839]: ERROR nova.compute.manager [instance: 0ab450ab-6416-464d-8140-a8c320abb69c] result = function(*args, **kwargs) [ 710.620800] env[61839]: ERROR nova.compute.manager [instance: 0ab450ab-6416-464d-8140-a8c320abb69c] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 710.620800] env[61839]: ERROR nova.compute.manager [instance: 0ab450ab-6416-464d-8140-a8c320abb69c] return func(*args, **kwargs) [ 710.620800] env[61839]: ERROR nova.compute.manager [instance: 0ab450ab-6416-464d-8140-a8c320abb69c] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 710.620800] env[61839]: ERROR nova.compute.manager [instance: 0ab450ab-6416-464d-8140-a8c320abb69c] raise e [ 710.620800] env[61839]: ERROR nova.compute.manager [instance: 0ab450ab-6416-464d-8140-a8c320abb69c] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 710.620800] env[61839]: ERROR nova.compute.manager [instance: 0ab450ab-6416-464d-8140-a8c320abb69c] nwinfo = self.network_api.allocate_for_instance( [ 710.620800] env[61839]: ERROR nova.compute.manager [instance: 0ab450ab-6416-464d-8140-a8c320abb69c] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 710.620800] env[61839]: ERROR nova.compute.manager [instance: 0ab450ab-6416-464d-8140-a8c320abb69c] created_port_ids = self._update_ports_for_instance( [ 710.621208] env[61839]: ERROR nova.compute.manager [instance: 0ab450ab-6416-464d-8140-a8c320abb69c] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 710.621208] env[61839]: ERROR nova.compute.manager [instance: 0ab450ab-6416-464d-8140-a8c320abb69c] with excutils.save_and_reraise_exception(): [ 710.621208] env[61839]: ERROR nova.compute.manager [instance: 0ab450ab-6416-464d-8140-a8c320abb69c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 710.621208] env[61839]: ERROR nova.compute.manager [instance: 0ab450ab-6416-464d-8140-a8c320abb69c] self.force_reraise() [ 710.621208] env[61839]: ERROR nova.compute.manager [instance: 0ab450ab-6416-464d-8140-a8c320abb69c] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 710.621208] env[61839]: ERROR nova.compute.manager [instance: 0ab450ab-6416-464d-8140-a8c320abb69c] raise self.value [ 710.621208] env[61839]: ERROR nova.compute.manager [instance: 0ab450ab-6416-464d-8140-a8c320abb69c] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 710.621208] env[61839]: ERROR nova.compute.manager [instance: 0ab450ab-6416-464d-8140-a8c320abb69c] updated_port = self._update_port( [ 710.621208] env[61839]: ERROR nova.compute.manager [instance: 0ab450ab-6416-464d-8140-a8c320abb69c] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 710.621208] env[61839]: ERROR nova.compute.manager [instance: 0ab450ab-6416-464d-8140-a8c320abb69c] _ensure_no_port_binding_failure(port) [ 710.621208] env[61839]: ERROR nova.compute.manager [instance: 0ab450ab-6416-464d-8140-a8c320abb69c] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 710.621208] env[61839]: ERROR nova.compute.manager [instance: 0ab450ab-6416-464d-8140-a8c320abb69c] raise exception.PortBindingFailed(port_id=port['id']) [ 710.621675] env[61839]: ERROR nova.compute.manager [instance: 0ab450ab-6416-464d-8140-a8c320abb69c] nova.exception.PortBindingFailed: Binding failed for port 848a80b9-d429-4435-98ee-44663831bb58, please check neutron logs for more information. [ 710.621675] env[61839]: ERROR nova.compute.manager [instance: 0ab450ab-6416-464d-8140-a8c320abb69c] [ 710.621675] env[61839]: DEBUG nova.compute.utils [None req-7cd3e466-5165-49d9-934b-74ecfbe9a0b9 tempest-ServersV294TestFqdnHostnames-1206786293 tempest-ServersV294TestFqdnHostnames-1206786293-project-member] [instance: 0ab450ab-6416-464d-8140-a8c320abb69c] Binding failed for port 848a80b9-d429-4435-98ee-44663831bb58, please check neutron logs for more information. {{(pid=61839) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 710.622323] env[61839]: DEBUG oslo_concurrency.lockutils [None req-66411a82-44a4-4e09-b7e5-387750763c23 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 28.113s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 710.625301] env[61839]: DEBUG nova.compute.manager [None req-7cd3e466-5165-49d9-934b-74ecfbe9a0b9 tempest-ServersV294TestFqdnHostnames-1206786293 tempest-ServersV294TestFqdnHostnames-1206786293-project-member] [instance: 0ab450ab-6416-464d-8140-a8c320abb69c] Build of instance 0ab450ab-6416-464d-8140-a8c320abb69c was re-scheduled: Binding failed for port 848a80b9-d429-4435-98ee-44663831bb58, please check neutron logs for more information. 
{{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 710.626032] env[61839]: DEBUG nova.compute.manager [None req-7cd3e466-5165-49d9-934b-74ecfbe9a0b9 tempest-ServersV294TestFqdnHostnames-1206786293 tempest-ServersV294TestFqdnHostnames-1206786293-project-member] [instance: 0ab450ab-6416-464d-8140-a8c320abb69c] Unplugging VIFs for instance {{(pid=61839) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 710.627609] env[61839]: DEBUG oslo_concurrency.lockutils [None req-7cd3e466-5165-49d9-934b-74ecfbe9a0b9 tempest-ServersV294TestFqdnHostnames-1206786293 tempest-ServersV294TestFqdnHostnames-1206786293-project-member] Acquiring lock "refresh_cache-0ab450ab-6416-464d-8140-a8c320abb69c" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 710.627609] env[61839]: DEBUG oslo_concurrency.lockutils [None req-7cd3e466-5165-49d9-934b-74ecfbe9a0b9 tempest-ServersV294TestFqdnHostnames-1206786293 tempest-ServersV294TestFqdnHostnames-1206786293-project-member] Acquired lock "refresh_cache-0ab450ab-6416-464d-8140-a8c320abb69c" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 710.627609] env[61839]: DEBUG nova.network.neutron [None req-7cd3e466-5165-49d9-934b-74ecfbe9a0b9 tempest-ServersV294TestFqdnHostnames-1206786293 tempest-ServersV294TestFqdnHostnames-1206786293-project-member] [instance: 0ab450ab-6416-464d-8140-a8c320abb69c] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 710.658053] env[61839]: DEBUG nova.compute.manager [None req-a61be0be-1acd-4bdd-b910-e27219e7a065 tempest-ServersAdminNegativeTestJSON-965769092 tempest-ServersAdminNegativeTestJSON-965769092-project-member] [instance: 5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8] Start spawning the instance on the hypervisor. 
{{(pid=61839) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 710.686089] env[61839]: DEBUG nova.virt.hardware [None req-a61be0be-1acd-4bdd-b910-e27219e7a065 tempest-ServersAdminNegativeTestJSON-965769092 tempest-ServersAdminNegativeTestJSON-965769092-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-18T16:51:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-18T16:50:57Z,direct_url=<?>,disk_format='vmdk',id=e497cc62-282a-4a70-9770-22d80d8a1013,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a98e034276e44534a9feace637762da7',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-10-18T16:50:58Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 710.687830] env[61839]: DEBUG nova.virt.hardware [None req-a61be0be-1acd-4bdd-b910-e27219e7a065 tempest-ServersAdminNegativeTestJSON-965769092 tempest-ServersAdminNegativeTestJSON-965769092-project-member] Flavor limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 710.687830] env[61839]: DEBUG nova.virt.hardware [None req-a61be0be-1acd-4bdd-b910-e27219e7a065 tempest-ServersAdminNegativeTestJSON-965769092 tempest-ServersAdminNegativeTestJSON-965769092-project-member] Image limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 710.687830] env[61839]: DEBUG nova.virt.hardware [None req-a61be0be-1acd-4bdd-b910-e27219e7a065 tempest-ServersAdminNegativeTestJSON-965769092 tempest-ServersAdminNegativeTestJSON-965769092-project-member] Flavor pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 710.687830] env[61839]: DEBUG nova.virt.hardware [None req-a61be0be-1acd-4bdd-b910-e27219e7a065 tempest-ServersAdminNegativeTestJSON-965769092 tempest-ServersAdminNegativeTestJSON-965769092-project-member] Image pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 710.687830] env[61839]: DEBUG nova.virt.hardware [None req-a61be0be-1acd-4bdd-b910-e27219e7a065 tempest-ServersAdminNegativeTestJSON-965769092 tempest-ServersAdminNegativeTestJSON-965769092-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 710.688094] env[61839]: DEBUG nova.virt.hardware [None req-a61be0be-1acd-4bdd-b910-e27219e7a065 tempest-ServersAdminNegativeTestJSON-965769092 tempest-ServersAdminNegativeTestJSON-965769092-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 710.688094] env[61839]: DEBUG nova.virt.hardware [None req-a61be0be-1acd-4bdd-b910-e27219e7a065 tempest-ServersAdminNegativeTestJSON-965769092 tempest-ServersAdminNegativeTestJSON-965769092-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61839) _get_possible_cpu_topologies
/opt/stack/nova/nova/virt/hardware.py:471}} [ 710.688094] env[61839]: DEBUG nova.virt.hardware [None req-a61be0be-1acd-4bdd-b910-e27219e7a065 tempest-ServersAdminNegativeTestJSON-965769092 tempest-ServersAdminNegativeTestJSON-965769092-project-member] Got 1 possible topologies {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 710.689093] env[61839]: DEBUG nova.virt.hardware [None req-a61be0be-1acd-4bdd-b910-e27219e7a065 tempest-ServersAdminNegativeTestJSON-965769092 tempest-ServersAdminNegativeTestJSON-965769092-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 710.689411] env[61839]: DEBUG nova.virt.hardware [None req-a61be0be-1acd-4bdd-b910-e27219e7a065 tempest-ServersAdminNegativeTestJSON-965769092 tempest-ServersAdminNegativeTestJSON-965769092-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 710.690448] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b1aafac-1967-4919-a94e-22ce7af8d9ec {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.703024] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aef24348-6782-4bed-a88e-b02b5215e63c {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.924787] env[61839]: DEBUG oslo_concurrency.lockutils [None req-1fbb0b67-167b-4268-b2f4-5c0b4d6a4a9c tempest-ServersNegativeTestMultiTenantJSON-1778640072 tempest-ServersNegativeTestMultiTenantJSON-1778640072-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 711.174396] env[61839]: DEBUG nova.network.neutron [None req-7cd3e466-5165-49d9-934b-74ecfbe9a0b9 tempest-ServersV294TestFqdnHostnames-1206786293 tempest-ServersV294TestFqdnHostnames-1206786293-project-member] [instance: 0ab450ab-6416-464d-8140-a8c320abb69c] Instance cache missing network info. 
{{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 711.439146] env[61839]: DEBUG nova.network.neutron [None req-7cd3e466-5165-49d9-934b-74ecfbe9a0b9 tempest-ServersV294TestFqdnHostnames-1206786293 tempest-ServersV294TestFqdnHostnames-1206786293-project-member] [instance: 0ab450ab-6416-464d-8140-a8c320abb69c] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 711.549556] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd9c2065-30f0-48e7-9137-78aaf00b4afb {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.558126] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-389dc340-15b7-4a4f-a9e2-8569d3574da7 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.592946] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c4fd733-de36-4282-934f-366f99eff81a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.602348] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc1237c2-f74e-4550-99ed-09f7fc2964b5 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.617420] env[61839]: DEBUG nova.compute.provider_tree [None req-66411a82-44a4-4e09-b7e5-387750763c23 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 711.945944] env[61839]: DEBUG oslo_concurrency.lockutils [None req-7cd3e466-5165-49d9-934b-74ecfbe9a0b9 tempest-ServersV294TestFqdnHostnames-1206786293 tempest-ServersV294TestFqdnHostnames-1206786293-project-member] Releasing lock "refresh_cache-0ab450ab-6416-464d-8140-a8c320abb69c" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 711.946797] env[61839]: DEBUG nova.compute.manager [None req-7cd3e466-5165-49d9-934b-74ecfbe9a0b9 tempest-ServersV294TestFqdnHostnames-1206786293 tempest-ServersV294TestFqdnHostnames-1206786293-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61839) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 711.946797] env[61839]: DEBUG nova.compute.manager [None req-7cd3e466-5165-49d9-934b-74ecfbe9a0b9 tempest-ServersV294TestFqdnHostnames-1206786293 tempest-ServersV294TestFqdnHostnames-1206786293-project-member] [instance: 0ab450ab-6416-464d-8140-a8c320abb69c] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 711.946797] env[61839]: DEBUG nova.network.neutron [None req-7cd3e466-5165-49d9-934b-74ecfbe9a0b9 tempest-ServersV294TestFqdnHostnames-1206786293 tempest-ServersV294TestFqdnHostnames-1206786293-project-member] [instance: 0ab450ab-6416-464d-8140-a8c320abb69c] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 711.970867] env[61839]: DEBUG nova.network.neutron [None req-7cd3e466-5165-49d9-934b-74ecfbe9a0b9 tempest-ServersV294TestFqdnHostnames-1206786293 tempest-ServersV294TestFqdnHostnames-1206786293-project-member] [instance: 0ab450ab-6416-464d-8140-a8c320abb69c] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 711.991361] env[61839]: DEBUG nova.compute.manager [req-e83f5b6a-a1ef-4e74-a8c9-e8edd4357062 req-c01d8dd7-5e13-4d07-8062-4b783ffd1d8a service nova] [instance: 5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8] Received event network-changed-9d6dd4ce-4303-47c2-b801-9e788af014ff {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 711.991595] env[61839]: DEBUG nova.compute.manager [req-e83f5b6a-a1ef-4e74-a8c9-e8edd4357062 req-c01d8dd7-5e13-4d07-8062-4b783ffd1d8a service nova] [instance: 5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8] Refreshing instance network info cache due to event network-changed-9d6dd4ce-4303-47c2-b801-9e788af014ff. 
{{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 711.991749] env[61839]: DEBUG oslo_concurrency.lockutils [req-e83f5b6a-a1ef-4e74-a8c9-e8edd4357062 req-c01d8dd7-5e13-4d07-8062-4b783ffd1d8a service nova] Acquiring lock "refresh_cache-5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 711.991887] env[61839]: DEBUG oslo_concurrency.lockutils [req-e83f5b6a-a1ef-4e74-a8c9-e8edd4357062 req-c01d8dd7-5e13-4d07-8062-4b783ffd1d8a service nova] Acquired lock "refresh_cache-5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 711.992061] env[61839]: DEBUG nova.network.neutron [req-e83f5b6a-a1ef-4e74-a8c9-e8edd4357062 req-c01d8dd7-5e13-4d07-8062-4b783ffd1d8a service nova] [instance: 5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8] Refreshing network info cache for port 9d6dd4ce-4303-47c2-b801-9e788af014ff {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 712.123064] env[61839]: DEBUG nova.scheduler.client.report [None req-66411a82-44a4-4e09-b7e5-387750763c23 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 712.313149] env[61839]: ERROR nova.compute.manager [None req-a61be0be-1acd-4bdd-b910-e27219e7a065 tempest-ServersAdminNegativeTestJSON-965769092 tempest-ServersAdminNegativeTestJSON-965769092-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 9d6dd4ce-4303-47c2-b801-9e788af014ff, please check neutron logs for more information. 
[ 712.313149] env[61839]: ERROR nova.compute.manager Traceback (most recent call last): [ 712.313149] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 712.313149] env[61839]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 712.313149] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 712.313149] env[61839]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 712.313149] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 712.313149] env[61839]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 712.313149] env[61839]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 712.313149] env[61839]: ERROR nova.compute.manager self.force_reraise() [ 712.313149] env[61839]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 712.313149] env[61839]: ERROR nova.compute.manager raise self.value [ 712.313149] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 712.313149] env[61839]: ERROR nova.compute.manager updated_port = self._update_port( [ 712.313149] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 712.313149] env[61839]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 712.313760] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 712.313760] env[61839]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 712.313760] env[61839]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 9d6dd4ce-4303-47c2-b801-9e788af014ff, please check neutron logs for more information. 
[ 712.313760] env[61839]: ERROR nova.compute.manager [ 712.313760] env[61839]: Traceback (most recent call last): [ 712.313760] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 712.313760] env[61839]: listener.cb(fileno) [ 712.313760] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 712.313760] env[61839]: result = function(*args, **kwargs) [ 712.313760] env[61839]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 712.313760] env[61839]: return func(*args, **kwargs) [ 712.313760] env[61839]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 712.313760] env[61839]: raise e [ 712.313760] env[61839]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 712.313760] env[61839]: nwinfo = self.network_api.allocate_for_instance( [ 712.313760] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 712.313760] env[61839]: created_port_ids = self._update_ports_for_instance( [ 712.313760] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 712.313760] env[61839]: with excutils.save_and_reraise_exception(): [ 712.313760] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 712.313760] env[61839]: self.force_reraise() [ 712.313760] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 712.313760] env[61839]: raise self.value [ 712.313760] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 712.313760] env[61839]: updated_port = self._update_port( [ 712.313760] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 712.313760] env[61839]: _ensure_no_port_binding_failure(port) [ 712.313760] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 712.313760] env[61839]: raise exception.PortBindingFailed(port_id=port['id']) [ 712.314590] env[61839]: nova.exception.PortBindingFailed: Binding failed for port 9d6dd4ce-4303-47c2-b801-9e788af014ff, please check neutron logs for more information. [ 712.314590] env[61839]: Removing descriptor: 17 [ 712.314590] env[61839]: ERROR nova.compute.manager [None req-a61be0be-1acd-4bdd-b910-e27219e7a065 tempest-ServersAdminNegativeTestJSON-965769092 tempest-ServersAdminNegativeTestJSON-965769092-project-member] [instance: 5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 9d6dd4ce-4303-47c2-b801-9e788af014ff, please check neutron logs for more information. 
[ 712.314590] env[61839]: ERROR nova.compute.manager [instance: 5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8] Traceback (most recent call last):
[ 712.314590] env[61839]: ERROR nova.compute.manager [instance: 5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8]   File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources
[ 712.314590] env[61839]: ERROR nova.compute.manager [instance: 5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8]     yield resources
[ 712.314590] env[61839]: ERROR nova.compute.manager [instance: 5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8]   File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance
[ 712.314590] env[61839]: ERROR nova.compute.manager [instance: 5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8]     self.driver.spawn(context, instance, image_meta,
[ 712.314590] env[61839]: ERROR nova.compute.manager [instance: 5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8]   File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn
[ 712.314590] env[61839]: ERROR nova.compute.manager [instance: 5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8]     self._vmops.spawn(context, instance, image_meta, injected_files,
[ 712.314590] env[61839]: ERROR nova.compute.manager [instance: 5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn
[ 712.314590] env[61839]: ERROR nova.compute.manager [instance: 5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8]     vm_ref = self.build_virtual_machine(instance,
[ 712.314932] env[61839]: ERROR nova.compute.manager [instance: 5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine
[ 712.314932] env[61839]: ERROR nova.compute.manager [instance: 5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8]     vif_infos = vmwarevif.get_vif_info(self._session,
[ 712.314932] env[61839]: ERROR nova.compute.manager [instance: 5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8]   File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info
[ 712.314932] env[61839]: ERROR nova.compute.manager [instance: 5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8]     for vif in network_info:
[ 712.314932] env[61839]: ERROR nova.compute.manager [instance: 5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8]   File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__
[ 712.314932] env[61839]: ERROR nova.compute.manager [instance: 5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8]     return self._sync_wrapper(fn, *args, **kwargs)
[ 712.314932] env[61839]: ERROR nova.compute.manager [instance: 5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8]   File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper
[ 712.314932] env[61839]: ERROR nova.compute.manager [instance: 5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8]     self.wait()
[ 712.314932] env[61839]: ERROR nova.compute.manager [instance: 5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8]   File "/opt/stack/nova/nova/network/model.py", line 635, in wait
[ 712.314932] env[61839]: ERROR nova.compute.manager [instance: 5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8]     self[:] = self._gt.wait()
[ 712.314932] env[61839]: ERROR nova.compute.manager [instance: 5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait
[ 712.314932] env[61839]: ERROR nova.compute.manager [instance: 5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8]     return self._exit_event.wait()
[ 712.314932] env[61839]: ERROR nova.compute.manager [instance: 5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 712.315279] env[61839]: ERROR nova.compute.manager [instance: 5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8]     result = hub.switch()
[ 712.315279] env[61839]: ERROR nova.compute.manager [instance: 5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 712.315279] env[61839]: ERROR nova.compute.manager [instance: 5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8]     return self.greenlet.switch()
[ 712.315279] env[61839]: ERROR nova.compute.manager [instance: 5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 712.315279] env[61839]: ERROR nova.compute.manager [instance: 5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8]     result = function(*args, **kwargs)
[ 712.315279] env[61839]: ERROR nova.compute.manager [instance: 5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8]   File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 712.315279] env[61839]: ERROR nova.compute.manager [instance: 5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8]     return func(*args, **kwargs)
[ 712.315279] env[61839]: ERROR nova.compute.manager [instance: 5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8]   File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async
[ 712.315279] env[61839]: ERROR nova.compute.manager [instance: 5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8]     raise e
[ 712.315279] env[61839]: ERROR nova.compute.manager [instance: 5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8]   File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async
[ 712.315279] env[61839]: ERROR nova.compute.manager [instance: 5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8]     nwinfo = self.network_api.allocate_for_instance(
[ 712.315279] env[61839]: ERROR nova.compute.manager [instance: 5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8]   File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance
[ 712.315279] env[61839]: ERROR nova.compute.manager [instance: 5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8]     created_port_ids = self._update_ports_for_instance(
[ 712.315608] env[61839]: ERROR nova.compute.manager [instance: 5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8]   File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance
[ 712.315608] env[61839]: ERROR nova.compute.manager [instance: 5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8]     with excutils.save_and_reraise_exception():
[ 712.315608] env[61839]: ERROR nova.compute.manager [instance: 5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 712.315608] env[61839]: ERROR nova.compute.manager [instance: 5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8]     self.force_reraise()
[ 712.315608] env[61839]: ERROR nova.compute.manager [instance: 5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 712.315608] env[61839]: ERROR nova.compute.manager [instance: 5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8]     raise self.value
[ 712.315608] env[61839]: ERROR nova.compute.manager [instance: 5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8]   File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance
[ 712.315608] env[61839]: ERROR nova.compute.manager [instance: 5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8]     updated_port = self._update_port(
[ 712.315608] env[61839]: ERROR nova.compute.manager [instance: 5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8]   File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port
[ 712.315608] env[61839]: ERROR nova.compute.manager [instance: 5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8]     _ensure_no_port_binding_failure(port)
[ 712.315608] env[61839]: ERROR nova.compute.manager [instance: 5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8]   File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure
[ 712.315608] env[61839]: ERROR nova.compute.manager [instance: 5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8]     raise exception.PortBindingFailed(port_id=port['id'])
[ 712.315952] env[61839]: ERROR nova.compute.manager [instance: 5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8] nova.exception.PortBindingFailed: Binding failed for port 9d6dd4ce-4303-47c2-b801-9e788af014ff, please check neutron logs for more information.
[ 712.315952] env[61839]: ERROR nova.compute.manager [instance: 5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8]
[ 712.315952] env[61839]: INFO nova.compute.manager [None req-a61be0be-1acd-4bdd-b910-e27219e7a065 tempest-ServersAdminNegativeTestJSON-965769092 tempest-ServersAdminNegativeTestJSON-965769092-project-member] [instance: 5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8] Terminating instance
[ 712.322732] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a61be0be-1acd-4bdd-b910-e27219e7a065 tempest-ServersAdminNegativeTestJSON-965769092 tempest-ServersAdminNegativeTestJSON-965769092-project-member] Acquiring lock "refresh_cache-5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 712.475149] env[61839]: DEBUG nova.network.neutron [None req-7cd3e466-5165-49d9-934b-74ecfbe9a0b9 tempest-ServersV294TestFqdnHostnames-1206786293 tempest-ServersV294TestFqdnHostnames-1206786293-project-member] [instance: 0ab450ab-6416-464d-8140-a8c320abb69c] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 712.516973] env[61839]: DEBUG nova.network.neutron [req-e83f5b6a-a1ef-4e74-a8c9-e8edd4357062 req-c01d8dd7-5e13-4d07-8062-4b783ffd1d8a service nova] [instance: 5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 712.630027] env[61839]: DEBUG oslo_concurrency.lockutils [None req-66411a82-44a4-4e09-b7e5-387750763c23 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.005s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 712.630027] env[61839]: ERROR nova.compute.manager [None req-66411a82-44a4-4e09-b7e5-387750763c23 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 3ea7af26-14b2-4371-a4f4-48afc190d4bc] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 7edfbbec-8cb7-4e20-ac8f-419464c8aac9, please check neutron logs for more information.
[ 712.630027] env[61839]: ERROR nova.compute.manager [instance: 3ea7af26-14b2-4371-a4f4-48afc190d4bc] Traceback (most recent call last):
[ 712.630027] env[61839]: ERROR nova.compute.manager [instance: 3ea7af26-14b2-4371-a4f4-48afc190d4bc]   File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance
[ 712.630027] env[61839]: ERROR nova.compute.manager [instance: 3ea7af26-14b2-4371-a4f4-48afc190d4bc]     self.driver.spawn(context, instance, image_meta,
[ 712.630027] env[61839]: ERROR nova.compute.manager [instance: 3ea7af26-14b2-4371-a4f4-48afc190d4bc]   File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn
[ 712.630027] env[61839]: ERROR nova.compute.manager [instance: 3ea7af26-14b2-4371-a4f4-48afc190d4bc]     self._vmops.spawn(context, instance, image_meta, injected_files,
[ 712.630027] env[61839]: ERROR nova.compute.manager [instance: 3ea7af26-14b2-4371-a4f4-48afc190d4bc]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn
[ 712.630027] env[61839]: ERROR nova.compute.manager [instance: 3ea7af26-14b2-4371-a4f4-48afc190d4bc]     vm_ref = self.build_virtual_machine(instance,
[ 712.631398] env[61839]: ERROR nova.compute.manager [instance: 3ea7af26-14b2-4371-a4f4-48afc190d4bc]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine
[ 712.631398] env[61839]: ERROR nova.compute.manager [instance: 3ea7af26-14b2-4371-a4f4-48afc190d4bc]     vif_infos = vmwarevif.get_vif_info(self._session,
[ 712.631398] env[61839]: ERROR nova.compute.manager [instance: 3ea7af26-14b2-4371-a4f4-48afc190d4bc]   File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info
[ 712.631398] env[61839]: ERROR nova.compute.manager [instance: 3ea7af26-14b2-4371-a4f4-48afc190d4bc]     for vif in network_info:
[ 712.631398] env[61839]: ERROR nova.compute.manager [instance: 3ea7af26-14b2-4371-a4f4-48afc190d4bc]   File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__
[ 712.631398] env[61839]: ERROR nova.compute.manager [instance: 3ea7af26-14b2-4371-a4f4-48afc190d4bc]     return self._sync_wrapper(fn, *args, **kwargs)
[ 712.631398] env[61839]: ERROR nova.compute.manager [instance: 3ea7af26-14b2-4371-a4f4-48afc190d4bc]   File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper
[ 712.631398] env[61839]: ERROR nova.compute.manager [instance: 3ea7af26-14b2-4371-a4f4-48afc190d4bc]     self.wait()
[ 712.631398] env[61839]: ERROR nova.compute.manager [instance: 3ea7af26-14b2-4371-a4f4-48afc190d4bc]   File "/opt/stack/nova/nova/network/model.py", line 635, in wait
[ 712.631398] env[61839]: ERROR nova.compute.manager [instance: 3ea7af26-14b2-4371-a4f4-48afc190d4bc]     self[:] = self._gt.wait()
[ 712.631398] env[61839]: ERROR nova.compute.manager [instance: 3ea7af26-14b2-4371-a4f4-48afc190d4bc]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait
[ 712.631398] env[61839]: ERROR nova.compute.manager [instance: 3ea7af26-14b2-4371-a4f4-48afc190d4bc]     return self._exit_event.wait()
[ 712.631398] env[61839]: ERROR nova.compute.manager [instance: 3ea7af26-14b2-4371-a4f4-48afc190d4bc]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 712.631765] env[61839]: ERROR nova.compute.manager [instance: 3ea7af26-14b2-4371-a4f4-48afc190d4bc]     result = hub.switch()
[ 712.631765] env[61839]: ERROR nova.compute.manager [instance: 3ea7af26-14b2-4371-a4f4-48afc190d4bc]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 712.631765] env[61839]: ERROR nova.compute.manager [instance: 3ea7af26-14b2-4371-a4f4-48afc190d4bc]     return self.greenlet.switch()
[ 712.631765] env[61839]: ERROR nova.compute.manager [instance: 3ea7af26-14b2-4371-a4f4-48afc190d4bc]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 712.631765] env[61839]: ERROR nova.compute.manager [instance: 3ea7af26-14b2-4371-a4f4-48afc190d4bc]     result = function(*args, **kwargs)
[ 712.631765] env[61839]: ERROR nova.compute.manager [instance: 3ea7af26-14b2-4371-a4f4-48afc190d4bc]   File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 712.631765] env[61839]: ERROR nova.compute.manager [instance: 3ea7af26-14b2-4371-a4f4-48afc190d4bc]     return func(*args, **kwargs)
[ 712.631765] env[61839]: ERROR nova.compute.manager [instance: 3ea7af26-14b2-4371-a4f4-48afc190d4bc]   File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async
[ 712.631765] env[61839]: ERROR nova.compute.manager [instance: 3ea7af26-14b2-4371-a4f4-48afc190d4bc]     raise e
[ 712.631765] env[61839]: ERROR nova.compute.manager [instance: 3ea7af26-14b2-4371-a4f4-48afc190d4bc]   File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async
[ 712.631765] env[61839]: ERROR nova.compute.manager [instance: 3ea7af26-14b2-4371-a4f4-48afc190d4bc]     nwinfo = self.network_api.allocate_for_instance(
[ 712.631765] env[61839]: ERROR nova.compute.manager [instance: 3ea7af26-14b2-4371-a4f4-48afc190d4bc]   File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance
[ 712.631765] env[61839]: ERROR nova.compute.manager [instance: 3ea7af26-14b2-4371-a4f4-48afc190d4bc]     created_port_ids = self._update_ports_for_instance(
[ 712.632139] env[61839]: ERROR nova.compute.manager [instance: 3ea7af26-14b2-4371-a4f4-48afc190d4bc]   File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance
[ 712.632139] env[61839]: ERROR nova.compute.manager [instance: 3ea7af26-14b2-4371-a4f4-48afc190d4bc]     with excutils.save_and_reraise_exception():
[ 712.632139] env[61839]: ERROR nova.compute.manager [instance: 3ea7af26-14b2-4371-a4f4-48afc190d4bc]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 712.632139] env[61839]: ERROR nova.compute.manager [instance: 3ea7af26-14b2-4371-a4f4-48afc190d4bc]     self.force_reraise()
[ 712.632139] env[61839]: ERROR nova.compute.manager [instance: 3ea7af26-14b2-4371-a4f4-48afc190d4bc]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 712.632139] env[61839]: ERROR nova.compute.manager [instance: 3ea7af26-14b2-4371-a4f4-48afc190d4bc]     raise self.value
[ 712.632139] env[61839]: ERROR nova.compute.manager [instance: 3ea7af26-14b2-4371-a4f4-48afc190d4bc]   File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance
[ 712.632139] env[61839]: ERROR nova.compute.manager [instance: 3ea7af26-14b2-4371-a4f4-48afc190d4bc]     updated_port = self._update_port(
[ 712.632139] env[61839]: ERROR nova.compute.manager [instance: 3ea7af26-14b2-4371-a4f4-48afc190d4bc]   File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port
[ 712.632139] env[61839]: ERROR nova.compute.manager [instance: 3ea7af26-14b2-4371-a4f4-48afc190d4bc]     _ensure_no_port_binding_failure(port)
[ 712.632139] env[61839]: ERROR nova.compute.manager [instance: 3ea7af26-14b2-4371-a4f4-48afc190d4bc]   File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure
[ 712.632139] env[61839]: ERROR nova.compute.manager [instance: 3ea7af26-14b2-4371-a4f4-48afc190d4bc]     raise exception.PortBindingFailed(port_id=port['id'])
[ 712.632510] env[61839]: ERROR nova.compute.manager [instance: 3ea7af26-14b2-4371-a4f4-48afc190d4bc] nova.exception.PortBindingFailed: Binding failed for port 7edfbbec-8cb7-4e20-ac8f-419464c8aac9, please check neutron logs for more information.
[ 712.632510] env[61839]: ERROR nova.compute.manager [instance: 3ea7af26-14b2-4371-a4f4-48afc190d4bc]
[ 712.632510] env[61839]: DEBUG nova.compute.utils [None req-66411a82-44a4-4e09-b7e5-387750763c23 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 3ea7af26-14b2-4371-a4f4-48afc190d4bc] Binding failed for port 7edfbbec-8cb7-4e20-ac8f-419464c8aac9, please check neutron logs for more information. {{(pid=61839) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}}
[ 712.632510] env[61839]: DEBUG oslo_concurrency.lockutils [None req-d67a25ce-8bd3-48bd-87b7-725457f40be1 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.073s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 712.633466] env[61839]: INFO nova.compute.claims [None req-d67a25ce-8bd3-48bd-87b7-725457f40be1 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] [instance: 97560b6e-0f50-4cc0-b620-305c82938390] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 712.639257] env[61839]: DEBUG nova.compute.manager [None req-66411a82-44a4-4e09-b7e5-387750763c23 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 3ea7af26-14b2-4371-a4f4-48afc190d4bc] Build of instance 3ea7af26-14b2-4371-a4f4-48afc190d4bc was re-scheduled: Binding failed for port 7edfbbec-8cb7-4e20-ac8f-419464c8aac9, please check neutron logs for more information. {{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}}
[ 712.640661] env[61839]: DEBUG nova.compute.manager [None req-66411a82-44a4-4e09-b7e5-387750763c23 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 3ea7af26-14b2-4371-a4f4-48afc190d4bc] Unplugging VIFs for instance {{(pid=61839) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}}
[ 712.641182] env[61839]: DEBUG oslo_concurrency.lockutils [None req-66411a82-44a4-4e09-b7e5-387750763c23 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Acquiring lock "refresh_cache-3ea7af26-14b2-4371-a4f4-48afc190d4bc" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 712.641182] env[61839]: DEBUG oslo_concurrency.lockutils [None req-66411a82-44a4-4e09-b7e5-387750763c23 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Acquired lock "refresh_cache-3ea7af26-14b2-4371-a4f4-48afc190d4bc" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 712.641789] env[61839]: DEBUG nova.network.neutron [None req-66411a82-44a4-4e09-b7e5-387750763c23 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 3ea7af26-14b2-4371-a4f4-48afc190d4bc] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}}
[ 712.646142] env[61839]: DEBUG nova.network.neutron [req-e83f5b6a-a1ef-4e74-a8c9-e8edd4357062 req-c01d8dd7-5e13-4d07-8062-4b783ffd1d8a service nova] [instance: 5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 712.979370] env[61839]: INFO nova.compute.manager [None req-7cd3e466-5165-49d9-934b-74ecfbe9a0b9 tempest-ServersV294TestFqdnHostnames-1206786293 tempest-ServersV294TestFqdnHostnames-1206786293-project-member] [instance: 0ab450ab-6416-464d-8140-a8c320abb69c] Took 1.03 seconds to deallocate network for instance.
[ 713.149902] env[61839]: DEBUG oslo_concurrency.lockutils [req-e83f5b6a-a1ef-4e74-a8c9-e8edd4357062 req-c01d8dd7-5e13-4d07-8062-4b783ffd1d8a service nova] Releasing lock "refresh_cache-5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 713.150981] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a61be0be-1acd-4bdd-b910-e27219e7a065 tempest-ServersAdminNegativeTestJSON-965769092 tempest-ServersAdminNegativeTestJSON-965769092-project-member] Acquired lock "refresh_cache-5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 713.155285] env[61839]: DEBUG nova.network.neutron [None req-a61be0be-1acd-4bdd-b910-e27219e7a065 tempest-ServersAdminNegativeTestJSON-965769092 tempest-ServersAdminNegativeTestJSON-965769092-project-member] [instance: 5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}}
[ 713.176736] env[61839]: DEBUG nova.network.neutron [None req-66411a82-44a4-4e09-b7e5-387750763c23 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 3ea7af26-14b2-4371-a4f4-48afc190d4bc] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 713.378558] env[61839]: DEBUG nova.network.neutron [None req-66411a82-44a4-4e09-b7e5-387750763c23 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 3ea7af26-14b2-4371-a4f4-48afc190d4bc] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 713.686685] env[61839]: DEBUG nova.network.neutron [None req-a61be0be-1acd-4bdd-b910-e27219e7a065 tempest-ServersAdminNegativeTestJSON-965769092 tempest-ServersAdminNegativeTestJSON-965769092-project-member] [instance: 5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 713.820872] env[61839]: DEBUG nova.network.neutron [None req-a61be0be-1acd-4bdd-b910-e27219e7a065 tempest-ServersAdminNegativeTestJSON-965769092 tempest-ServersAdminNegativeTestJSON-965769092-project-member] [instance: 5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 713.878371] env[61839]: DEBUG oslo_concurrency.lockutils [None req-66411a82-44a4-4e09-b7e5-387750763c23 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Releasing lock "refresh_cache-3ea7af26-14b2-4371-a4f4-48afc190d4bc" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 713.878770] env[61839]: DEBUG nova.compute.manager [None req-66411a82-44a4-4e09-b7e5-387750763c23 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61839) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}}
[ 713.879041] env[61839]: DEBUG nova.compute.manager [None req-66411a82-44a4-4e09-b7e5-387750763c23 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 3ea7af26-14b2-4371-a4f4-48afc190d4bc] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}}
[ 713.879167] env[61839]: DEBUG nova.network.neutron [None req-66411a82-44a4-4e09-b7e5-387750763c23 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 3ea7af26-14b2-4371-a4f4-48afc190d4bc] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}}
[ 713.915194] env[61839]: DEBUG nova.network.neutron [None req-66411a82-44a4-4e09-b7e5-387750763c23 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 3ea7af26-14b2-4371-a4f4-48afc190d4bc] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 714.014807] env[61839]: INFO nova.scheduler.client.report [None req-7cd3e466-5165-49d9-934b-74ecfbe9a0b9 tempest-ServersV294TestFqdnHostnames-1206786293 tempest-ServersV294TestFqdnHostnames-1206786293-project-member] Deleted allocations for instance 0ab450ab-6416-464d-8140-a8c320abb69c
[ 714.055798] env[61839]: DEBUG nova.compute.manager [req-069b5f01-451a-4405-830d-536b3669be36 req-b25fffdc-7075-43b3-97fa-efa0c33c3b55 service nova] [instance: 5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8] Received event network-vif-deleted-9d6dd4ce-4303-47c2-b801-9e788af014ff {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}}
[ 714.114039] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b80de4b1-9aa3-44fe-8ccc-0282991be242 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 714.122102] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3298a34b-d7f0-4754-a525-3b16bcd6c3c1 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 714.155438] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-281835f0-1679-485d-82bb-0302ed2257d0 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 714.162733] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dee821e4-b391-4a60-8cc8-5b83cdb60fb4 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 714.175700] env[61839]: DEBUG nova.compute.provider_tree [None req-d67a25ce-8bd3-48bd-87b7-725457f40be1 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 714.323774] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a61be0be-1acd-4bdd-b910-e27219e7a065 tempest-ServersAdminNegativeTestJSON-965769092 tempest-ServersAdminNegativeTestJSON-965769092-project-member] Releasing lock "refresh_cache-5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 714.324041] env[61839]: DEBUG nova.compute.manager [None req-a61be0be-1acd-4bdd-b910-e27219e7a065 tempest-ServersAdminNegativeTestJSON-965769092 tempest-ServersAdminNegativeTestJSON-965769092-project-member] [instance: 5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8] Start destroying the instance on the hypervisor. {{(pid=61839) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}}
[ 714.324270] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-a61be0be-1acd-4bdd-b910-e27219e7a065 tempest-ServersAdminNegativeTestJSON-965769092 tempest-ServersAdminNegativeTestJSON-965769092-project-member] [instance: 5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8] Destroying instance {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}}
[ 714.324550] env[61839]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d967719a-12df-4fa0-be71-f50ef016657b {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 714.334063] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ca86222-c1ec-46a2-b85b-ae71ebf75ca9 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 714.356726] env[61839]: WARNING nova.virt.vmwareapi.vmops [None req-a61be0be-1acd-4bdd-b910-e27219e7a065 tempest-ServersAdminNegativeTestJSON-965769092 tempest-ServersAdminNegativeTestJSON-965769092-project-member] [instance: 5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8 could not be found.
[ 714.356941] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-a61be0be-1acd-4bdd-b910-e27219e7a065 tempest-ServersAdminNegativeTestJSON-965769092 tempest-ServersAdminNegativeTestJSON-965769092-project-member] [instance: 5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8] Instance destroyed {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}}
[ 714.357137] env[61839]: INFO nova.compute.manager [None req-a61be0be-1acd-4bdd-b910-e27219e7a065 tempest-ServersAdminNegativeTestJSON-965769092 tempest-ServersAdminNegativeTestJSON-965769092-project-member] [instance: 5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8] Took 0.03 seconds to destroy the instance on the hypervisor.
[ 714.357380] env[61839]: DEBUG oslo.service.loopingcall [None req-a61be0be-1acd-4bdd-b910-e27219e7a065 tempest-ServersAdminNegativeTestJSON-965769092 tempest-ServersAdminNegativeTestJSON-965769092-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 714.357573] env[61839]: DEBUG nova.compute.manager [-] [instance: 5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}}
[ 714.357665] env[61839]: DEBUG nova.network.neutron [-] [instance: 5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}}
[ 714.378108] env[61839]: DEBUG nova.network.neutron [-] [instance: 5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 714.419096] env[61839]: DEBUG nova.network.neutron [None req-66411a82-44a4-4e09-b7e5-387750763c23 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 3ea7af26-14b2-4371-a4f4-48afc190d4bc] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 714.534022] env[61839]: DEBUG oslo_concurrency.lockutils [None req-7cd3e466-5165-49d9-934b-74ecfbe9a0b9 tempest-ServersV294TestFqdnHostnames-1206786293 tempest-ServersV294TestFqdnHostnames-1206786293-project-member] Lock "0ab450ab-6416-464d-8140-a8c320abb69c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 141.157s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 714.679888] env[61839]: DEBUG nova.scheduler.client.report [None req-d67a25ce-8bd3-48bd-87b7-725457f40be1 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 714.881443] env[61839]: DEBUG nova.network.neutron [-] [instance: 5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 714.921818] env[61839]: INFO nova.compute.manager [None req-66411a82-44a4-4e09-b7e5-387750763c23 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 3ea7af26-14b2-4371-a4f4-48afc190d4bc] Took 1.04 seconds to deallocate network for instance.
[ 715.033697] env[61839]: DEBUG nova.compute.manager [None req-bfae6393-6992-4f8d-b92d-1fa0650840c0 tempest-ListImageFiltersTestJSON-1646419560 tempest-ListImageFiltersTestJSON-1646419560-project-member] [instance: 56993a6d-de55-4648-9fd9-31d06a57f300] Starting instance... {{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}}
[ 715.187898] env[61839]: DEBUG oslo_concurrency.lockutils [None req-d67a25ce-8bd3-48bd-87b7-725457f40be1 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.557s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 715.188016] env[61839]: DEBUG nova.compute.manager [None req-d67a25ce-8bd3-48bd-87b7-725457f40be1 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] [instance: 97560b6e-0f50-4cc0-b620-305c82938390] Start building networks asynchronously for instance. {{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}}
[ 715.192057] env[61839]: DEBUG oslo_concurrency.lockutils [None req-bf14e7b3-0ebe-4036-b698-ec3c92db8125 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 28.676s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 715.192779] env[61839]: DEBUG nova.objects.instance [None req-bf14e7b3-0ebe-4036-b698-ec3c92db8125 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] [instance: 81ba4888-4b21-410f-ab86-a3068995836f] Trying to apply a migration context that does not seem to be set for this instance {{(pid=61839) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}}
[ 715.387041] env[61839]: INFO nova.compute.manager [-] [instance: 5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8] Took 1.03 seconds to deallocate network for instance.
[ 715.392975] env[61839]: DEBUG nova.compute.claims [None req-a61be0be-1acd-4bdd-b910-e27219e7a065 tempest-ServersAdminNegativeTestJSON-965769092 tempest-ServersAdminNegativeTestJSON-965769092-project-member] [instance: 5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8] Aborting claim: {{(pid=61839) abort /opt/stack/nova/nova/compute/claims.py:85}}
[ 715.393480] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a61be0be-1acd-4bdd-b910-e27219e7a065 tempest-ServersAdminNegativeTestJSON-965769092 tempest-ServersAdminNegativeTestJSON-965769092-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 715.563443] env[61839]: DEBUG oslo_concurrency.lockutils [None req-bfae6393-6992-4f8d-b92d-1fa0650840c0 tempest-ListImageFiltersTestJSON-1646419560 tempest-ListImageFiltersTestJSON-1646419560-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 715.697022] env[61839]: DEBUG nova.compute.utils [None req-d67a25ce-8bd3-48bd-87b7-725457f40be1 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] Using /dev/sd instead of None {{(pid=61839) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}}
[ 715.704981] env[61839]: DEBUG nova.compute.manager [None req-d67a25ce-8bd3-48bd-87b7-725457f40be1 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] [instance: 97560b6e-0f50-4cc0-b620-305c82938390] Allocating IP information in the background. {{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}}
[ 715.705304] env[61839]: DEBUG nova.network.neutron [None req-d67a25ce-8bd3-48bd-87b7-725457f40be1 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] [instance: 97560b6e-0f50-4cc0-b620-305c82938390] allocate_for_instance() {{(pid=61839) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}}
[ 715.783244] env[61839]: DEBUG nova.policy [None req-d67a25ce-8bd3-48bd-87b7-725457f40be1 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2bcde59cc33d41fc9f68c19d6f247fff', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '766b763d07a649a3aab0e3299625641a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61839) authorize /opt/stack/nova/nova/policy.py:201}}
[ 715.839999] env[61839]: DEBUG oslo_concurrency.lockutils [None req-fb2077f5-f93b-4202-be51-2a1ff1444651 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Acquiring lock "6f43abec-51e2-40e4-8a0f-5a8617a9a9f8" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 715.840246] env[61839]: DEBUG oslo_concurrency.lockutils [None req-fb2077f5-f93b-4202-be51-2a1ff1444651 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Lock "6f43abec-51e2-40e4-8a0f-5a8617a9a9f8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 715.959182] env[61839]: INFO nova.scheduler.client.report [None req-66411a82-44a4-4e09-b7e5-387750763c23 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Deleted allocations for instance 3ea7af26-14b2-4371-a4f4-48afc190d4bc
[ 716.210522] env[61839]: DEBUG oslo_concurrency.lockutils [None req-bf14e7b3-0ebe-4036-b698-ec3c92db8125 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.018s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 716.211795] env[61839]: DEBUG nova.compute.manager [None req-d67a25ce-8bd3-48bd-87b7-725457f40be1 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] [instance: 97560b6e-0f50-4cc0-b620-305c82938390] Start building block device mappings for instance. {{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}}
[ 716.215322] env[61839]: DEBUG oslo_concurrency.lockutils [None req-176fdd3e-e9a1-456d-89bc-e1597ce200e4 tempest-ImagesOneServerTestJSON-1580027351 tempest-ImagesOneServerTestJSON-1580027351-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 28.221s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 716.277614] env[61839]: DEBUG nova.network.neutron [None req-d67a25ce-8bd3-48bd-87b7-725457f40be1 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] [instance: 97560b6e-0f50-4cc0-b620-305c82938390] Successfully created port: 7faeac3b-d75e-4fc3-b914-89c0c9d05ae6 {{(pid=61839) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}}
[ 716.471035] env[61839]: DEBUG oslo_concurrency.lockutils [None req-66411a82-44a4-4e09-b7e5-387750763c23 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Lock "3ea7af26-14b2-4371-a4f4-48afc190d4bc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 142.370s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 716.973069] env[61839]: DEBUG nova.compute.manager [None req-21cfb57d-6c01-48f6-81e9-529dd1bafa3c tempest-ServersAdminTestJSON-399292093 tempest-ServersAdminTestJSON-399292093-project-member] [instance: 50140f35-6282-41dc-a66c-f041f33769d7] Starting instance... {{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}}
[ 717.127423] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f090da6-f275-4738-96eb-5bce47c90098 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 717.134021] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa27cd34-699d-4132-8a3a-592a36a97013 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 717.168741] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5a6b775-2c06-4bba-baf4-a0520c549678 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 717.178728] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4be69e2-024a-4240-913c-8afaf56ab1c0 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 717.193458] env[61839]: DEBUG nova.compute.provider_tree [None req-176fdd3e-e9a1-456d-89bc-e1597ce200e4 tempest-ImagesOneServerTestJSON-1580027351 tempest-ImagesOneServerTestJSON-1580027351-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 717.231389] env[61839]: DEBUG nova.compute.manager [None req-d67a25ce-8bd3-48bd-87b7-725457f40be1 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] [instance: 97560b6e-0f50-4cc0-b620-305c82938390] Start spawning the instance on the hypervisor. {{(pid=61839) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}}
[ 717.264363] env[61839]: DEBUG nova.virt.hardware [None req-d67a25ce-8bd3-48bd-87b7-725457f40be1 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-18T16:51:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-18T16:50:57Z,direct_url=<?>,disk_format='vmdk',id=e497cc62-282a-4a70-9770-22d80d8a1013,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a98e034276e44534a9feace637762da7',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-10-18T16:50:58Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}}
[ 717.264654] env[61839]: DEBUG nova.virt.hardware [None req-d67a25ce-8bd3-48bd-87b7-725457f40be1 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] Flavor limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}}
[ 717.264807] env[61839]: DEBUG nova.virt.hardware [None req-d67a25ce-8bd3-48bd-87b7-725457f40be1 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] Image limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 717.265008] env[61839]: DEBUG nova.virt.hardware [None req-d67a25ce-8bd3-48bd-87b7-725457f40be1 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] Flavor pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}}
[ 717.266135] env[61839]: DEBUG nova.virt.hardware [None req-d67a25ce-8bd3-48bd-87b7-725457f40be1 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] Image pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 717.266300] env[61839]: DEBUG nova.virt.hardware [None req-d67a25ce-8bd3-48bd-87b7-725457f40be1 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}}
[ 717.266510] env[61839]: DEBUG nova.virt.hardware [None req-d67a25ce-8bd3-48bd-87b7-725457f40be1 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}}
[ 717.266790] env[61839]: DEBUG nova.virt.hardware [None req-d67a25ce-8bd3-48bd-87b7-725457f40be1 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}}
[ 717.266833] env[61839]: DEBUG nova.virt.hardware [None req-d67a25ce-8bd3-48bd-87b7-725457f40be1 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] Got 1 possible topologies {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}}
[ 717.267055] env[61839]: DEBUG nova.virt.hardware [None req-d67a25ce-8bd3-48bd-87b7-725457f40be1 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}}
[ 717.267214] env[61839]: DEBUG nova.virt.hardware [None req-d67a25ce-8bd3-48bd-87b7-725457f40be1 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
[ 717.268141] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2813cfa-1516-47c9-9bbd-4743b6835eb5 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 717.278605] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-279532c2-d983-401b-9b64-d31d678cb0d8 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 717.313030] env[61839]: DEBUG nova.compute.manager [req-bfcf2b7d-b36c-4b20-b687-ee89aab3eb86 req-dbce9eab-44e5-439d-b25e-178feaeab502 service nova] [instance: 97560b6e-0f50-4cc0-b620-305c82938390] Received event network-changed-7faeac3b-d75e-4fc3-b914-89c0c9d05ae6 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}}
[ 717.313030] env[61839]: DEBUG nova.compute.manager [req-bfcf2b7d-b36c-4b20-b687-ee89aab3eb86 req-dbce9eab-44e5-439d-b25e-178feaeab502 service nova] [instance: 97560b6e-0f50-4cc0-b620-305c82938390] Refreshing instance network info cache due to event network-changed-7faeac3b-d75e-4fc3-b914-89c0c9d05ae6. {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}}
[ 717.313030] env[61839]: DEBUG oslo_concurrency.lockutils [req-bfcf2b7d-b36c-4b20-b687-ee89aab3eb86 req-dbce9eab-44e5-439d-b25e-178feaeab502 service nova] Acquiring lock "refresh_cache-97560b6e-0f50-4cc0-b620-305c82938390" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 717.314227] env[61839]: DEBUG oslo_concurrency.lockutils [req-bfcf2b7d-b36c-4b20-b687-ee89aab3eb86 req-dbce9eab-44e5-439d-b25e-178feaeab502 service nova] Acquired lock "refresh_cache-97560b6e-0f50-4cc0-b620-305c82938390" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 717.314227] env[61839]: DEBUG nova.network.neutron [req-bfcf2b7d-b36c-4b20-b687-ee89aab3eb86 req-dbce9eab-44e5-439d-b25e-178feaeab502 service nova] [instance: 97560b6e-0f50-4cc0-b620-305c82938390] Refreshing network info cache for port 7faeac3b-d75e-4fc3-b914-89c0c9d05ae6 {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}}
[ 717.480371] env[61839]: ERROR nova.compute.manager [None req-d67a25ce-8bd3-48bd-87b7-725457f40be1 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 7faeac3b-d75e-4fc3-b914-89c0c9d05ae6, please check neutron logs for more information.
[ 717.480371] env[61839]: ERROR nova.compute.manager Traceback (most recent call last):
[ 717.480371] env[61839]: ERROR nova.compute.manager   File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async
[ 717.480371] env[61839]: ERROR nova.compute.manager     nwinfo = self.network_api.allocate_for_instance(
[ 717.480371] env[61839]: ERROR nova.compute.manager   File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance
[ 717.480371] env[61839]: ERROR nova.compute.manager     created_port_ids = self._update_ports_for_instance(
[ 717.480371] env[61839]: ERROR nova.compute.manager   File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance
[ 717.480371] env[61839]: ERROR nova.compute.manager     with excutils.save_and_reraise_exception():
[ 717.480371] env[61839]: ERROR nova.compute.manager   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 717.480371] env[61839]: ERROR nova.compute.manager     self.force_reraise()
[ 717.480371] env[61839]: ERROR nova.compute.manager   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 717.480371] env[61839]: ERROR nova.compute.manager     raise self.value
[ 717.480371] env[61839]: ERROR nova.compute.manager   File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance
[ 717.480371] env[61839]: ERROR nova.compute.manager     updated_port = self._update_port(
[ 717.480371] env[61839]: ERROR nova.compute.manager   File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port
[ 717.480371] env[61839]: ERROR nova.compute.manager     _ensure_no_port_binding_failure(port)
[ 717.481079] env[61839]: ERROR nova.compute.manager   File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure
[ 717.481079] env[61839]: ERROR nova.compute.manager     raise exception.PortBindingFailed(port_id=port['id'])
[ 717.481079] env[61839]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 7faeac3b-d75e-4fc3-b914-89c0c9d05ae6, please check neutron logs for more information.
[ 717.481079] env[61839]: ERROR nova.compute.manager
[ 717.481079] env[61839]: Traceback (most recent call last):
[ 717.481079] env[61839]:   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait
[ 717.481079] env[61839]:     listener.cb(fileno)
[ 717.481079] env[61839]:   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 717.481079] env[61839]:     result = function(*args, **kwargs)
[ 717.481079] env[61839]:   File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 717.481079] env[61839]:     return func(*args, **kwargs)
[ 717.481079] env[61839]:   File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async
[ 717.481079] env[61839]:     raise e
[ 717.481079] env[61839]:   File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async
[ 717.481079] env[61839]:     nwinfo = self.network_api.allocate_for_instance(
[ 717.481079] env[61839]:   File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance
[ 717.481079] env[61839]:     created_port_ids = self._update_ports_for_instance(
[ 717.481079] env[61839]:   File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance
[ 717.481079] env[61839]:     with excutils.save_and_reraise_exception():
[ 717.481079] env[61839]:   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 717.481079] env[61839]:     self.force_reraise()
[ 717.481079] env[61839]:   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 717.481079] env[61839]:     raise self.value
[ 717.481079] env[61839]:   File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance
[ 717.481079] env[61839]:     updated_port = self._update_port(
[ 717.481079] env[61839]:   File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port
[ 717.481079] env[61839]:     _ensure_no_port_binding_failure(port)
[ 717.481079] env[61839]:   File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure
[ 717.481079] env[61839]:     raise exception.PortBindingFailed(port_id=port['id'])
[ 717.482422] env[61839]: nova.exception.PortBindingFailed: Binding failed for port 7faeac3b-d75e-4fc3-b914-89c0c9d05ae6, please check neutron logs for more information.
[ 717.482422] env[61839]: Removing descriptor: 17
[ 717.482422] env[61839]: ERROR nova.compute.manager [None req-d67a25ce-8bd3-48bd-87b7-725457f40be1 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] [instance: 97560b6e-0f50-4cc0-b620-305c82938390] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 7faeac3b-d75e-4fc3-b914-89c0c9d05ae6, please check neutron logs for more information.
[ 717.482422] env[61839]: ERROR nova.compute.manager [instance: 97560b6e-0f50-4cc0-b620-305c82938390] Traceback (most recent call last):
[ 717.482422] env[61839]: ERROR nova.compute.manager [instance: 97560b6e-0f50-4cc0-b620-305c82938390]   File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources
[ 717.482422] env[61839]: ERROR nova.compute.manager [instance: 97560b6e-0f50-4cc0-b620-305c82938390]     yield resources
[ 717.482422] env[61839]: ERROR nova.compute.manager [instance: 97560b6e-0f50-4cc0-b620-305c82938390]   File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance
[ 717.482422] env[61839]: ERROR nova.compute.manager [instance: 97560b6e-0f50-4cc0-b620-305c82938390]     self.driver.spawn(context, instance, image_meta,
[ 717.482422] env[61839]: ERROR nova.compute.manager [instance: 97560b6e-0f50-4cc0-b620-305c82938390]   File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn
[ 717.482422] env[61839]: ERROR nova.compute.manager [instance: 97560b6e-0f50-4cc0-b620-305c82938390]     self._vmops.spawn(context, instance, image_meta, injected_files,
[ 717.482422] env[61839]: ERROR nova.compute.manager [instance: 97560b6e-0f50-4cc0-b620-305c82938390]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn
[ 717.482422] env[61839]: ERROR nova.compute.manager [instance: 97560b6e-0f50-4cc0-b620-305c82938390]     vm_ref = self.build_virtual_machine(instance,
[ 717.483037] env[61839]: ERROR nova.compute.manager [instance: 97560b6e-0f50-4cc0-b620-305c82938390]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine
[ 717.483037] env[61839]: ERROR nova.compute.manager [instance: 97560b6e-0f50-4cc0-b620-305c82938390]     vif_infos = vmwarevif.get_vif_info(self._session,
[ 717.483037] env[61839]: ERROR nova.compute.manager [instance: 97560b6e-0f50-4cc0-b620-305c82938390]   File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info
[ 717.483037] env[61839]: ERROR nova.compute.manager [instance: 97560b6e-0f50-4cc0-b620-305c82938390]     for vif in network_info:
[ 717.483037] env[61839]: ERROR nova.compute.manager [instance: 97560b6e-0f50-4cc0-b620-305c82938390]   File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__
[ 717.483037] env[61839]: ERROR nova.compute.manager [instance: 97560b6e-0f50-4cc0-b620-305c82938390]     return self._sync_wrapper(fn, *args, **kwargs)
[ 717.483037] env[61839]: ERROR nova.compute.manager [instance: 97560b6e-0f50-4cc0-b620-305c82938390]   File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper
[ 717.483037] env[61839]: ERROR nova.compute.manager [instance: 97560b6e-0f50-4cc0-b620-305c82938390]     self.wait()
[ 717.483037] env[61839]: ERROR nova.compute.manager [instance: 97560b6e-0f50-4cc0-b620-305c82938390]   File "/opt/stack/nova/nova/network/model.py", line 635, in wait
[ 717.483037] env[61839]: ERROR nova.compute.manager [instance: 97560b6e-0f50-4cc0-b620-305c82938390]     self[:] = self._gt.wait()
[ 717.483037] env[61839]: ERROR nova.compute.manager [instance: 97560b6e-0f50-4cc0-b620-305c82938390]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait
[ 717.483037] env[61839]: ERROR nova.compute.manager [instance: 97560b6e-0f50-4cc0-b620-305c82938390]     return self._exit_event.wait()
[ 717.483037] env[61839]: ERROR nova.compute.manager [instance: 97560b6e-0f50-4cc0-b620-305c82938390]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 717.483593] env[61839]: ERROR nova.compute.manager [instance: 97560b6e-0f50-4cc0-b620-305c82938390]     result = hub.switch()
[ 717.483593] env[61839]: ERROR nova.compute.manager [instance: 97560b6e-0f50-4cc0-b620-305c82938390]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 717.483593] env[61839]: ERROR nova.compute.manager [instance: 97560b6e-0f50-4cc0-b620-305c82938390]     return self.greenlet.switch()
[ 717.483593] env[61839]: ERROR nova.compute.manager [instance: 97560b6e-0f50-4cc0-b620-305c82938390]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 717.483593] env[61839]: ERROR nova.compute.manager [instance: 97560b6e-0f50-4cc0-b620-305c82938390]     result = function(*args, **kwargs)
[ 717.483593] env[61839]: ERROR nova.compute.manager [instance: 97560b6e-0f50-4cc0-b620-305c82938390]   File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 717.483593] env[61839]: ERROR nova.compute.manager [instance: 97560b6e-0f50-4cc0-b620-305c82938390]     return func(*args, **kwargs)
[ 717.483593] env[61839]: ERROR nova.compute.manager [instance: 97560b6e-0f50-4cc0-b620-305c82938390]   File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async
[ 717.483593] env[61839]: ERROR nova.compute.manager [instance: 97560b6e-0f50-4cc0-b620-305c82938390]     raise e
[ 717.483593] env[61839]: ERROR nova.compute.manager [instance: 97560b6e-0f50-4cc0-b620-305c82938390]   File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async
[ 717.483593] env[61839]: ERROR nova.compute.manager [instance: 97560b6e-0f50-4cc0-b620-305c82938390]     nwinfo = self.network_api.allocate_for_instance(
[ 717.483593] env[61839]: ERROR nova.compute.manager [instance: 97560b6e-0f50-4cc0-b620-305c82938390]   File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance
[ 717.483593] env[61839]: ERROR nova.compute.manager [instance: 97560b6e-0f50-4cc0-b620-305c82938390]     created_port_ids = self._update_ports_for_instance(
[ 717.484214] env[61839]: ERROR nova.compute.manager [instance: 97560b6e-0f50-4cc0-b620-305c82938390]   File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance
[ 717.484214] env[61839]: ERROR nova.compute.manager [instance: 97560b6e-0f50-4cc0-b620-305c82938390]     with excutils.save_and_reraise_exception():
[ 717.484214] env[61839]: ERROR nova.compute.manager [instance: 97560b6e-0f50-4cc0-b620-305c82938390]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 717.484214] env[61839]: ERROR nova.compute.manager [instance: 97560b6e-0f50-4cc0-b620-305c82938390]     self.force_reraise()
[ 717.484214] env[61839]: ERROR nova.compute.manager [instance: 97560b6e-0f50-4cc0-b620-305c82938390]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 717.484214] env[61839]: ERROR nova.compute.manager [instance: 97560b6e-0f50-4cc0-b620-305c82938390]     raise self.value
[ 717.484214] env[61839]: ERROR nova.compute.manager [instance: 97560b6e-0f50-4cc0-b620-305c82938390]   File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance
[ 717.484214] env[61839]: ERROR nova.compute.manager [instance: 97560b6e-0f50-4cc0-b620-305c82938390]     updated_port = self._update_port(
[ 717.484214] env[61839]: ERROR nova.compute.manager [instance: 97560b6e-0f50-4cc0-b620-305c82938390]   File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port
[ 717.484214] env[61839]: ERROR nova.compute.manager [instance: 97560b6e-0f50-4cc0-b620-305c82938390]     _ensure_no_port_binding_failure(port)
[ 717.484214] env[61839]: ERROR nova.compute.manager [instance: 97560b6e-0f50-4cc0-b620-305c82938390]   File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure
[ 717.484214] env[61839]: ERROR nova.compute.manager [instance: 97560b6e-0f50-4cc0-b620-305c82938390]     raise exception.PortBindingFailed(port_id=port['id'])
[ 717.484737] env[61839]: ERROR nova.compute.manager [instance: 97560b6e-0f50-4cc0-b620-305c82938390] nova.exception.PortBindingFailed: Binding failed for port 7faeac3b-d75e-4fc3-b914-89c0c9d05ae6, please check neutron logs for more information.
[ 717.484737] env[61839]: ERROR nova.compute.manager [instance: 97560b6e-0f50-4cc0-b620-305c82938390]
[ 717.484737] env[61839]: INFO nova.compute.manager [None req-d67a25ce-8bd3-48bd-87b7-725457f40be1 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] [instance: 97560b6e-0f50-4cc0-b620-305c82938390] Terminating instance
[ 717.493981] env[61839]: DEBUG oslo_concurrency.lockutils [None req-d67a25ce-8bd3-48bd-87b7-725457f40be1 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] Acquiring lock "refresh_cache-97560b6e-0f50-4cc0-b620-305c82938390" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 717.498382] env[61839]: DEBUG oslo_concurrency.lockutils [None req-21cfb57d-6c01-48f6-81e9-529dd1bafa3c tempest-ServersAdminTestJSON-399292093 tempest-ServersAdminTestJSON-399292093-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 717.699036] env[61839]: DEBUG nova.scheduler.client.report [None req-176fdd3e-e9a1-456d-89bc-e1597ce200e4 tempest-ImagesOneServerTestJSON-1580027351 tempest-ImagesOneServerTestJSON-1580027351-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 717.838168] env[61839]: DEBUG nova.network.neutron [req-bfcf2b7d-b36c-4b20-b687-ee89aab3eb86 req-dbce9eab-44e5-439d-b25e-178feaeab502 service nova] [instance: 97560b6e-0f50-4cc0-b620-305c82938390] Instance cache missing network info.
{{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 717.927164] env[61839]: DEBUG nova.network.neutron [req-bfcf2b7d-b36c-4b20-b687-ee89aab3eb86 req-dbce9eab-44e5-439d-b25e-178feaeab502 service nova] [instance: 97560b6e-0f50-4cc0-b620-305c82938390] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 718.202400] env[61839]: DEBUG oslo_concurrency.lockutils [None req-176fdd3e-e9a1-456d-89bc-e1597ce200e4 tempest-ImagesOneServerTestJSON-1580027351 tempest-ImagesOneServerTestJSON-1580027351-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.987s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 718.203131] env[61839]: ERROR nova.compute.manager [None req-176fdd3e-e9a1-456d-89bc-e1597ce200e4 tempest-ImagesOneServerTestJSON-1580027351 tempest-ImagesOneServerTestJSON-1580027351-project-member] [instance: ad32bc49-5e52-468a-9d93-390c8649dcae] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 06269c90-ada4-4d3c-9956-7781981d3128, please check neutron logs for more information. [ 718.203131] env[61839]: ERROR nova.compute.manager [instance: ad32bc49-5e52-468a-9d93-390c8649dcae] Traceback (most recent call last): [ 718.203131] env[61839]: ERROR nova.compute.manager [instance: ad32bc49-5e52-468a-9d93-390c8649dcae] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 718.203131] env[61839]: ERROR nova.compute.manager [instance: ad32bc49-5e52-468a-9d93-390c8649dcae] self.driver.spawn(context, instance, image_meta, [ 718.203131] env[61839]: ERROR nova.compute.manager [instance: ad32bc49-5e52-468a-9d93-390c8649dcae] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 718.203131] env[61839]: ERROR nova.compute.manager [instance: ad32bc49-5e52-468a-9d93-390c8649dcae] self._vmops.spawn(context, instance, image_meta, injected_files, [ 718.203131] env[61839]: ERROR nova.compute.manager [instance: ad32bc49-5e52-468a-9d93-390c8649dcae] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 718.203131] env[61839]: ERROR nova.compute.manager [instance: ad32bc49-5e52-468a-9d93-390c8649dcae] vm_ref = self.build_virtual_machine(instance, [ 718.203131] env[61839]: ERROR nova.compute.manager [instance: ad32bc49-5e52-468a-9d93-390c8649dcae] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 718.203131] env[61839]: ERROR nova.compute.manager [instance: ad32bc49-5e52-468a-9d93-390c8649dcae] vif_infos = vmwarevif.get_vif_info(self._session, [ 718.203131] env[61839]: ERROR nova.compute.manager [instance: ad32bc49-5e52-468a-9d93-390c8649dcae] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 718.203500] env[61839]: ERROR nova.compute.manager [instance: ad32bc49-5e52-468a-9d93-390c8649dcae] for vif in network_info: [ 718.203500] env[61839]: ERROR nova.compute.manager [instance: ad32bc49-5e52-468a-9d93-390c8649dcae] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 718.203500] env[61839]: ERROR nova.compute.manager [instance: ad32bc49-5e52-468a-9d93-390c8649dcae] return self._sync_wrapper(fn, *args, **kwargs) [ 718.203500] env[61839]: ERROR nova.compute.manager [instance: ad32bc49-5e52-468a-9d93-390c8649dcae] File "/opt/stack/nova/nova/network/model.py", 
line 603, in _sync_wrapper [ 718.203500] env[61839]: ERROR nova.compute.manager [instance: ad32bc49-5e52-468a-9d93-390c8649dcae] self.wait() [ 718.203500] env[61839]: ERROR nova.compute.manager [instance: ad32bc49-5e52-468a-9d93-390c8649dcae] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 718.203500] env[61839]: ERROR nova.compute.manager [instance: ad32bc49-5e52-468a-9d93-390c8649dcae] self[:] = self._gt.wait() [ 718.203500] env[61839]: ERROR nova.compute.manager [instance: ad32bc49-5e52-468a-9d93-390c8649dcae] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 718.203500] env[61839]: ERROR nova.compute.manager [instance: ad32bc49-5e52-468a-9d93-390c8649dcae] return self._exit_event.wait() [ 718.203500] env[61839]: ERROR nova.compute.manager [instance: ad32bc49-5e52-468a-9d93-390c8649dcae] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 718.203500] env[61839]: ERROR nova.compute.manager [instance: ad32bc49-5e52-468a-9d93-390c8649dcae] result = hub.switch() [ 718.203500] env[61839]: ERROR nova.compute.manager [instance: ad32bc49-5e52-468a-9d93-390c8649dcae] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 718.203500] env[61839]: ERROR nova.compute.manager [instance: ad32bc49-5e52-468a-9d93-390c8649dcae] return self.greenlet.switch() [ 718.203877] env[61839]: ERROR nova.compute.manager [instance: ad32bc49-5e52-468a-9d93-390c8649dcae] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 718.203877] env[61839]: ERROR nova.compute.manager [instance: ad32bc49-5e52-468a-9d93-390c8649dcae] result = function(*args, **kwargs) [ 718.203877] env[61839]: ERROR nova.compute.manager [instance: ad32bc49-5e52-468a-9d93-390c8649dcae] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 718.203877] env[61839]: ERROR nova.compute.manager [instance: ad32bc49-5e52-468a-9d93-390c8649dcae] return func(*args, **kwargs) [ 718.203877] env[61839]: ERROR nova.compute.manager [instance: ad32bc49-5e52-468a-9d93-390c8649dcae] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 718.203877] env[61839]: ERROR nova.compute.manager [instance: ad32bc49-5e52-468a-9d93-390c8649dcae] raise e [ 718.203877] env[61839]: ERROR nova.compute.manager [instance: ad32bc49-5e52-468a-9d93-390c8649dcae] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 718.203877] env[61839]: ERROR nova.compute.manager [instance: ad32bc49-5e52-468a-9d93-390c8649dcae] nwinfo = self.network_api.allocate_for_instance( [ 718.203877] env[61839]: ERROR nova.compute.manager [instance: ad32bc49-5e52-468a-9d93-390c8649dcae] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 718.203877] env[61839]: ERROR nova.compute.manager [instance: ad32bc49-5e52-468a-9d93-390c8649dcae] created_port_ids = self._update_ports_for_instance( [ 718.203877] env[61839]: ERROR nova.compute.manager [instance: ad32bc49-5e52-468a-9d93-390c8649dcae] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 718.203877] env[61839]: ERROR nova.compute.manager [instance: ad32bc49-5e52-468a-9d93-390c8649dcae] with excutils.save_and_reraise_exception(): [ 718.203877] env[61839]: ERROR nova.compute.manager [instance: ad32bc49-5e52-468a-9d93-390c8649dcae] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 718.204347] env[61839]: ERROR nova.compute.manager [instance: ad32bc49-5e52-468a-9d93-390c8649dcae] self.force_reraise() [ 718.204347] env[61839]: ERROR nova.compute.manager [instance: ad32bc49-5e52-468a-9d93-390c8649dcae] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 718.204347] env[61839]: ERROR nova.compute.manager [instance: ad32bc49-5e52-468a-9d93-390c8649dcae] raise self.value [ 718.204347] env[61839]: ERROR nova.compute.manager [instance: ad32bc49-5e52-468a-9d93-390c8649dcae] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 718.204347] env[61839]: ERROR nova.compute.manager [instance: ad32bc49-5e52-468a-9d93-390c8649dcae] updated_port = self._update_port( [ 718.204347] env[61839]: ERROR nova.compute.manager [instance: ad32bc49-5e52-468a-9d93-390c8649dcae] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 718.204347] env[61839]: ERROR nova.compute.manager [instance: ad32bc49-5e52-468a-9d93-390c8649dcae] _ensure_no_port_binding_failure(port) [ 718.204347] env[61839]: ERROR nova.compute.manager [instance: ad32bc49-5e52-468a-9d93-390c8649dcae] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 718.204347] env[61839]: ERROR nova.compute.manager [instance: ad32bc49-5e52-468a-9d93-390c8649dcae] raise exception.PortBindingFailed(port_id=port['id']) [ 718.204347] env[61839]: ERROR nova.compute.manager [instance: ad32bc49-5e52-468a-9d93-390c8649dcae] nova.exception.PortBindingFailed: Binding failed for port 06269c90-ada4-4d3c-9956-7781981d3128, please check neutron logs for more information. [ 718.204347] env[61839]: ERROR nova.compute.manager [instance: ad32bc49-5e52-468a-9d93-390c8649dcae] [ 718.204683] env[61839]: DEBUG nova.compute.utils [None req-176fdd3e-e9a1-456d-89bc-e1597ce200e4 tempest-ImagesOneServerTestJSON-1580027351 tempest-ImagesOneServerTestJSON-1580027351-project-member] [instance: ad32bc49-5e52-468a-9d93-390c8649dcae] Binding failed for port 06269c90-ada4-4d3c-9956-7781981d3128, please check neutron logs for more information. {{(pid=61839) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 718.204983] env[61839]: DEBUG oslo_concurrency.lockutils [None req-f3a9d5df-26df-4d8c-b25e-3cb525f9f793 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.775s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 718.206487] env[61839]: INFO nova.compute.claims [None req-f3a9d5df-26df-4d8c-b25e-3cb525f9f793 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] [instance: d608405b-20d9-42ab-97e3-e129f9c1448b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 718.211967] env[61839]: DEBUG nova.compute.manager [None req-176fdd3e-e9a1-456d-89bc-e1597ce200e4 tempest-ImagesOneServerTestJSON-1580027351 tempest-ImagesOneServerTestJSON-1580027351-project-member] [instance: ad32bc49-5e52-468a-9d93-390c8649dcae] Build of instance ad32bc49-5e52-468a-9d93-390c8649dcae was re-scheduled: Binding failed for port 06269c90-ada4-4d3c-9956-7781981d3128, please check neutron logs for more information. 
{{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 718.211967] env[61839]: DEBUG nova.compute.manager [None req-176fdd3e-e9a1-456d-89bc-e1597ce200e4 tempest-ImagesOneServerTestJSON-1580027351 tempest-ImagesOneServerTestJSON-1580027351-project-member] [instance: ad32bc49-5e52-468a-9d93-390c8649dcae] Unplugging VIFs for instance {{(pid=61839) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 718.212194] env[61839]: DEBUG oslo_concurrency.lockutils [None req-176fdd3e-e9a1-456d-89bc-e1597ce200e4 tempest-ImagesOneServerTestJSON-1580027351 tempest-ImagesOneServerTestJSON-1580027351-project-member] Acquiring lock "refresh_cache-ad32bc49-5e52-468a-9d93-390c8649dcae" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 718.212194] env[61839]: DEBUG oslo_concurrency.lockutils [None req-176fdd3e-e9a1-456d-89bc-e1597ce200e4 tempest-ImagesOneServerTestJSON-1580027351 tempest-ImagesOneServerTestJSON-1580027351-project-member] Acquired lock "refresh_cache-ad32bc49-5e52-468a-9d93-390c8649dcae" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 718.212517] env[61839]: DEBUG nova.network.neutron [None req-176fdd3e-e9a1-456d-89bc-e1597ce200e4 tempest-ImagesOneServerTestJSON-1580027351 tempest-ImagesOneServerTestJSON-1580027351-project-member] [instance: ad32bc49-5e52-468a-9d93-390c8649dcae] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 718.403909] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0ff6597c-ab3c-4cf9-b3ae-5fc8fc4a3084 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Acquiring lock "047080fa-8781-47b1-89d8-2e4c8031b164" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 718.404311] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0ff6597c-ab3c-4cf9-b3ae-5fc8fc4a3084 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Lock "047080fa-8781-47b1-89d8-2e4c8031b164" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 718.430055] env[61839]: DEBUG oslo_concurrency.lockutils [req-bfcf2b7d-b36c-4b20-b687-ee89aab3eb86 req-dbce9eab-44e5-439d-b25e-178feaeab502 service nova] Releasing lock "refresh_cache-97560b6e-0f50-4cc0-b620-305c82938390" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 718.430502] env[61839]: DEBUG oslo_concurrency.lockutils [None req-d67a25ce-8bd3-48bd-87b7-725457f40be1 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] Acquired lock "refresh_cache-97560b6e-0f50-4cc0-b620-305c82938390" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 718.430720] env[61839]: DEBUG nova.network.neutron [None req-d67a25ce-8bd3-48bd-87b7-725457f40be1 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] [instance: 97560b6e-0f50-4cc0-b620-305c82938390] Building network info cache for instance 
{{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 718.735222] env[61839]: DEBUG nova.network.neutron [None req-176fdd3e-e9a1-456d-89bc-e1597ce200e4 tempest-ImagesOneServerTestJSON-1580027351 tempest-ImagesOneServerTestJSON-1580027351-project-member] [instance: ad32bc49-5e52-468a-9d93-390c8649dcae] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 718.861506] env[61839]: DEBUG nova.network.neutron [None req-176fdd3e-e9a1-456d-89bc-e1597ce200e4 tempest-ImagesOneServerTestJSON-1580027351 tempest-ImagesOneServerTestJSON-1580027351-project-member] [instance: ad32bc49-5e52-468a-9d93-390c8649dcae] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 718.946952] env[61839]: DEBUG nova.network.neutron [None req-d67a25ce-8bd3-48bd-87b7-725457f40be1 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] [instance: 97560b6e-0f50-4cc0-b620-305c82938390] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 719.005546] env[61839]: DEBUG nova.network.neutron [None req-d67a25ce-8bd3-48bd-87b7-725457f40be1 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] [instance: 97560b6e-0f50-4cc0-b620-305c82938390] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 719.343020] env[61839]: DEBUG nova.compute.manager [req-c7dfb7eb-4955-46c6-a0c0-f89ef3c805fe req-df5041ec-e20a-43bf-966d-80dd8e3398c4 service nova] [instance: 97560b6e-0f50-4cc0-b620-305c82938390] Received event network-vif-deleted-7faeac3b-d75e-4fc3-b914-89c0c9d05ae6 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 719.364271] env[61839]: DEBUG oslo_concurrency.lockutils [None req-176fdd3e-e9a1-456d-89bc-e1597ce200e4 tempest-ImagesOneServerTestJSON-1580027351 tempest-ImagesOneServerTestJSON-1580027351-project-member] Releasing lock "refresh_cache-ad32bc49-5e52-468a-9d93-390c8649dcae" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 719.364473] env[61839]: DEBUG nova.compute.manager [None req-176fdd3e-e9a1-456d-89bc-e1597ce200e4 tempest-ImagesOneServerTestJSON-1580027351 tempest-ImagesOneServerTestJSON-1580027351-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61839) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 719.364650] env[61839]: DEBUG nova.compute.manager [None req-176fdd3e-e9a1-456d-89bc-e1597ce200e4 tempest-ImagesOneServerTestJSON-1580027351 tempest-ImagesOneServerTestJSON-1580027351-project-member] [instance: ad32bc49-5e52-468a-9d93-390c8649dcae] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 719.364812] env[61839]: DEBUG nova.network.neutron [None req-176fdd3e-e9a1-456d-89bc-e1597ce200e4 tempest-ImagesOneServerTestJSON-1580027351 tempest-ImagesOneServerTestJSON-1580027351-project-member] [instance: ad32bc49-5e52-468a-9d93-390c8649dcae] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 719.382514] env[61839]: DEBUG nova.network.neutron [None req-176fdd3e-e9a1-456d-89bc-e1597ce200e4 tempest-ImagesOneServerTestJSON-1580027351 tempest-ImagesOneServerTestJSON-1580027351-project-member] [instance: ad32bc49-5e52-468a-9d93-390c8649dcae] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 719.508752] env[61839]: DEBUG oslo_concurrency.lockutils [None req-d67a25ce-8bd3-48bd-87b7-725457f40be1 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] Releasing lock "refresh_cache-97560b6e-0f50-4cc0-b620-305c82938390" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 719.509085] env[61839]: DEBUG nova.compute.manager [None req-d67a25ce-8bd3-48bd-87b7-725457f40be1 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] [instance: 97560b6e-0f50-4cc0-b620-305c82938390] Start destroying the instance on the hypervisor. {{(pid=61839) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 719.509295] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-d67a25ce-8bd3-48bd-87b7-725457f40be1 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] [instance: 97560b6e-0f50-4cc0-b620-305c82938390] Destroying instance {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 719.510039] env[61839]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-898f7822-9640-45e4-81e4-a0c374eec826 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.521849] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c34c046e-ca25-470f-8de1-67ee3dee2cf9 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.547022] env[61839]: WARNING nova.virt.vmwareapi.vmops [None req-d67a25ce-8bd3-48bd-87b7-725457f40be1 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] [instance: 97560b6e-0f50-4cc0-b620-305c82938390] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 97560b6e-0f50-4cc0-b620-305c82938390 could not be found. 
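Every PortBindingFailed in this log bottoms out in the same frame: _ensure_no_port_binding_failure at nova/network/neutron.py:294, which raises exception.PortBindingFailed(port_id=port['id']). A minimal sketch of that guard, assuming (as in upstream Nova) that Neutron signals a failed binding by setting the port's 'binding:vif_type' to 'binding_failed'; the exception class below is an illustrative stand-in for nova.exception.PortBindingFailed, not the real one:

    # Sketch of the check seen at nova/network/neutron.py:294 in the
    # tracebacks above.
    VIF_TYPE_BINDING_FAILED = 'binding_failed'  # assumed Neutron marker

    class PortBindingFailed(Exception):  # illustrative stand-in
        def __init__(self, port_id):
            super().__init__(
                'Binding failed for port %s, please check neutron logs '
                'for more information.' % port_id)

    def _ensure_no_port_binding_failure(port):
        # 'port' is the dict Neutron returns for the port resource.
        if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
            raise PortBindingFailed(port_id=port['id'])

In other words, the compute node never receives a usable VIF type for the ports above; the failure originates on the Neutron side (the message says as much), and Nova's part is to surface it and unwind the build, which is the terminate/deallocate sequence that follows.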
[ 719.547244] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-d67a25ce-8bd3-48bd-87b7-725457f40be1 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] [instance: 97560b6e-0f50-4cc0-b620-305c82938390] Instance destroyed {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 719.547420] env[61839]: INFO nova.compute.manager [None req-d67a25ce-8bd3-48bd-87b7-725457f40be1 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] [instance: 97560b6e-0f50-4cc0-b620-305c82938390] Took 0.04 seconds to destroy the instance on the hypervisor. [ 719.548041] env[61839]: DEBUG oslo.service.loopingcall [None req-d67a25ce-8bd3-48bd-87b7-725457f40be1 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 719.550787] env[61839]: DEBUG nova.compute.manager [-] [instance: 97560b6e-0f50-4cc0-b620-305c82938390] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 719.550887] env[61839]: DEBUG nova.network.neutron [-] [instance: 97560b6e-0f50-4cc0-b620-305c82938390] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 719.567048] env[61839]: DEBUG nova.network.neutron [-] [instance: 97560b6e-0f50-4cc0-b620-305c82938390] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 719.583169] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fac321c-7c22-44fa-96fb-94663a298c51 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.591257] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-698ce747-e67a-443f-abd5-45f49df17306 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.621200] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1f4b948-e798-433a-bbaf-8686d9b54997 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.628478] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9a710c4-b9ca-4505-9927-f504a7f34224 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.643053] env[61839]: DEBUG nova.compute.provider_tree [None req-f3a9d5df-26df-4d8c-b25e-3cb525f9f793 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 719.885630] env[61839]: DEBUG nova.network.neutron [None req-176fdd3e-e9a1-456d-89bc-e1597ce200e4 tempest-ImagesOneServerTestJSON-1580027351 tempest-ImagesOneServerTestJSON-1580027351-project-member] [instance: ad32bc49-5e52-468a-9d93-390c8649dcae] Updating instance_info_cache with network_info: [] 
{{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 720.071355] env[61839]: DEBUG nova.network.neutron [-] [instance: 97560b6e-0f50-4cc0-b620-305c82938390] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 720.148081] env[61839]: DEBUG nova.scheduler.client.report [None req-f3a9d5df-26df-4d8c-b25e-3cb525f9f793 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 720.388625] env[61839]: INFO nova.compute.manager [None req-176fdd3e-e9a1-456d-89bc-e1597ce200e4 tempest-ImagesOneServerTestJSON-1580027351 tempest-ImagesOneServerTestJSON-1580027351-project-member] [instance: ad32bc49-5e52-468a-9d93-390c8649dcae] Took 1.02 seconds to deallocate network for instance. [ 720.580044] env[61839]: INFO nova.compute.manager [-] [instance: 97560b6e-0f50-4cc0-b620-305c82938390] Took 1.03 seconds to deallocate network for instance. [ 720.585691] env[61839]: DEBUG nova.compute.claims [None req-d67a25ce-8bd3-48bd-87b7-725457f40be1 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] [instance: 97560b6e-0f50-4cc0-b620-305c82938390] Aborting claim: {{(pid=61839) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 720.585946] env[61839]: DEBUG oslo_concurrency.lockutils [None req-d67a25ce-8bd3-48bd-87b7-725457f40be1 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 720.657226] env[61839]: DEBUG oslo_concurrency.lockutils [None req-f3a9d5df-26df-4d8c-b25e-3cb525f9f793 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.452s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 720.657803] env[61839]: DEBUG nova.compute.manager [None req-f3a9d5df-26df-4d8c-b25e-3cb525f9f793 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] [instance: d608405b-20d9-42ab-97e3-e129f9c1448b] Start building networks asynchronously for instance. 
{{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 720.660458] env[61839]: DEBUG oslo_concurrency.lockutils [None req-e5615962-6551-40fe-a767-15a9fd175176 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.862s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 720.660734] env[61839]: DEBUG nova.objects.instance [None req-e5615962-6551-40fe-a767-15a9fd175176 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] Lazy-loading 'resources' on Instance uuid 81ba4888-4b21-410f-ab86-a3068995836f {{(pid=61839) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 721.164079] env[61839]: DEBUG nova.compute.utils [None req-f3a9d5df-26df-4d8c-b25e-3cb525f9f793 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] Using /dev/sd instead of None {{(pid=61839) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 721.166179] env[61839]: DEBUG nova.compute.manager [None req-f3a9d5df-26df-4d8c-b25e-3cb525f9f793 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] [instance: d608405b-20d9-42ab-97e3-e129f9c1448b] Allocating IP information in the background. {{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 721.167094] env[61839]: DEBUG nova.network.neutron [None req-f3a9d5df-26df-4d8c-b25e-3cb525f9f793 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] [instance: d608405b-20d9-42ab-97e3-e129f9c1448b] allocate_for_instance() {{(pid=61839) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 721.230965] env[61839]: DEBUG nova.policy [None req-f3a9d5df-26df-4d8c-b25e-3cb525f9f793 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c65b833e17ab49d6894b76d8e22ccc64', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '434f932a2a9e448d99fac449918affe9', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61839) authorize /opt/stack/nova/nova/policy.py:201}} [ 721.425074] env[61839]: INFO nova.scheduler.client.report [None req-176fdd3e-e9a1-456d-89bc-e1597ce200e4 tempest-ImagesOneServerTestJSON-1580027351 tempest-ImagesOneServerTestJSON-1580027351-project-member] Deleted allocations for instance ad32bc49-5e52-468a-9d93-390c8649dcae [ 721.610761] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42f631ab-6e2f-4410-a998-49bf1057ac6a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.625137] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf2ad9a0-cbe8-4ccc-8bc2-daccf79aea3a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.656702] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-ecac6309-0bdc-4924-9fee-9599f6481043 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.665221] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe7f3593-33cd-4c9d-a938-5d204fa1a866 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.671393] env[61839]: DEBUG nova.compute.manager [None req-f3a9d5df-26df-4d8c-b25e-3cb525f9f793 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] [instance: d608405b-20d9-42ab-97e3-e129f9c1448b] Start building block device mappings for instance. {{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 721.681721] env[61839]: DEBUG nova.compute.provider_tree [None req-e5615962-6551-40fe-a767-15a9fd175176 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 721.699112] env[61839]: DEBUG oslo_concurrency.lockutils [None req-6c4e691d-a55a-4273-a9e4-fb954224d381 tempest-ServerMetadataNegativeTestJSON-1868427009 tempest-ServerMetadataNegativeTestJSON-1868427009-project-member] Acquiring lock "406da948-71c7-4c28-9ee3-10af64b1ab51" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 721.699112] env[61839]: DEBUG oslo_concurrency.lockutils [None req-6c4e691d-a55a-4273-a9e4-fb954224d381 tempest-ServerMetadataNegativeTestJSON-1868427009 tempest-ServerMetadataNegativeTestJSON-1868427009-project-member] Lock "406da948-71c7-4c28-9ee3-10af64b1ab51" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 721.769423] env[61839]: DEBUG nova.network.neutron [None req-f3a9d5df-26df-4d8c-b25e-3cb525f9f793 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] [instance: d608405b-20d9-42ab-97e3-e129f9c1448b] Successfully created port: 6b89e10a-400d-4985-a3b9-998834980373 {{(pid=61839) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 721.936182] env[61839]: DEBUG oslo_concurrency.lockutils [None req-176fdd3e-e9a1-456d-89bc-e1597ce200e4 tempest-ImagesOneServerTestJSON-1580027351 tempest-ImagesOneServerTestJSON-1580027351-project-member] Lock "ad32bc49-5e52-468a-9d93-390c8649dcae" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 139.002s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 722.189758] env[61839]: DEBUG nova.scheduler.client.report [None req-e5615962-6551-40fe-a767-15a9fd175176 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 
'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 722.439066] env[61839]: DEBUG nova.compute.manager [None req-871442e2-b578-4d92-b7ce-9775c2859117 tempest-ServersAdminTestJSON-399292093 tempest-ServersAdminTestJSON-399292093-project-member] [instance: a89e30e6-b727-440f-a1e8-9c86d19c796d] Starting instance... {{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 722.696900] env[61839]: DEBUG nova.compute.manager [None req-f3a9d5df-26df-4d8c-b25e-3cb525f9f793 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] [instance: d608405b-20d9-42ab-97e3-e129f9c1448b] Start spawning the instance on the hypervisor. {{(pid=61839) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 722.699914] env[61839]: DEBUG oslo_concurrency.lockutils [None req-e5615962-6551-40fe-a767-15a9fd175176 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.039s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 722.702329] env[61839]: DEBUG oslo_concurrency.lockutils [None req-91f7d267-6fad-4c7d-b66a-41e5344a2deb tempest-ServersTestBootFromVolume-874886378 tempest-ServersTestBootFromVolume-874886378-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 31.465s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 722.706197] env[61839]: INFO nova.compute.claims [None req-91f7d267-6fad-4c7d-b66a-41e5344a2deb tempest-ServersTestBootFromVolume-874886378 tempest-ServersTestBootFromVolume-874886378-project-member] [instance: eca07795-319e-401d-8f05-41a29bab2689] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 722.731284] env[61839]: INFO nova.scheduler.client.report [None req-e5615962-6551-40fe-a767-15a9fd175176 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] Deleted allocations for instance 81ba4888-4b21-410f-ab86-a3068995836f [ 722.739749] env[61839]: DEBUG nova.virt.hardware [None req-f3a9d5df-26df-4d8c-b25e-3cb525f9f793 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-18T16:51:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-18T16:50:57Z,direct_url=,disk_format='vmdk',id=e497cc62-282a-4a70-9770-22d80d8a1013,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a98e034276e44534a9feace637762da7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-18T16:50:58Z,virtual_size=,visibility=), allow threads: False {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 722.740331] env[61839]: DEBUG nova.virt.hardware [None 
req-f3a9d5df-26df-4d8c-b25e-3cb525f9f793 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] Flavor limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 722.740385] env[61839]: DEBUG nova.virt.hardware [None req-f3a9d5df-26df-4d8c-b25e-3cb525f9f793 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] Image limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 722.740532] env[61839]: DEBUG nova.virt.hardware [None req-f3a9d5df-26df-4d8c-b25e-3cb525f9f793 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] Flavor pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 722.740678] env[61839]: DEBUG nova.virt.hardware [None req-f3a9d5df-26df-4d8c-b25e-3cb525f9f793 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] Image pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 722.740820] env[61839]: DEBUG nova.virt.hardware [None req-f3a9d5df-26df-4d8c-b25e-3cb525f9f793 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 722.741037] env[61839]: DEBUG nova.virt.hardware [None req-f3a9d5df-26df-4d8c-b25e-3cb525f9f793 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 722.741743] env[61839]: DEBUG nova.virt.hardware [None req-f3a9d5df-26df-4d8c-b25e-3cb525f9f793 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 722.741743] env[61839]: DEBUG nova.virt.hardware [None req-f3a9d5df-26df-4d8c-b25e-3cb525f9f793 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] Got 1 possible topologies {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 722.741743] env[61839]: DEBUG nova.virt.hardware [None req-f3a9d5df-26df-4d8c-b25e-3cb525f9f793 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 722.742217] env[61839]: DEBUG nova.virt.hardware [None req-f3a9d5df-26df-4d8c-b25e-3cb525f9f793 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 722.742921] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fabb1bb1-289d-407a-b77e-325a2077f030 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
722.754200] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be14bb3b-1796-4843-9373-6a3a8a5c34c0 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.954258] env[61839]: DEBUG nova.compute.manager [req-66a98ff2-a203-4aa8-9c08-7856c3b08d16 req-7f7cec5a-5303-42b6-bdca-36add7847677 service nova] [instance: d608405b-20d9-42ab-97e3-e129f9c1448b] Received event network-changed-6b89e10a-400d-4985-a3b9-998834980373 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 722.954531] env[61839]: DEBUG nova.compute.manager [req-66a98ff2-a203-4aa8-9c08-7856c3b08d16 req-7f7cec5a-5303-42b6-bdca-36add7847677 service nova] [instance: d608405b-20d9-42ab-97e3-e129f9c1448b] Refreshing instance network info cache due to event network-changed-6b89e10a-400d-4985-a3b9-998834980373. {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 722.956590] env[61839]: DEBUG oslo_concurrency.lockutils [req-66a98ff2-a203-4aa8-9c08-7856c3b08d16 req-7f7cec5a-5303-42b6-bdca-36add7847677 service nova] Acquiring lock "refresh_cache-d608405b-20d9-42ab-97e3-e129f9c1448b" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 722.956590] env[61839]: DEBUG oslo_concurrency.lockutils [req-66a98ff2-a203-4aa8-9c08-7856c3b08d16 req-7f7cec5a-5303-42b6-bdca-36add7847677 service nova] Acquired lock "refresh_cache-d608405b-20d9-42ab-97e3-e129f9c1448b" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 722.956590] env[61839]: DEBUG nova.network.neutron [req-66a98ff2-a203-4aa8-9c08-7856c3b08d16 req-7f7cec5a-5303-42b6-bdca-36add7847677 service nova] [instance: d608405b-20d9-42ab-97e3-e129f9c1448b] Refreshing network info cache for port 6b89e10a-400d-4985-a3b9-998834980373 {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 722.966965] env[61839]: DEBUG oslo_concurrency.lockutils [None req-871442e2-b578-4d92-b7ce-9775c2859117 tempest-ServersAdminTestJSON-399292093 tempest-ServersAdminTestJSON-399292093-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 723.129295] env[61839]: ERROR nova.compute.manager [None req-f3a9d5df-26df-4d8c-b25e-3cb525f9f793 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 6b89e10a-400d-4985-a3b9-998834980373, please check neutron logs for more information. 
[ 723.129295] env[61839]: ERROR nova.compute.manager Traceback (most recent call last): [ 723.129295] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 723.129295] env[61839]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 723.129295] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 723.129295] env[61839]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 723.129295] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 723.129295] env[61839]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 723.129295] env[61839]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 723.129295] env[61839]: ERROR nova.compute.manager self.force_reraise() [ 723.129295] env[61839]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 723.129295] env[61839]: ERROR nova.compute.manager raise self.value [ 723.129295] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 723.129295] env[61839]: ERROR nova.compute.manager updated_port = self._update_port( [ 723.129295] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 723.129295] env[61839]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 723.129911] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 723.129911] env[61839]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 723.129911] env[61839]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 6b89e10a-400d-4985-a3b9-998834980373, please check neutron logs for more information. 
[ 723.129911] env[61839]: ERROR nova.compute.manager [ 723.129911] env[61839]: Traceback (most recent call last): [ 723.129911] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 723.129911] env[61839]: listener.cb(fileno) [ 723.129911] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 723.129911] env[61839]: result = function(*args, **kwargs) [ 723.129911] env[61839]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 723.129911] env[61839]: return func(*args, **kwargs) [ 723.129911] env[61839]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 723.129911] env[61839]: raise e [ 723.129911] env[61839]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 723.129911] env[61839]: nwinfo = self.network_api.allocate_for_instance( [ 723.129911] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 723.129911] env[61839]: created_port_ids = self._update_ports_for_instance( [ 723.129911] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 723.129911] env[61839]: with excutils.save_and_reraise_exception(): [ 723.129911] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 723.129911] env[61839]: self.force_reraise() [ 723.129911] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 723.129911] env[61839]: raise self.value [ 723.129911] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 723.129911] env[61839]: updated_port = self._update_port( [ 723.129911] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 723.129911] env[61839]: _ensure_no_port_binding_failure(port) [ 723.129911] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 723.129911] env[61839]: raise exception.PortBindingFailed(port_id=port['id']) [ 723.131147] env[61839]: nova.exception.PortBindingFailed: Binding failed for port 6b89e10a-400d-4985-a3b9-998834980373, please check neutron logs for more information. [ 723.131147] env[61839]: Removing descriptor: 17 [ 723.131147] env[61839]: ERROR nova.compute.manager [None req-f3a9d5df-26df-4d8c-b25e-3cb525f9f793 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] [instance: d608405b-20d9-42ab-97e3-e129f9c1448b] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 6b89e10a-400d-4985-a3b9-998834980373, please check neutron logs for more information. 
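Each of these tracebacks also passes through oslo_utils.excutils.save_and_reraise_exception (the __exit__ -> force_reraise -> raise self.value frames). A minimal usage sketch, assuming only the documented default behavior: run cleanup in the context body, then re-raise the original in-flight exception with its traceback intact. _rollback_ports and the simulated failure are hypothetical, not Nova code:

    from oslo_utils import excutils

    def _rollback_ports(ports):
        # hypothetical cleanup standing in for Nova's port rollback
        print('rolling back ports:', ports)

    def update_ports(ports):
        try:
            raise RuntimeError('simulated Neutron failure')  # hypothetical
        except Exception:
            # Cleanup runs first; leaving the context manager re-raises
            # the original exception ('raise self.value' in the frames
            # above), so the caller sees the real failure rather than
            # any error from the cleanup path.
            with excutils.save_and_reraise_exception():
                _rollback_ports(ports)

This pattern is also why the same PortBindingFailed appears twice per failure in the log: once from the greenthread that ran _allocate_network_async (the eventlet hubs/poll.py traceback), and again when the spawning thread collects the result through network_info's _sync_wrapper/wait(), as in the per-instance traceback that follows.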
[ 723.131147] env[61839]: ERROR nova.compute.manager [instance: d608405b-20d9-42ab-97e3-e129f9c1448b] Traceback (most recent call last): [ 723.131147] env[61839]: ERROR nova.compute.manager [instance: d608405b-20d9-42ab-97e3-e129f9c1448b] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 723.131147] env[61839]: ERROR nova.compute.manager [instance: d608405b-20d9-42ab-97e3-e129f9c1448b] yield resources [ 723.131147] env[61839]: ERROR nova.compute.manager [instance: d608405b-20d9-42ab-97e3-e129f9c1448b] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 723.131147] env[61839]: ERROR nova.compute.manager [instance: d608405b-20d9-42ab-97e3-e129f9c1448b] self.driver.spawn(context, instance, image_meta, [ 723.131147] env[61839]: ERROR nova.compute.manager [instance: d608405b-20d9-42ab-97e3-e129f9c1448b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 723.131147] env[61839]: ERROR nova.compute.manager [instance: d608405b-20d9-42ab-97e3-e129f9c1448b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 723.131147] env[61839]: ERROR nova.compute.manager [instance: d608405b-20d9-42ab-97e3-e129f9c1448b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 723.131147] env[61839]: ERROR nova.compute.manager [instance: d608405b-20d9-42ab-97e3-e129f9c1448b] vm_ref = self.build_virtual_machine(instance, [ 723.131627] env[61839]: ERROR nova.compute.manager [instance: d608405b-20d9-42ab-97e3-e129f9c1448b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 723.131627] env[61839]: ERROR nova.compute.manager [instance: d608405b-20d9-42ab-97e3-e129f9c1448b] vif_infos = vmwarevif.get_vif_info(self._session, [ 723.131627] env[61839]: ERROR nova.compute.manager [instance: d608405b-20d9-42ab-97e3-e129f9c1448b] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 723.131627] env[61839]: ERROR nova.compute.manager [instance: d608405b-20d9-42ab-97e3-e129f9c1448b] for vif in network_info: [ 723.131627] env[61839]: ERROR nova.compute.manager [instance: d608405b-20d9-42ab-97e3-e129f9c1448b] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 723.131627] env[61839]: ERROR nova.compute.manager [instance: d608405b-20d9-42ab-97e3-e129f9c1448b] return self._sync_wrapper(fn, *args, **kwargs) [ 723.131627] env[61839]: ERROR nova.compute.manager [instance: d608405b-20d9-42ab-97e3-e129f9c1448b] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 723.131627] env[61839]: ERROR nova.compute.manager [instance: d608405b-20d9-42ab-97e3-e129f9c1448b] self.wait() [ 723.131627] env[61839]: ERROR nova.compute.manager [instance: d608405b-20d9-42ab-97e3-e129f9c1448b] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 723.131627] env[61839]: ERROR nova.compute.manager [instance: d608405b-20d9-42ab-97e3-e129f9c1448b] self[:] = self._gt.wait() [ 723.131627] env[61839]: ERROR nova.compute.manager [instance: d608405b-20d9-42ab-97e3-e129f9c1448b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 723.131627] env[61839]: ERROR nova.compute.manager [instance: d608405b-20d9-42ab-97e3-e129f9c1448b] return self._exit_event.wait() [ 723.131627] env[61839]: ERROR nova.compute.manager [instance: d608405b-20d9-42ab-97e3-e129f9c1448b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 723.132073] env[61839]: ERROR 
nova.compute.manager [instance: d608405b-20d9-42ab-97e3-e129f9c1448b] result = hub.switch() [ 723.132073] env[61839]: ERROR nova.compute.manager [instance: d608405b-20d9-42ab-97e3-e129f9c1448b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 723.132073] env[61839]: ERROR nova.compute.manager [instance: d608405b-20d9-42ab-97e3-e129f9c1448b] return self.greenlet.switch() [ 723.132073] env[61839]: ERROR nova.compute.manager [instance: d608405b-20d9-42ab-97e3-e129f9c1448b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 723.132073] env[61839]: ERROR nova.compute.manager [instance: d608405b-20d9-42ab-97e3-e129f9c1448b] result = function(*args, **kwargs) [ 723.132073] env[61839]: ERROR nova.compute.manager [instance: d608405b-20d9-42ab-97e3-e129f9c1448b] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 723.132073] env[61839]: ERROR nova.compute.manager [instance: d608405b-20d9-42ab-97e3-e129f9c1448b] return func(*args, **kwargs) [ 723.132073] env[61839]: ERROR nova.compute.manager [instance: d608405b-20d9-42ab-97e3-e129f9c1448b] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 723.132073] env[61839]: ERROR nova.compute.manager [instance: d608405b-20d9-42ab-97e3-e129f9c1448b] raise e [ 723.132073] env[61839]: ERROR nova.compute.manager [instance: d608405b-20d9-42ab-97e3-e129f9c1448b] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 723.132073] env[61839]: ERROR nova.compute.manager [instance: d608405b-20d9-42ab-97e3-e129f9c1448b] nwinfo = self.network_api.allocate_for_instance( [ 723.132073] env[61839]: ERROR nova.compute.manager [instance: d608405b-20d9-42ab-97e3-e129f9c1448b] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 723.132073] env[61839]: ERROR nova.compute.manager [instance: d608405b-20d9-42ab-97e3-e129f9c1448b] created_port_ids = self._update_ports_for_instance( [ 723.132474] env[61839]: ERROR nova.compute.manager [instance: d608405b-20d9-42ab-97e3-e129f9c1448b] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 723.132474] env[61839]: ERROR nova.compute.manager [instance: d608405b-20d9-42ab-97e3-e129f9c1448b] with excutils.save_and_reraise_exception(): [ 723.132474] env[61839]: ERROR nova.compute.manager [instance: d608405b-20d9-42ab-97e3-e129f9c1448b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 723.132474] env[61839]: ERROR nova.compute.manager [instance: d608405b-20d9-42ab-97e3-e129f9c1448b] self.force_reraise() [ 723.132474] env[61839]: ERROR nova.compute.manager [instance: d608405b-20d9-42ab-97e3-e129f9c1448b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 723.132474] env[61839]: ERROR nova.compute.manager [instance: d608405b-20d9-42ab-97e3-e129f9c1448b] raise self.value [ 723.132474] env[61839]: ERROR nova.compute.manager [instance: d608405b-20d9-42ab-97e3-e129f9c1448b] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 723.132474] env[61839]: ERROR nova.compute.manager [instance: d608405b-20d9-42ab-97e3-e129f9c1448b] updated_port = self._update_port( [ 723.132474] env[61839]: ERROR nova.compute.manager [instance: d608405b-20d9-42ab-97e3-e129f9c1448b] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 723.132474] 
env[61839]: ERROR nova.compute.manager [instance: d608405b-20d9-42ab-97e3-e129f9c1448b] _ensure_no_port_binding_failure(port) [ 723.132474] env[61839]: ERROR nova.compute.manager [instance: d608405b-20d9-42ab-97e3-e129f9c1448b] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 723.132474] env[61839]: ERROR nova.compute.manager [instance: d608405b-20d9-42ab-97e3-e129f9c1448b] raise exception.PortBindingFailed(port_id=port['id']) [ 723.132862] env[61839]: ERROR nova.compute.manager [instance: d608405b-20d9-42ab-97e3-e129f9c1448b] nova.exception.PortBindingFailed: Binding failed for port 6b89e10a-400d-4985-a3b9-998834980373, please check neutron logs for more information. [ 723.132862] env[61839]: ERROR nova.compute.manager [instance: d608405b-20d9-42ab-97e3-e129f9c1448b] [ 723.132862] env[61839]: INFO nova.compute.manager [None req-f3a9d5df-26df-4d8c-b25e-3cb525f9f793 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] [instance: d608405b-20d9-42ab-97e3-e129f9c1448b] Terminating instance [ 723.133733] env[61839]: DEBUG oslo_concurrency.lockutils [None req-f3a9d5df-26df-4d8c-b25e-3cb525f9f793 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] Acquiring lock "refresh_cache-d608405b-20d9-42ab-97e3-e129f9c1448b" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 723.250578] env[61839]: DEBUG oslo_concurrency.lockutils [None req-e5615962-6551-40fe-a767-15a9fd175176 tempest-ServerShowV257Test-1224376874 tempest-ServerShowV257Test-1224376874-project-member] Lock "81ba4888-4b21-410f-ab86-a3068995836f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 36.250s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 723.479699] env[61839]: DEBUG nova.network.neutron [req-66a98ff2-a203-4aa8-9c08-7856c3b08d16 req-7f7cec5a-5303-42b6-bdca-36add7847677 service nova] [instance: d608405b-20d9-42ab-97e3-e129f9c1448b] Instance cache missing network info. 
{{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 723.602760] env[61839]: DEBUG nova.network.neutron [req-66a98ff2-a203-4aa8-9c08-7856c3b08d16 req-7f7cec5a-5303-42b6-bdca-36add7847677 service nova] [instance: d608405b-20d9-42ab-97e3-e129f9c1448b] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 724.083263] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83888027-6996-419d-aa18-64ebc03e452e {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.091061] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdf73f97-390d-4534-a8fc-661a1dacfd77 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.123950] env[61839]: DEBUG oslo_concurrency.lockutils [req-66a98ff2-a203-4aa8-9c08-7856c3b08d16 req-7f7cec5a-5303-42b6-bdca-36add7847677 service nova] Releasing lock "refresh_cache-d608405b-20d9-42ab-97e3-e129f9c1448b" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 724.124838] env[61839]: DEBUG oslo_concurrency.lockutils [None req-f3a9d5df-26df-4d8c-b25e-3cb525f9f793 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] Acquired lock "refresh_cache-d608405b-20d9-42ab-97e3-e129f9c1448b" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 724.125808] env[61839]: DEBUG nova.network.neutron [None req-f3a9d5df-26df-4d8c-b25e-3cb525f9f793 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] [instance: d608405b-20d9-42ab-97e3-e129f9c1448b] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 724.127313] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ec2fd42-930d-4889-9e35-45ca2c00ca72 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.138516] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb0ade2d-b3db-4583-b6d5-353513d67ee6 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.157010] env[61839]: DEBUG nova.compute.provider_tree [None req-91f7d267-6fad-4c7d-b66a-41e5344a2deb tempest-ServersTestBootFromVolume-874886378 tempest-ServersTestBootFromVolume-874886378-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 724.654708] env[61839]: DEBUG nova.network.neutron [None req-f3a9d5df-26df-4d8c-b25e-3cb525f9f793 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] [instance: d608405b-20d9-42ab-97e3-e129f9c1448b] Instance cache missing network info. 
{{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 724.658430] env[61839]: DEBUG nova.scheduler.client.report [None req-91f7d267-6fad-4c7d-b66a-41e5344a2deb tempest-ServersTestBootFromVolume-874886378 tempest-ServersTestBootFromVolume-874886378-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 724.776756] env[61839]: DEBUG nova.network.neutron [None req-f3a9d5df-26df-4d8c-b25e-3cb525f9f793 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] [instance: d608405b-20d9-42ab-97e3-e129f9c1448b] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 724.976113] env[61839]: DEBUG nova.compute.manager [req-624aa866-657d-41ca-b0a6-2ce2e85e1047 req-9012b638-8bbe-4858-b975-7260dea33e9a service nova] [instance: d608405b-20d9-42ab-97e3-e129f9c1448b] Received event network-vif-deleted-6b89e10a-400d-4985-a3b9-998834980373 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 725.164403] env[61839]: DEBUG oslo_concurrency.lockutils [None req-91f7d267-6fad-4c7d-b66a-41e5344a2deb tempest-ServersTestBootFromVolume-874886378 tempest-ServersTestBootFromVolume-874886378-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.462s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 725.164850] env[61839]: DEBUG nova.compute.manager [None req-91f7d267-6fad-4c7d-b66a-41e5344a2deb tempest-ServersTestBootFromVolume-874886378 tempest-ServersTestBootFromVolume-874886378-project-member] [instance: eca07795-319e-401d-8f05-41a29bab2689] Start building networks asynchronously for instance. {{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 725.167681] env[61839]: DEBUG oslo_concurrency.lockutils [None req-92400e08-1071-41df-9108-be5fb5ffb9e6 tempest-MigrationsAdminTest-1877210458 tempest-MigrationsAdminTest-1877210458-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 30.266s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 725.279635] env[61839]: DEBUG oslo_concurrency.lockutils [None req-f3a9d5df-26df-4d8c-b25e-3cb525f9f793 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] Releasing lock "refresh_cache-d608405b-20d9-42ab-97e3-e129f9c1448b" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 725.280076] env[61839]: DEBUG nova.compute.manager [None req-f3a9d5df-26df-4d8c-b25e-3cb525f9f793 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] [instance: d608405b-20d9-42ab-97e3-e129f9c1448b] Start destroying the instance on the hypervisor. 
{{(pid=61839) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 725.280278] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-f3a9d5df-26df-4d8c-b25e-3cb525f9f793 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] [instance: d608405b-20d9-42ab-97e3-e129f9c1448b] Destroying instance {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 725.280555] env[61839]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-30bc9a0b-5bb1-476f-9ada-f0d5d3cd51a8 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.289471] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f9ac9a9-f70f-40ee-a816-07c0ce7b09b5 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.311307] env[61839]: WARNING nova.virt.vmwareapi.vmops [None req-f3a9d5df-26df-4d8c-b25e-3cb525f9f793 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] [instance: d608405b-20d9-42ab-97e3-e129f9c1448b] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance d608405b-20d9-42ab-97e3-e129f9c1448b could not be found. [ 725.311517] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-f3a9d5df-26df-4d8c-b25e-3cb525f9f793 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] [instance: d608405b-20d9-42ab-97e3-e129f9c1448b] Instance destroyed {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 725.311695] env[61839]: INFO nova.compute.manager [None req-f3a9d5df-26df-4d8c-b25e-3cb525f9f793 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] [instance: d608405b-20d9-42ab-97e3-e129f9c1448b] Took 0.03 seconds to destroy the instance on the hypervisor. [ 725.311948] env[61839]: DEBUG oslo.service.loopingcall [None req-f3a9d5df-26df-4d8c-b25e-3cb525f9f793 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 725.312195] env[61839]: DEBUG nova.compute.manager [-] [instance: d608405b-20d9-42ab-97e3-e129f9c1448b] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 725.312303] env[61839]: DEBUG nova.network.neutron [-] [instance: d608405b-20d9-42ab-97e3-e129f9c1448b] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 725.332640] env[61839]: DEBUG nova.network.neutron [-] [instance: d608405b-20d9-42ab-97e3-e129f9c1448b] Instance cache missing network info. 
{{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 725.675595] env[61839]: DEBUG nova.compute.utils [None req-91f7d267-6fad-4c7d-b66a-41e5344a2deb tempest-ServersTestBootFromVolume-874886378 tempest-ServersTestBootFromVolume-874886378-project-member] Using /dev/sd instead of None {{(pid=61839) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 725.677571] env[61839]: DEBUG nova.compute.manager [None req-91f7d267-6fad-4c7d-b66a-41e5344a2deb tempest-ServersTestBootFromVolume-874886378 tempest-ServersTestBootFromVolume-874886378-project-member] [instance: eca07795-319e-401d-8f05-41a29bab2689] Allocating IP information in the background. {{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 725.677729] env[61839]: DEBUG nova.network.neutron [None req-91f7d267-6fad-4c7d-b66a-41e5344a2deb tempest-ServersTestBootFromVolume-874886378 tempest-ServersTestBootFromVolume-874886378-project-member] [instance: eca07795-319e-401d-8f05-41a29bab2689] allocate_for_instance() {{(pid=61839) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 725.723531] env[61839]: DEBUG nova.policy [None req-91f7d267-6fad-4c7d-b66a-41e5344a2deb tempest-ServersTestBootFromVolume-874886378 tempest-ServersTestBootFromVolume-874886378-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e2717a154d914b988c273c851faecbde', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a470a32671ea44589287dbb9fc67d749', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61839) authorize /opt/stack/nova/nova/policy.py:201}} [ 725.835174] env[61839]: DEBUG nova.network.neutron [-] [instance: d608405b-20d9-42ab-97e3-e129f9c1448b] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 725.989252] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37c1c251-53fa-4399-8846-8a2f709fa4b7 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.996997] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ac4df10-de80-4539-8dd5-595d3b75d393 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.031074] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2454542-d619-45ee-9150-ed06fa5f18c3 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.038455] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8845ce28-e962-4732-ae88-f49d9b6b4adb {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.051151] env[61839]: DEBUG nova.compute.provider_tree [None req-92400e08-1071-41df-9108-be5fb5ffb9e6 tempest-MigrationsAdminTest-1877210458 tempest-MigrationsAdminTest-1877210458-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 726.081668] env[61839]: DEBUG nova.network.neutron [None req-91f7d267-6fad-4c7d-b66a-41e5344a2deb tempest-ServersTestBootFromVolume-874886378 tempest-ServersTestBootFromVolume-874886378-project-member] [instance: eca07795-319e-401d-8f05-41a29bab2689] Successfully created port: 25e558e4-fb7a-4a32-887a-7b2c34e1f130 {{(pid=61839) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 726.178738] env[61839]: DEBUG nova.compute.manager [None req-91f7d267-6fad-4c7d-b66a-41e5344a2deb tempest-ServersTestBootFromVolume-874886378 tempest-ServersTestBootFromVolume-874886378-project-member] [instance: eca07795-319e-401d-8f05-41a29bab2689] Start building block device mappings for instance. {{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 726.337659] env[61839]: INFO nova.compute.manager [-] [instance: d608405b-20d9-42ab-97e3-e129f9c1448b] Took 1.03 seconds to deallocate network for instance. [ 726.340242] env[61839]: DEBUG nova.compute.claims [None req-f3a9d5df-26df-4d8c-b25e-3cb525f9f793 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] [instance: d608405b-20d9-42ab-97e3-e129f9c1448b] Aborting claim: {{(pid=61839) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 726.340418] env[61839]: DEBUG oslo_concurrency.lockutils [None req-f3a9d5df-26df-4d8c-b25e-3cb525f9f793 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 726.562025] env[61839]: DEBUG nova.scheduler.client.report [None req-92400e08-1071-41df-9108-be5fb5ffb9e6 tempest-MigrationsAdminTest-1877210458 tempest-MigrationsAdminTest-1877210458-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 726.686026] env[61839]: INFO nova.virt.block_device [None req-91f7d267-6fad-4c7d-b66a-41e5344a2deb tempest-ServersTestBootFromVolume-874886378 tempest-ServersTestBootFromVolume-874886378-project-member] [instance: eca07795-319e-401d-8f05-41a29bab2689] Booting with volume 5fa90bfa-8e7e-4375-8e2d-289ae24c9208 at /dev/sda [ 726.745405] env[61839]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5d19b4b9-02bd-4a8c-9778-ceec1bef5ce8 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.756017] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd93ee51-9333-4949-9906-43bcc453f5e6 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.781537] env[61839]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-aa1ddfd9-7d58-4e18-8e37-9c7d31336e9c {{(pid=61839) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.789964] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-455f2f12-574e-4421-8433-fbea4be3dcb0 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.813786] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f211003-c585-4bd4-bf90-4f9eb56d2799 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.817735] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-453f9397-8127-4253-a402-5726b4dbf86d {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.832132] env[61839]: DEBUG nova.virt.block_device [None req-91f7d267-6fad-4c7d-b66a-41e5344a2deb tempest-ServersTestBootFromVolume-874886378 tempest-ServersTestBootFromVolume-874886378-project-member] [instance: eca07795-319e-401d-8f05-41a29bab2689] Updating existing volume attachment record: 07f25c1c-8ccc-49bd-8a9c-74de277c4543 {{(pid=61839) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 727.065896] env[61839]: DEBUG oslo_concurrency.lockutils [None req-92400e08-1071-41df-9108-be5fb5ffb9e6 tempest-MigrationsAdminTest-1877210458 tempest-MigrationsAdminTest-1877210458-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.898s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 727.066578] env[61839]: ERROR nova.compute.manager [None req-92400e08-1071-41df-9108-be5fb5ffb9e6 tempest-MigrationsAdminTest-1877210458 tempest-MigrationsAdminTest-1877210458-project-member] [instance: 83270007-2cbd-49a5-b3a1-1ad58ea2a66c] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 12769a9a-e673-4b4b-98ff-f989dadd3ac8, please check neutron logs for more information. 
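The lockutils records above bracket every resource claim with the "compute_resources" lock: `instance_claim` and `abort_instance_claim` both run under it, which is why the abort for 83270007 reports both how long it waited for the lock and how long it held it. A minimal sketch of that pattern follows; the decorator is real oslo.concurrency API, but the function names and bodies are illustrative stand-ins for nova's ResourceTracker, not its source. The traceback for port 12769a9a continues after the sketch.

```python
# Hedged sketch of the "compute_resources" locking pattern visible in the
# lockutils records above.
from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def instance_claim(instance_uuid):
    # Reserve CPU/RAM/disk for the build; the log's "acquired ... waited Ns"
    # and "released ... held Ns" lines bracket this critical section.
    print(f'claiming resources for {instance_uuid}')

@lockutils.synchronized('compute_resources')
def abort_instance_claim(instance_uuid):
    # Roll the reservation back when the build fails, as it does here
    # after PortBindingFailed.
    print(f'aborting claim for {instance_uuid}')

instance_claim('83270007-2cbd-49a5-b3a1-1ad58ea2a66c')
abort_instance_claim('83270007-2cbd-49a5-b3a1-1ad58ea2a66c')
```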
[ 727.066578] env[61839]: ERROR nova.compute.manager [instance: 83270007-2cbd-49a5-b3a1-1ad58ea2a66c] Traceback (most recent call last): [ 727.066578] env[61839]: ERROR nova.compute.manager [instance: 83270007-2cbd-49a5-b3a1-1ad58ea2a66c] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 727.066578] env[61839]: ERROR nova.compute.manager [instance: 83270007-2cbd-49a5-b3a1-1ad58ea2a66c] self.driver.spawn(context, instance, image_meta, [ 727.066578] env[61839]: ERROR nova.compute.manager [instance: 83270007-2cbd-49a5-b3a1-1ad58ea2a66c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 727.066578] env[61839]: ERROR nova.compute.manager [instance: 83270007-2cbd-49a5-b3a1-1ad58ea2a66c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 727.066578] env[61839]: ERROR nova.compute.manager [instance: 83270007-2cbd-49a5-b3a1-1ad58ea2a66c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 727.066578] env[61839]: ERROR nova.compute.manager [instance: 83270007-2cbd-49a5-b3a1-1ad58ea2a66c] vm_ref = self.build_virtual_machine(instance, [ 727.066578] env[61839]: ERROR nova.compute.manager [instance: 83270007-2cbd-49a5-b3a1-1ad58ea2a66c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 727.066578] env[61839]: ERROR nova.compute.manager [instance: 83270007-2cbd-49a5-b3a1-1ad58ea2a66c] vif_infos = vmwarevif.get_vif_info(self._session, [ 727.066578] env[61839]: ERROR nova.compute.manager [instance: 83270007-2cbd-49a5-b3a1-1ad58ea2a66c] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 727.066949] env[61839]: ERROR nova.compute.manager [instance: 83270007-2cbd-49a5-b3a1-1ad58ea2a66c] for vif in network_info: [ 727.066949] env[61839]: ERROR nova.compute.manager [instance: 83270007-2cbd-49a5-b3a1-1ad58ea2a66c] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 727.066949] env[61839]: ERROR nova.compute.manager [instance: 83270007-2cbd-49a5-b3a1-1ad58ea2a66c] return self._sync_wrapper(fn, *args, **kwargs) [ 727.066949] env[61839]: ERROR nova.compute.manager [instance: 83270007-2cbd-49a5-b3a1-1ad58ea2a66c] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 727.066949] env[61839]: ERROR nova.compute.manager [instance: 83270007-2cbd-49a5-b3a1-1ad58ea2a66c] self.wait() [ 727.066949] env[61839]: ERROR nova.compute.manager [instance: 83270007-2cbd-49a5-b3a1-1ad58ea2a66c] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 727.066949] env[61839]: ERROR nova.compute.manager [instance: 83270007-2cbd-49a5-b3a1-1ad58ea2a66c] self[:] = self._gt.wait() [ 727.066949] env[61839]: ERROR nova.compute.manager [instance: 83270007-2cbd-49a5-b3a1-1ad58ea2a66c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 727.066949] env[61839]: ERROR nova.compute.manager [instance: 83270007-2cbd-49a5-b3a1-1ad58ea2a66c] return self._exit_event.wait() [ 727.066949] env[61839]: ERROR nova.compute.manager [instance: 83270007-2cbd-49a5-b3a1-1ad58ea2a66c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 727.066949] env[61839]: ERROR nova.compute.manager [instance: 83270007-2cbd-49a5-b3a1-1ad58ea2a66c] result = hub.switch() [ 727.066949] env[61839]: ERROR nova.compute.manager [instance: 83270007-2cbd-49a5-b3a1-1ad58ea2a66c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
727.066949] env[61839]: ERROR nova.compute.manager [instance: 83270007-2cbd-49a5-b3a1-1ad58ea2a66c] return self.greenlet.switch() [ 727.067327] env[61839]: ERROR nova.compute.manager [instance: 83270007-2cbd-49a5-b3a1-1ad58ea2a66c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 727.067327] env[61839]: ERROR nova.compute.manager [instance: 83270007-2cbd-49a5-b3a1-1ad58ea2a66c] result = function(*args, **kwargs) [ 727.067327] env[61839]: ERROR nova.compute.manager [instance: 83270007-2cbd-49a5-b3a1-1ad58ea2a66c] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 727.067327] env[61839]: ERROR nova.compute.manager [instance: 83270007-2cbd-49a5-b3a1-1ad58ea2a66c] return func(*args, **kwargs) [ 727.067327] env[61839]: ERROR nova.compute.manager [instance: 83270007-2cbd-49a5-b3a1-1ad58ea2a66c] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 727.067327] env[61839]: ERROR nova.compute.manager [instance: 83270007-2cbd-49a5-b3a1-1ad58ea2a66c] raise e [ 727.067327] env[61839]: ERROR nova.compute.manager [instance: 83270007-2cbd-49a5-b3a1-1ad58ea2a66c] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 727.067327] env[61839]: ERROR nova.compute.manager [instance: 83270007-2cbd-49a5-b3a1-1ad58ea2a66c] nwinfo = self.network_api.allocate_for_instance( [ 727.067327] env[61839]: ERROR nova.compute.manager [instance: 83270007-2cbd-49a5-b3a1-1ad58ea2a66c] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 727.067327] env[61839]: ERROR nova.compute.manager [instance: 83270007-2cbd-49a5-b3a1-1ad58ea2a66c] created_port_ids = self._update_ports_for_instance( [ 727.067327] env[61839]: ERROR nova.compute.manager [instance: 83270007-2cbd-49a5-b3a1-1ad58ea2a66c] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 727.067327] env[61839]: ERROR nova.compute.manager [instance: 83270007-2cbd-49a5-b3a1-1ad58ea2a66c] with excutils.save_and_reraise_exception(): [ 727.067327] env[61839]: ERROR nova.compute.manager [instance: 83270007-2cbd-49a5-b3a1-1ad58ea2a66c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 727.067685] env[61839]: ERROR nova.compute.manager [instance: 83270007-2cbd-49a5-b3a1-1ad58ea2a66c] self.force_reraise() [ 727.067685] env[61839]: ERROR nova.compute.manager [instance: 83270007-2cbd-49a5-b3a1-1ad58ea2a66c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 727.067685] env[61839]: ERROR nova.compute.manager [instance: 83270007-2cbd-49a5-b3a1-1ad58ea2a66c] raise self.value [ 727.067685] env[61839]: ERROR nova.compute.manager [instance: 83270007-2cbd-49a5-b3a1-1ad58ea2a66c] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 727.067685] env[61839]: ERROR nova.compute.manager [instance: 83270007-2cbd-49a5-b3a1-1ad58ea2a66c] updated_port = self._update_port( [ 727.067685] env[61839]: ERROR nova.compute.manager [instance: 83270007-2cbd-49a5-b3a1-1ad58ea2a66c] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 727.067685] env[61839]: ERROR nova.compute.manager [instance: 83270007-2cbd-49a5-b3a1-1ad58ea2a66c] _ensure_no_port_binding_failure(port) [ 727.067685] env[61839]: ERROR nova.compute.manager [instance: 83270007-2cbd-49a5-b3a1-1ad58ea2a66c] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 727.067685] env[61839]: ERROR nova.compute.manager [instance: 83270007-2cbd-49a5-b3a1-1ad58ea2a66c] raise exception.PortBindingFailed(port_id=port['id']) [ 727.067685] env[61839]: ERROR nova.compute.manager [instance: 83270007-2cbd-49a5-b3a1-1ad58ea2a66c] nova.exception.PortBindingFailed: Binding failed for port 12769a9a-e673-4b4b-98ff-f989dadd3ac8, please check neutron logs for more information. [ 727.067685] env[61839]: ERROR nova.compute.manager [instance: 83270007-2cbd-49a5-b3a1-1ad58ea2a66c] [ 727.068013] env[61839]: DEBUG nova.compute.utils [None req-92400e08-1071-41df-9108-be5fb5ffb9e6 tempest-MigrationsAdminTest-1877210458 tempest-MigrationsAdminTest-1877210458-project-member] [instance: 83270007-2cbd-49a5-b3a1-1ad58ea2a66c] Binding failed for port 12769a9a-e673-4b4b-98ff-f989dadd3ac8, please check neutron logs for more information. {{(pid=61839) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 727.072984] env[61839]: DEBUG oslo_concurrency.lockutils [None req-f0f367ff-b05e-4ad3-a843-4493b18ee6be tempest-ListImageFiltersTestJSON-1646419560 tempest-ListImageFiltersTestJSON-1646419560-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.008s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 727.072984] env[61839]: INFO nova.compute.claims [None req-f0f367ff-b05e-4ad3-a843-4493b18ee6be tempest-ListImageFiltersTestJSON-1646419560 tempest-ListImageFiltersTestJSON-1646419560-project-member] [instance: fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 727.075909] env[61839]: DEBUG nova.compute.manager [None req-92400e08-1071-41df-9108-be5fb5ffb9e6 tempest-MigrationsAdminTest-1877210458 tempest-MigrationsAdminTest-1877210458-project-member] [instance: 83270007-2cbd-49a5-b3a1-1ad58ea2a66c] Build of instance 83270007-2cbd-49a5-b3a1-1ad58ea2a66c was re-scheduled: Binding failed for port 12769a9a-e673-4b4b-98ff-f989dadd3ac8, please check neutron logs for more information. 
{{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 727.076896] env[61839]: DEBUG nova.compute.manager [None req-92400e08-1071-41df-9108-be5fb5ffb9e6 tempest-MigrationsAdminTest-1877210458 tempest-MigrationsAdminTest-1877210458-project-member] [instance: 83270007-2cbd-49a5-b3a1-1ad58ea2a66c] Unplugging VIFs for instance {{(pid=61839) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 727.076896] env[61839]: DEBUG oslo_concurrency.lockutils [None req-92400e08-1071-41df-9108-be5fb5ffb9e6 tempest-MigrationsAdminTest-1877210458 tempest-MigrationsAdminTest-1877210458-project-member] Acquiring lock "refresh_cache-83270007-2cbd-49a5-b3a1-1ad58ea2a66c" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 727.076896] env[61839]: DEBUG oslo_concurrency.lockutils [None req-92400e08-1071-41df-9108-be5fb5ffb9e6 tempest-MigrationsAdminTest-1877210458 tempest-MigrationsAdminTest-1877210458-project-member] Acquired lock "refresh_cache-83270007-2cbd-49a5-b3a1-1ad58ea2a66c" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 727.076896] env[61839]: DEBUG nova.network.neutron [None req-92400e08-1071-41df-9108-be5fb5ffb9e6 tempest-MigrationsAdminTest-1877210458 tempest-MigrationsAdminTest-1877210458-project-member] [instance: 83270007-2cbd-49a5-b3a1-1ad58ea2a66c] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 727.082240] env[61839]: DEBUG nova.compute.manager [req-c52ec46b-77f1-4792-8a4e-3cf045523baa req-9f9c66f1-0f86-4cfd-8221-16b8be760d69 service nova] [instance: eca07795-319e-401d-8f05-41a29bab2689] Received event network-changed-25e558e4-fb7a-4a32-887a-7b2c34e1f130 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 727.083203] env[61839]: DEBUG nova.compute.manager [req-c52ec46b-77f1-4792-8a4e-3cf045523baa req-9f9c66f1-0f86-4cfd-8221-16b8be760d69 service nova] [instance: eca07795-319e-401d-8f05-41a29bab2689] Refreshing instance network info cache due to event network-changed-25e558e4-fb7a-4a32-887a-7b2c34e1f130. 
{{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 727.083203] env[61839]: DEBUG oslo_concurrency.lockutils [req-c52ec46b-77f1-4792-8a4e-3cf045523baa req-9f9c66f1-0f86-4cfd-8221-16b8be760d69 service nova] Acquiring lock "refresh_cache-eca07795-319e-401d-8f05-41a29bab2689" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 727.083203] env[61839]: DEBUG oslo_concurrency.lockutils [req-c52ec46b-77f1-4792-8a4e-3cf045523baa req-9f9c66f1-0f86-4cfd-8221-16b8be760d69 service nova] Acquired lock "refresh_cache-eca07795-319e-401d-8f05-41a29bab2689" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 727.083203] env[61839]: DEBUG nova.network.neutron [req-c52ec46b-77f1-4792-8a4e-3cf045523baa req-9f9c66f1-0f86-4cfd-8221-16b8be760d69 service nova] [instance: eca07795-319e-401d-8f05-41a29bab2689] Refreshing network info cache for port 25e558e4-fb7a-4a32-887a-7b2c34e1f130 {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 727.243202] env[61839]: ERROR nova.compute.manager [None req-91f7d267-6fad-4c7d-b66a-41e5344a2deb tempest-ServersTestBootFromVolume-874886378 tempest-ServersTestBootFromVolume-874886378-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 25e558e4-fb7a-4a32-887a-7b2c34e1f130, please check neutron logs for more information. [ 727.243202] env[61839]: ERROR nova.compute.manager Traceback (most recent call last): [ 727.243202] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 727.243202] env[61839]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 727.243202] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 727.243202] env[61839]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 727.243202] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 727.243202] env[61839]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 727.243202] env[61839]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 727.243202] env[61839]: ERROR nova.compute.manager self.force_reraise() [ 727.243202] env[61839]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 727.243202] env[61839]: ERROR nova.compute.manager raise self.value [ 727.243202] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 727.243202] env[61839]: ERROR nova.compute.manager updated_port = self._update_port( [ 727.243202] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 727.243202] env[61839]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 727.243810] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 727.243810] env[61839]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 727.243810] env[61839]: ERROR nova.compute.manager 
nova.exception.PortBindingFailed: Binding failed for port 25e558e4-fb7a-4a32-887a-7b2c34e1f130, please check neutron logs for more information. [ 727.243810] env[61839]: ERROR nova.compute.manager [ 727.243810] env[61839]: Traceback (most recent call last): [ 727.243810] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 727.243810] env[61839]: listener.cb(fileno) [ 727.243810] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 727.243810] env[61839]: result = function(*args, **kwargs) [ 727.243810] env[61839]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 727.243810] env[61839]: return func(*args, **kwargs) [ 727.243810] env[61839]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 727.243810] env[61839]: raise e [ 727.243810] env[61839]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 727.243810] env[61839]: nwinfo = self.network_api.allocate_for_instance( [ 727.243810] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 727.243810] env[61839]: created_port_ids = self._update_ports_for_instance( [ 727.243810] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 727.243810] env[61839]: with excutils.save_and_reraise_exception(): [ 727.243810] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 727.243810] env[61839]: self.force_reraise() [ 727.243810] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 727.243810] env[61839]: raise self.value [ 727.243810] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 727.243810] env[61839]: updated_port = self._update_port( [ 727.243810] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 727.243810] env[61839]: _ensure_no_port_binding_failure(port) [ 727.243810] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 727.243810] env[61839]: raise exception.PortBindingFailed(port_id=port['id']) [ 727.244866] env[61839]: nova.exception.PortBindingFailed: Binding failed for port 25e558e4-fb7a-4a32-887a-7b2c34e1f130, please check neutron logs for more information. [ 727.244866] env[61839]: Removing descriptor: 17 [ 727.599712] env[61839]: DEBUG nova.network.neutron [None req-92400e08-1071-41df-9108-be5fb5ffb9e6 tempest-MigrationsAdminTest-1877210458 tempest-MigrationsAdminTest-1877210458-project-member] [instance: 83270007-2cbd-49a5-b3a1-1ad58ea2a66c] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 727.605224] env[61839]: DEBUG nova.network.neutron [req-c52ec46b-77f1-4792-8a4e-3cf045523baa req-9f9c66f1-0f86-4cfd-8221-16b8be760d69 service nova] [instance: eca07795-319e-401d-8f05-41a29bab2689] Instance cache missing network info. 
{{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 727.673943] env[61839]: DEBUG nova.network.neutron [None req-92400e08-1071-41df-9108-be5fb5ffb9e6 tempest-MigrationsAdminTest-1877210458 tempest-MigrationsAdminTest-1877210458-project-member] [instance: 83270007-2cbd-49a5-b3a1-1ad58ea2a66c] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 727.712829] env[61839]: DEBUG nova.network.neutron [req-c52ec46b-77f1-4792-8a4e-3cf045523baa req-9f9c66f1-0f86-4cfd-8221-16b8be760d69 service nova] [instance: eca07795-319e-401d-8f05-41a29bab2689] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 728.176270] env[61839]: DEBUG oslo_concurrency.lockutils [None req-92400e08-1071-41df-9108-be5fb5ffb9e6 tempest-MigrationsAdminTest-1877210458 tempest-MigrationsAdminTest-1877210458-project-member] Releasing lock "refresh_cache-83270007-2cbd-49a5-b3a1-1ad58ea2a66c" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 728.176494] env[61839]: DEBUG nova.compute.manager [None req-92400e08-1071-41df-9108-be5fb5ffb9e6 tempest-MigrationsAdminTest-1877210458 tempest-MigrationsAdminTest-1877210458-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61839) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 728.176661] env[61839]: DEBUG nova.compute.manager [None req-92400e08-1071-41df-9108-be5fb5ffb9e6 tempest-MigrationsAdminTest-1877210458 tempest-MigrationsAdminTest-1877210458-project-member] [instance: 83270007-2cbd-49a5-b3a1-1ad58ea2a66c] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 728.176895] env[61839]: DEBUG nova.network.neutron [None req-92400e08-1071-41df-9108-be5fb5ffb9e6 tempest-MigrationsAdminTest-1877210458 tempest-MigrationsAdminTest-1877210458-project-member] [instance: 83270007-2cbd-49a5-b3a1-1ad58ea2a66c] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 728.193210] env[61839]: DEBUG nova.network.neutron [None req-92400e08-1071-41df-9108-be5fb5ffb9e6 tempest-MigrationsAdminTest-1877210458 tempest-MigrationsAdminTest-1877210458-project-member] [instance: 83270007-2cbd-49a5-b3a1-1ad58ea2a66c] Instance cache missing network info. 
{{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 728.215626] env[61839]: DEBUG oslo_concurrency.lockutils [req-c52ec46b-77f1-4792-8a4e-3cf045523baa req-9f9c66f1-0f86-4cfd-8221-16b8be760d69 service nova] Releasing lock "refresh_cache-eca07795-319e-401d-8f05-41a29bab2689" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 728.390263] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48121277-1a88-41c5-9451-9e2a7afdc829 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.397963] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eae092c8-0dbe-44c3-a2bf-243e82b6d3c7 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.427120] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-405c3b83-020e-4097-92d7-e3e4be6f3474 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.434135] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da13cf82-8f10-41d3-8a6f-773c2fc5bd26 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.447053] env[61839]: DEBUG nova.compute.provider_tree [None req-f0f367ff-b05e-4ad3-a843-4493b18ee6be tempest-ListImageFiltersTestJSON-1646419560 tempest-ListImageFiltersTestJSON-1646419560-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 728.696836] env[61839]: DEBUG nova.network.neutron [None req-92400e08-1071-41df-9108-be5fb5ffb9e6 tempest-MigrationsAdminTest-1877210458 tempest-MigrationsAdminTest-1877210458-project-member] [instance: 83270007-2cbd-49a5-b3a1-1ad58ea2a66c] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 728.950032] env[61839]: DEBUG nova.scheduler.client.report [None req-f0f367ff-b05e-4ad3-a843-4493b18ee6be tempest-ListImageFiltersTestJSON-1646419560 tempest-ListImageFiltersTestJSON-1646419560-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 728.959153] env[61839]: DEBUG nova.compute.manager [None req-91f7d267-6fad-4c7d-b66a-41e5344a2deb tempest-ServersTestBootFromVolume-874886378 tempest-ServersTestBootFromVolume-874886378-project-member] [instance: eca07795-319e-401d-8f05-41a29bab2689] Start spawning the instance on the hypervisor. 
{{(pid=61839) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 728.959790] env[61839]: DEBUG nova.virt.hardware [None req-91f7d267-6fad-4c7d-b66a-41e5344a2deb tempest-ServersTestBootFromVolume-874886378 tempest-ServersTestBootFromVolume-874886378-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-18T16:51:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 728.960056] env[61839]: DEBUG nova.virt.hardware [None req-91f7d267-6fad-4c7d-b66a-41e5344a2deb tempest-ServersTestBootFromVolume-874886378 tempest-ServersTestBootFromVolume-874886378-project-member] Flavor limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 728.960259] env[61839]: DEBUG nova.virt.hardware [None req-91f7d267-6fad-4c7d-b66a-41e5344a2deb tempest-ServersTestBootFromVolume-874886378 tempest-ServersTestBootFromVolume-874886378-project-member] Image limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 728.960478] env[61839]: DEBUG nova.virt.hardware [None req-91f7d267-6fad-4c7d-b66a-41e5344a2deb tempest-ServersTestBootFromVolume-874886378 tempest-ServersTestBootFromVolume-874886378-project-member] Flavor pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 728.960659] env[61839]: DEBUG nova.virt.hardware [None req-91f7d267-6fad-4c7d-b66a-41e5344a2deb tempest-ServersTestBootFromVolume-874886378 tempest-ServersTestBootFromVolume-874886378-project-member] Image pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 728.960835] env[61839]: DEBUG nova.virt.hardware [None req-91f7d267-6fad-4c7d-b66a-41e5344a2deb tempest-ServersTestBootFromVolume-874886378 tempest-ServersTestBootFromVolume-874886378-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 728.961121] env[61839]: DEBUG nova.virt.hardware [None req-91f7d267-6fad-4c7d-b66a-41e5344a2deb tempest-ServersTestBootFromVolume-874886378 tempest-ServersTestBootFromVolume-874886378-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 728.961337] env[61839]: DEBUG nova.virt.hardware [None req-91f7d267-6fad-4c7d-b66a-41e5344a2deb tempest-ServersTestBootFromVolume-874886378 tempest-ServersTestBootFromVolume-874886378-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 728.961553] env[61839]: DEBUG nova.virt.hardware [None req-91f7d267-6fad-4c7d-b66a-41e5344a2deb tempest-ServersTestBootFromVolume-874886378 tempest-ServersTestBootFromVolume-874886378-project-member] Got 1 possible 
topologies {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 728.961770] env[61839]: DEBUG nova.virt.hardware [None req-91f7d267-6fad-4c7d-b66a-41e5344a2deb tempest-ServersTestBootFromVolume-874886378 tempest-ServersTestBootFromVolume-874886378-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 728.961987] env[61839]: DEBUG nova.virt.hardware [None req-91f7d267-6fad-4c7d-b66a-41e5344a2deb tempest-ServersTestBootFromVolume-874886378 tempest-ServersTestBootFromVolume-874886378-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 728.962974] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f846e620-2acd-4a01-a6f5-c62080aeb031 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.973719] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fadf7bd-2114-4ced-ae4b-9a7df1f23b5e {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.989207] env[61839]: ERROR nova.compute.manager [None req-91f7d267-6fad-4c7d-b66a-41e5344a2deb tempest-ServersTestBootFromVolume-874886378 tempest-ServersTestBootFromVolume-874886378-project-member] [instance: eca07795-319e-401d-8f05-41a29bab2689] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 25e558e4-fb7a-4a32-887a-7b2c34e1f130, please check neutron logs for more information. 
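The nova.virt.hardware records above walk through CPU topology selection for the m1.nano flavor: 1 vCPU, no explicit flavor or image limits, so the 65536 defaults apply for sockets, cores, and threads, and the only factorization is (1, 1, 1). The sketch below reproduces that enumeration; it is an illustration of the logged behaviour under those assumptions, not nova's actual implementation. The spawn-failure traceback for eca07795 continues after it.

```python
# Hedged sketch: enumerate (sockets, cores, threads) factorizations of the
# vCPU count under per-dimension limits, mirroring the "Possible topologies"
# record above.
def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                        max_threads=65536):
    topologies = []
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        if vcpus % sockets:
            continue
        rest = vcpus // sockets
        for cores in range(1, min(rest, max_cores) + 1):
            if rest % cores:
                continue
            threads = rest // cores
            if threads <= max_threads:
                topologies.append((sockets, cores, threads))
    return topologies

print(possible_topologies(1))  # [(1, 1, 1)] -- matches the logged result
```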
[ 728.989207] env[61839]: ERROR nova.compute.manager [instance: eca07795-319e-401d-8f05-41a29bab2689] Traceback (most recent call last): [ 728.989207] env[61839]: ERROR nova.compute.manager [instance: eca07795-319e-401d-8f05-41a29bab2689] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 728.989207] env[61839]: ERROR nova.compute.manager [instance: eca07795-319e-401d-8f05-41a29bab2689] yield resources [ 728.989207] env[61839]: ERROR nova.compute.manager [instance: eca07795-319e-401d-8f05-41a29bab2689] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 728.989207] env[61839]: ERROR nova.compute.manager [instance: eca07795-319e-401d-8f05-41a29bab2689] self.driver.spawn(context, instance, image_meta, [ 728.989207] env[61839]: ERROR nova.compute.manager [instance: eca07795-319e-401d-8f05-41a29bab2689] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 728.989207] env[61839]: ERROR nova.compute.manager [instance: eca07795-319e-401d-8f05-41a29bab2689] self._vmops.spawn(context, instance, image_meta, injected_files, [ 728.989207] env[61839]: ERROR nova.compute.manager [instance: eca07795-319e-401d-8f05-41a29bab2689] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 728.989207] env[61839]: ERROR nova.compute.manager [instance: eca07795-319e-401d-8f05-41a29bab2689] vm_ref = self.build_virtual_machine(instance, [ 728.989207] env[61839]: ERROR nova.compute.manager [instance: eca07795-319e-401d-8f05-41a29bab2689] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 728.989670] env[61839]: ERROR nova.compute.manager [instance: eca07795-319e-401d-8f05-41a29bab2689] vif_infos = vmwarevif.get_vif_info(self._session, [ 728.989670] env[61839]: ERROR nova.compute.manager [instance: eca07795-319e-401d-8f05-41a29bab2689] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 728.989670] env[61839]: ERROR nova.compute.manager [instance: eca07795-319e-401d-8f05-41a29bab2689] for vif in network_info: [ 728.989670] env[61839]: ERROR nova.compute.manager [instance: eca07795-319e-401d-8f05-41a29bab2689] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 728.989670] env[61839]: ERROR nova.compute.manager [instance: eca07795-319e-401d-8f05-41a29bab2689] return self._sync_wrapper(fn, *args, **kwargs) [ 728.989670] env[61839]: ERROR nova.compute.manager [instance: eca07795-319e-401d-8f05-41a29bab2689] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 728.989670] env[61839]: ERROR nova.compute.manager [instance: eca07795-319e-401d-8f05-41a29bab2689] self.wait() [ 728.989670] env[61839]: ERROR nova.compute.manager [instance: eca07795-319e-401d-8f05-41a29bab2689] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 728.989670] env[61839]: ERROR nova.compute.manager [instance: eca07795-319e-401d-8f05-41a29bab2689] self[:] = self._gt.wait() [ 728.989670] env[61839]: ERROR nova.compute.manager [instance: eca07795-319e-401d-8f05-41a29bab2689] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 728.989670] env[61839]: ERROR nova.compute.manager [instance: eca07795-319e-401d-8f05-41a29bab2689] return self._exit_event.wait() [ 728.989670] env[61839]: ERROR nova.compute.manager [instance: eca07795-319e-401d-8f05-41a29bab2689] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 728.989670] env[61839]: ERROR 
nova.compute.manager [instance: eca07795-319e-401d-8f05-41a29bab2689] current.throw(*self._exc) [ 728.990187] env[61839]: ERROR nova.compute.manager [instance: eca07795-319e-401d-8f05-41a29bab2689] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 728.990187] env[61839]: ERROR nova.compute.manager [instance: eca07795-319e-401d-8f05-41a29bab2689] result = function(*args, **kwargs) [ 728.990187] env[61839]: ERROR nova.compute.manager [instance: eca07795-319e-401d-8f05-41a29bab2689] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 728.990187] env[61839]: ERROR nova.compute.manager [instance: eca07795-319e-401d-8f05-41a29bab2689] return func(*args, **kwargs) [ 728.990187] env[61839]: ERROR nova.compute.manager [instance: eca07795-319e-401d-8f05-41a29bab2689] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 728.990187] env[61839]: ERROR nova.compute.manager [instance: eca07795-319e-401d-8f05-41a29bab2689] raise e [ 728.990187] env[61839]: ERROR nova.compute.manager [instance: eca07795-319e-401d-8f05-41a29bab2689] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 728.990187] env[61839]: ERROR nova.compute.manager [instance: eca07795-319e-401d-8f05-41a29bab2689] nwinfo = self.network_api.allocate_for_instance( [ 728.990187] env[61839]: ERROR nova.compute.manager [instance: eca07795-319e-401d-8f05-41a29bab2689] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 728.990187] env[61839]: ERROR nova.compute.manager [instance: eca07795-319e-401d-8f05-41a29bab2689] created_port_ids = self._update_ports_for_instance( [ 728.990187] env[61839]: ERROR nova.compute.manager [instance: eca07795-319e-401d-8f05-41a29bab2689] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 728.990187] env[61839]: ERROR nova.compute.manager [instance: eca07795-319e-401d-8f05-41a29bab2689] with excutils.save_and_reraise_exception(): [ 728.990187] env[61839]: ERROR nova.compute.manager [instance: eca07795-319e-401d-8f05-41a29bab2689] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 728.990645] env[61839]: ERROR nova.compute.manager [instance: eca07795-319e-401d-8f05-41a29bab2689] self.force_reraise() [ 728.990645] env[61839]: ERROR nova.compute.manager [instance: eca07795-319e-401d-8f05-41a29bab2689] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 728.990645] env[61839]: ERROR nova.compute.manager [instance: eca07795-319e-401d-8f05-41a29bab2689] raise self.value [ 728.990645] env[61839]: ERROR nova.compute.manager [instance: eca07795-319e-401d-8f05-41a29bab2689] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 728.990645] env[61839]: ERROR nova.compute.manager [instance: eca07795-319e-401d-8f05-41a29bab2689] updated_port = self._update_port( [ 728.990645] env[61839]: ERROR nova.compute.manager [instance: eca07795-319e-401d-8f05-41a29bab2689] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 728.990645] env[61839]: ERROR nova.compute.manager [instance: eca07795-319e-401d-8f05-41a29bab2689] _ensure_no_port_binding_failure(port) [ 728.990645] env[61839]: ERROR nova.compute.manager [instance: eca07795-319e-401d-8f05-41a29bab2689] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
728.990645] env[61839]: ERROR nova.compute.manager [instance: eca07795-319e-401d-8f05-41a29bab2689] raise exception.PortBindingFailed(port_id=port['id']) [ 728.990645] env[61839]: ERROR nova.compute.manager [instance: eca07795-319e-401d-8f05-41a29bab2689] nova.exception.PortBindingFailed: Binding failed for port 25e558e4-fb7a-4a32-887a-7b2c34e1f130, please check neutron logs for more information. [ 728.990645] env[61839]: ERROR nova.compute.manager [instance: eca07795-319e-401d-8f05-41a29bab2689] [ 728.990645] env[61839]: INFO nova.compute.manager [None req-91f7d267-6fad-4c7d-b66a-41e5344a2deb tempest-ServersTestBootFromVolume-874886378 tempest-ServersTestBootFromVolume-874886378-project-member] [instance: eca07795-319e-401d-8f05-41a29bab2689] Terminating instance [ 728.991778] env[61839]: DEBUG oslo_concurrency.lockutils [None req-91f7d267-6fad-4c7d-b66a-41e5344a2deb tempest-ServersTestBootFromVolume-874886378 tempest-ServersTestBootFromVolume-874886378-project-member] Acquiring lock "refresh_cache-eca07795-319e-401d-8f05-41a29bab2689" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 728.991911] env[61839]: DEBUG oslo_concurrency.lockutils [None req-91f7d267-6fad-4c7d-b66a-41e5344a2deb tempest-ServersTestBootFromVolume-874886378 tempest-ServersTestBootFromVolume-874886378-project-member] Acquired lock "refresh_cache-eca07795-319e-401d-8f05-41a29bab2689" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 728.992143] env[61839]: DEBUG nova.network.neutron [None req-91f7d267-6fad-4c7d-b66a-41e5344a2deb tempest-ServersTestBootFromVolume-874886378 tempest-ServersTestBootFromVolume-874886378-project-member] [instance: eca07795-319e-401d-8f05-41a29bab2689] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 729.107441] env[61839]: DEBUG nova.compute.manager [req-5939558d-cf57-446f-8dc6-18d6eddbede9 req-406c3672-bfab-4969-b6aa-96ee5acfa6ff service nova] [instance: eca07795-319e-401d-8f05-41a29bab2689] Received event network-vif-deleted-25e558e4-fb7a-4a32-887a-7b2c34e1f130 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 729.199169] env[61839]: INFO nova.compute.manager [None req-92400e08-1071-41df-9108-be5fb5ffb9e6 tempest-MigrationsAdminTest-1877210458 tempest-MigrationsAdminTest-1877210458-project-member] [instance: 83270007-2cbd-49a5-b3a1-1ad58ea2a66c] Took 1.02 seconds to deallocate network for instance. [ 729.456587] env[61839]: DEBUG oslo_concurrency.lockutils [None req-f0f367ff-b05e-4ad3-a843-4493b18ee6be tempest-ListImageFiltersTestJSON-1646419560 tempest-ListImageFiltersTestJSON-1646419560-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.387s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 729.457087] env[61839]: DEBUG nova.compute.manager [None req-f0f367ff-b05e-4ad3-a843-4493b18ee6be tempest-ListImageFiltersTestJSON-1646419560 tempest-ListImageFiltersTestJSON-1646419560-project-member] [instance: fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1] Start building networks asynchronously for instance. 
{{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 729.460029] env[61839]: DEBUG oslo_concurrency.lockutils [None req-1fbb0b67-167b-4268-b2f4-5c0b4d6a4a9c tempest-ServersNegativeTestMultiTenantJSON-1778640072 tempest-ServersNegativeTestMultiTenantJSON-1778640072-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.536s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 729.461790] env[61839]: INFO nova.compute.claims [None req-1fbb0b67-167b-4268-b2f4-5c0b4d6a4a9c tempest-ServersNegativeTestMultiTenantJSON-1778640072 tempest-ServersNegativeTestMultiTenantJSON-1778640072-project-member] [instance: 4b7ef74e-4018-4c6e-b540-d65c986d1ff2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 729.521315] env[61839]: DEBUG nova.network.neutron [None req-91f7d267-6fad-4c7d-b66a-41e5344a2deb tempest-ServersTestBootFromVolume-874886378 tempest-ServersTestBootFromVolume-874886378-project-member] [instance: eca07795-319e-401d-8f05-41a29bab2689] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 729.640883] env[61839]: DEBUG nova.network.neutron [None req-91f7d267-6fad-4c7d-b66a-41e5344a2deb tempest-ServersTestBootFromVolume-874886378 tempest-ServersTestBootFromVolume-874886378-project-member] [instance: eca07795-319e-401d-8f05-41a29bab2689] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 729.967390] env[61839]: DEBUG nova.compute.utils [None req-f0f367ff-b05e-4ad3-a843-4493b18ee6be tempest-ListImageFiltersTestJSON-1646419560 tempest-ListImageFiltersTestJSON-1646419560-project-member] Using /dev/sd instead of None {{(pid=61839) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 729.969586] env[61839]: DEBUG nova.compute.manager [None req-f0f367ff-b05e-4ad3-a843-4493b18ee6be tempest-ListImageFiltersTestJSON-1646419560 tempest-ListImageFiltersTestJSON-1646419560-project-member] [instance: fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1] Allocating IP information in the background. 
{{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 729.969753] env[61839]: DEBUG nova.network.neutron [None req-f0f367ff-b05e-4ad3-a843-4493b18ee6be tempest-ListImageFiltersTestJSON-1646419560 tempest-ListImageFiltersTestJSON-1646419560-project-member] [instance: fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1] allocate_for_instance() {{(pid=61839) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 730.040938] env[61839]: DEBUG nova.policy [None req-f0f367ff-b05e-4ad3-a843-4493b18ee6be tempest-ListImageFiltersTestJSON-1646419560 tempest-ListImageFiltersTestJSON-1646419560-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd61b7cb2fae040c19ddded799dd6b56b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd6b1a2f51ff2483a9656a635aedabeae', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61839) authorize /opt/stack/nova/nova/policy.py:201}} [ 730.144011] env[61839]: DEBUG oslo_concurrency.lockutils [None req-91f7d267-6fad-4c7d-b66a-41e5344a2deb tempest-ServersTestBootFromVolume-874886378 tempest-ServersTestBootFromVolume-874886378-project-member] Releasing lock "refresh_cache-eca07795-319e-401d-8f05-41a29bab2689" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 730.145029] env[61839]: DEBUG nova.compute.manager [None req-91f7d267-6fad-4c7d-b66a-41e5344a2deb tempest-ServersTestBootFromVolume-874886378 tempest-ServersTestBootFromVolume-874886378-project-member] [instance: eca07795-319e-401d-8f05-41a29bab2689] Start destroying the instance on the hypervisor. {{(pid=61839) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 730.145029] env[61839]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6dd581aa-c910-4dc7-9ac7-bd41a9e8ab3d {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.154390] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57e7840a-e34e-4c4a-afc5-e6a4c6d20033 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.175763] env[61839]: WARNING nova.virt.vmwareapi.driver [None req-91f7d267-6fad-4c7d-b66a-41e5344a2deb tempest-ServersTestBootFromVolume-874886378 tempest-ServersTestBootFromVolume-874886378-project-member] [instance: eca07795-319e-401d-8f05-41a29bab2689] Instance does not exists. Proceeding to delete instance properties on datastore: nova.exception.InstanceNotFound: Instance eca07795-319e-401d-8f05-41a29bab2689 could not be found. 
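The spawn failure and teardown above all trace back to the guard at nova/network/neutron.py:294 shown in the traceback: once Neutron reports the port's binding as failed, Nova raises PortBindingFailed and unwinds the build. The following is a minimal, self-contained sketch of that check; the 'binding:vif_type' / 'binding_failed' values are assumptions based on the Neutron port-binding extension, not something this log states.

```python
# Minimal sketch of the check behind "_ensure_no_port_binding_failure(port)"
# in the traceback above. Assumption: Neutron marks a failed binding by
# setting the port's 'binding:vif_type' attribute to 'binding_failed'.
VIF_TYPE_BINDING_FAILED = 'binding_failed'


class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__(f"Binding failed for port {port_id}, "
                         "please check neutron logs for more information.")


def _ensure_no_port_binding_failure(port):
    if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
        raise PortBindingFailed(port_id=port['id'])


# The port dict mimics what Neutron would return for the failed port above.
try:
    _ensure_no_port_binding_failure({
        'id': '25e558e4-fb7a-4a32-887a-7b2c34e1f130',
        'binding:vif_type': 'binding_failed',
    })
except PortBindingFailed as exc:
    print(exc)
```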
[ 730.176048] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-91f7d267-6fad-4c7d-b66a-41e5344a2deb tempest-ServersTestBootFromVolume-874886378 tempest-ServersTestBootFromVolume-874886378-project-member] [instance: eca07795-319e-401d-8f05-41a29bab2689] Destroying instance {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 730.176372] env[61839]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5baa1875-59f9-4d21-9c40-9dad589fe697 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.184618] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c51c1ee-1ec1-481a-b818-f1fb3ea42088 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.211336] env[61839]: WARNING nova.virt.vmwareapi.vmops [None req-91f7d267-6fad-4c7d-b66a-41e5344a2deb tempest-ServersTestBootFromVolume-874886378 tempest-ServersTestBootFromVolume-874886378-project-member] [instance: eca07795-319e-401d-8f05-41a29bab2689] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance eca07795-319e-401d-8f05-41a29bab2689 could not be found. [ 730.211621] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-91f7d267-6fad-4c7d-b66a-41e5344a2deb tempest-ServersTestBootFromVolume-874886378 tempest-ServersTestBootFromVolume-874886378-project-member] [instance: eca07795-319e-401d-8f05-41a29bab2689] Instance destroyed {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 730.211796] env[61839]: INFO nova.compute.manager [None req-91f7d267-6fad-4c7d-b66a-41e5344a2deb tempest-ServersTestBootFromVolume-874886378 tempest-ServersTestBootFromVolume-874886378-project-member] [instance: eca07795-319e-401d-8f05-41a29bab2689] Took 0.07 seconds to destroy the instance on the hypervisor. [ 730.212334] env[61839]: DEBUG oslo.service.loopingcall [None req-91f7d267-6fad-4c7d-b66a-41e5344a2deb tempest-ServersTestBootFromVolume-874886378 tempest-ServersTestBootFromVolume-874886378-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 730.213223] env[61839]: DEBUG nova.compute.manager [-] [instance: eca07795-319e-401d-8f05-41a29bab2689] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 730.213223] env[61839]: DEBUG nova.network.neutron [-] [instance: eca07795-319e-401d-8f05-41a29bab2689] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 730.227777] env[61839]: INFO nova.scheduler.client.report [None req-92400e08-1071-41df-9108-be5fb5ffb9e6 tempest-MigrationsAdminTest-1877210458 tempest-MigrationsAdminTest-1877210458-project-member] Deleted allocations for instance 83270007-2cbd-49a5-b3a1-1ad58ea2a66c [ 730.237830] env[61839]: DEBUG nova.network.neutron [-] [instance: eca07795-319e-401d-8f05-41a29bab2689] Instance cache missing network info.
{{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 730.428823] env[61839]: DEBUG nova.network.neutron [None req-f0f367ff-b05e-4ad3-a843-4493b18ee6be tempest-ListImageFiltersTestJSON-1646419560 tempest-ListImageFiltersTestJSON-1646419560-project-member] [instance: fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1] Successfully created port: 7a0ef298-7bd2-4f0e-abf2-e22778afc52a {{(pid=61839) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 730.471825] env[61839]: DEBUG nova.compute.manager [None req-f0f367ff-b05e-4ad3-a843-4493b18ee6be tempest-ListImageFiltersTestJSON-1646419560 tempest-ListImageFiltersTestJSON-1646419560-project-member] [instance: fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1] Start building block device mappings for instance. {{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 730.740943] env[61839]: DEBUG oslo_concurrency.lockutils [None req-92400e08-1071-41df-9108-be5fb5ffb9e6 tempest-MigrationsAdminTest-1877210458 tempest-MigrationsAdminTest-1877210458-project-member] Lock "83270007-2cbd-49a5-b3a1-1ad58ea2a66c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 146.812s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 730.740943] env[61839]: DEBUG nova.network.neutron [-] [instance: eca07795-319e-401d-8f05-41a29bab2689] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 730.812294] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e17abf90-db26-4716-a527-8910bc7d03c6 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.821758] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abe25517-9f4b-45a5-bc99-d68ecd931a4f {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.859912] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e0acd7d-baa9-41ed-bda8-dc534aca2176 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.868177] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3dbc8725-f0ee-404e-8db9-8f098a009309 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.881400] env[61839]: DEBUG nova.compute.provider_tree [None req-1fbb0b67-167b-4268-b2f4-5c0b4d6a4a9c tempest-ServersNegativeTestMultiTenantJSON-1778640072 tempest-ServersNegativeTestMultiTenantJSON-1778640072-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 731.244803] env[61839]: INFO nova.compute.manager [-] [instance: eca07795-319e-401d-8f05-41a29bab2689] Took 1.03 seconds to deallocate network for instance. [ 731.245236] env[61839]: DEBUG nova.compute.manager [None req-1e1fbba5-4b37-438c-a0c7-7d09c658ba03 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] [instance: 337b31e7-a6c9-4f35-9936-62cff06fe2a1] Starting instance... 
{{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 731.377648] env[61839]: DEBUG nova.compute.manager [req-245191eb-6da6-448d-9ade-4b3b7b4a6aee req-7a39b8c0-91d6-4124-87b3-30989968ba42 service nova] [instance: fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1] Received event network-changed-7a0ef298-7bd2-4f0e-abf2-e22778afc52a {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 731.379247] env[61839]: DEBUG nova.compute.manager [req-245191eb-6da6-448d-9ade-4b3b7b4a6aee req-7a39b8c0-91d6-4124-87b3-30989968ba42 service nova] [instance: fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1] Refreshing instance network info cache due to event network-changed-7a0ef298-7bd2-4f0e-abf2-e22778afc52a. {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 731.379247] env[61839]: DEBUG oslo_concurrency.lockutils [req-245191eb-6da6-448d-9ade-4b3b7b4a6aee req-7a39b8c0-91d6-4124-87b3-30989968ba42 service nova] Acquiring lock "refresh_cache-fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 731.379247] env[61839]: DEBUG oslo_concurrency.lockutils [req-245191eb-6da6-448d-9ade-4b3b7b4a6aee req-7a39b8c0-91d6-4124-87b3-30989968ba42 service nova] Acquired lock "refresh_cache-fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 731.379247] env[61839]: DEBUG nova.network.neutron [req-245191eb-6da6-448d-9ade-4b3b7b4a6aee req-7a39b8c0-91d6-4124-87b3-30989968ba42 service nova] [instance: fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1] Refreshing network info cache for port 7a0ef298-7bd2-4f0e-abf2-e22778afc52a {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 731.384366] env[61839]: DEBUG nova.scheduler.client.report [None req-1fbb0b67-167b-4268-b2f4-5c0b4d6a4a9c tempest-ServersNegativeTestMultiTenantJSON-1778640072 tempest-ServersNegativeTestMultiTenantJSON-1778640072-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 731.484592] env[61839]: DEBUG nova.compute.manager [None req-f0f367ff-b05e-4ad3-a843-4493b18ee6be tempest-ListImageFiltersTestJSON-1646419560 tempest-ListImageFiltersTestJSON-1646419560-project-member] [instance: fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1] Start spawning the instance on the hypervisor. 
{{(pid=61839) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 731.511621] env[61839]: DEBUG nova.virt.hardware [None req-f0f367ff-b05e-4ad3-a843-4493b18ee6be tempest-ListImageFiltersTestJSON-1646419560 tempest-ListImageFiltersTestJSON-1646419560-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-18T16:51:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-18T16:50:57Z,direct_url=,disk_format='vmdk',id=e497cc62-282a-4a70-9770-22d80d8a1013,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a98e034276e44534a9feace637762da7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-18T16:50:58Z,virtual_size=,visibility=), allow threads: False {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 731.511967] env[61839]: DEBUG nova.virt.hardware [None req-f0f367ff-b05e-4ad3-a843-4493b18ee6be tempest-ListImageFiltersTestJSON-1646419560 tempest-ListImageFiltersTestJSON-1646419560-project-member] Flavor limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 731.512036] env[61839]: DEBUG nova.virt.hardware [None req-f0f367ff-b05e-4ad3-a843-4493b18ee6be tempest-ListImageFiltersTestJSON-1646419560 tempest-ListImageFiltersTestJSON-1646419560-project-member] Image limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 731.512215] env[61839]: DEBUG nova.virt.hardware [None req-f0f367ff-b05e-4ad3-a843-4493b18ee6be tempest-ListImageFiltersTestJSON-1646419560 tempest-ListImageFiltersTestJSON-1646419560-project-member] Flavor pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 731.512360] env[61839]: DEBUG nova.virt.hardware [None req-f0f367ff-b05e-4ad3-a843-4493b18ee6be tempest-ListImageFiltersTestJSON-1646419560 tempest-ListImageFiltersTestJSON-1646419560-project-member] Image pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 731.512509] env[61839]: DEBUG nova.virt.hardware [None req-f0f367ff-b05e-4ad3-a843-4493b18ee6be tempest-ListImageFiltersTestJSON-1646419560 tempest-ListImageFiltersTestJSON-1646419560-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 731.512911] env[61839]: DEBUG nova.virt.hardware [None req-f0f367ff-b05e-4ad3-a843-4493b18ee6be tempest-ListImageFiltersTestJSON-1646419560 tempest-ListImageFiltersTestJSON-1646419560-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 731.512911] env[61839]: DEBUG nova.virt.hardware [None req-f0f367ff-b05e-4ad3-a843-4493b18ee6be tempest-ListImageFiltersTestJSON-1646419560 tempest-ListImageFiltersTestJSON-1646419560-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 731.513073] 
env[61839]: DEBUG nova.virt.hardware [None req-f0f367ff-b05e-4ad3-a843-4493b18ee6be tempest-ListImageFiltersTestJSON-1646419560 tempest-ListImageFiltersTestJSON-1646419560-project-member] Got 1 possible topologies {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 731.513243] env[61839]: DEBUG nova.virt.hardware [None req-f0f367ff-b05e-4ad3-a843-4493b18ee6be tempest-ListImageFiltersTestJSON-1646419560 tempest-ListImageFiltersTestJSON-1646419560-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 731.514096] env[61839]: DEBUG nova.virt.hardware [None req-f0f367ff-b05e-4ad3-a843-4493b18ee6be tempest-ListImageFiltersTestJSON-1646419560 tempest-ListImageFiltersTestJSON-1646419560-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 731.514370] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d59e9274-1746-4579-a28c-b6fc59111faf {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.523768] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bc9d0ee-9b31-486c-a7ea-5ccdb90e49ab {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.598494] env[61839]: ERROR nova.compute.manager [None req-f0f367ff-b05e-4ad3-a843-4493b18ee6be tempest-ListImageFiltersTestJSON-1646419560 tempest-ListImageFiltersTestJSON-1646419560-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 7a0ef298-7bd2-4f0e-abf2-e22778afc52a, please check neutron logs for more information. 
[ 731.598494] env[61839]: ERROR nova.compute.manager Traceback (most recent call last): [ 731.598494] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 731.598494] env[61839]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 731.598494] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 731.598494] env[61839]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 731.598494] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 731.598494] env[61839]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 731.598494] env[61839]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 731.598494] env[61839]: ERROR nova.compute.manager self.force_reraise() [ 731.598494] env[61839]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 731.598494] env[61839]: ERROR nova.compute.manager raise self.value [ 731.598494] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 731.598494] env[61839]: ERROR nova.compute.manager updated_port = self._update_port( [ 731.598494] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 731.598494] env[61839]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 731.599558] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 731.599558] env[61839]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 731.599558] env[61839]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 7a0ef298-7bd2-4f0e-abf2-e22778afc52a, please check neutron logs for more information. 
[ 731.599558] env[61839]: ERROR nova.compute.manager [ 731.599558] env[61839]: Traceback (most recent call last): [ 731.599558] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 731.599558] env[61839]: listener.cb(fileno) [ 731.599558] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 731.599558] env[61839]: result = function(*args, **kwargs) [ 731.599558] env[61839]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 731.599558] env[61839]: return func(*args, **kwargs) [ 731.599558] env[61839]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 731.599558] env[61839]: raise e [ 731.599558] env[61839]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 731.599558] env[61839]: nwinfo = self.network_api.allocate_for_instance( [ 731.599558] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 731.599558] env[61839]: created_port_ids = self._update_ports_for_instance( [ 731.599558] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 731.599558] env[61839]: with excutils.save_and_reraise_exception(): [ 731.599558] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 731.599558] env[61839]: self.force_reraise() [ 731.599558] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 731.599558] env[61839]: raise self.value [ 731.599558] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 731.599558] env[61839]: updated_port = self._update_port( [ 731.599558] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 731.599558] env[61839]: _ensure_no_port_binding_failure(port) [ 731.599558] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 731.599558] env[61839]: raise exception.PortBindingFailed(port_id=port['id']) [ 731.601527] env[61839]: nova.exception.PortBindingFailed: Binding failed for port 7a0ef298-7bd2-4f0e-abf2-e22778afc52a, please check neutron logs for more information. [ 731.601527] env[61839]: Removing descriptor: 21 [ 731.601527] env[61839]: ERROR nova.compute.manager [None req-f0f367ff-b05e-4ad3-a843-4493b18ee6be tempest-ListImageFiltersTestJSON-1646419560 tempest-ListImageFiltersTestJSON-1646419560-project-member] [instance: fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 7a0ef298-7bd2-4f0e-abf2-e22778afc52a, please check neutron logs for more information. 
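The same PortBindingFailed is logged twice here: the raw traceback above comes from the eventlet hub when the greenthread running _allocate_network_async dies, and the per-instance copy that follows is produced when the spawn path calls wait() on the network_info object and the exception is re-raised there. A small sketch of that re-raise behaviour, assuming eventlet is installed (RuntimeError stands in for PortBindingFailed):

```python
# An exception raised inside a greenthread is re-raised to whichever caller
# waits on it, which is why the failure surfaces once in the hub traceback
# and again in the compute manager's spawn path.
import eventlet


def allocate():
    raise RuntimeError("Binding failed for port ...")


gt = eventlet.spawn(allocate)
try:
    gt.wait()  # re-raises the greenthread's exception in the caller
except RuntimeError as exc:
    print(f"surfaced in the waiting caller: {exc}")
```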
[ 731.601527] env[61839]: ERROR nova.compute.manager [instance: fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1] Traceback (most recent call last): [ 731.601527] env[61839]: ERROR nova.compute.manager [instance: fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 731.601527] env[61839]: ERROR nova.compute.manager [instance: fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1] yield resources [ 731.601527] env[61839]: ERROR nova.compute.manager [instance: fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 731.601527] env[61839]: ERROR nova.compute.manager [instance: fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1] self.driver.spawn(context, instance, image_meta, [ 731.601527] env[61839]: ERROR nova.compute.manager [instance: fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 731.601527] env[61839]: ERROR nova.compute.manager [instance: fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1] self._vmops.spawn(context, instance, image_meta, injected_files, [ 731.601527] env[61839]: ERROR nova.compute.manager [instance: fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 731.601527] env[61839]: ERROR nova.compute.manager [instance: fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1] vm_ref = self.build_virtual_machine(instance, [ 731.602325] env[61839]: ERROR nova.compute.manager [instance: fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 731.602325] env[61839]: ERROR nova.compute.manager [instance: fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1] vif_infos = vmwarevif.get_vif_info(self._session, [ 731.602325] env[61839]: ERROR nova.compute.manager [instance: fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 731.602325] env[61839]: ERROR nova.compute.manager [instance: fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1] for vif in network_info: [ 731.602325] env[61839]: ERROR nova.compute.manager [instance: fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 731.602325] env[61839]: ERROR nova.compute.manager [instance: fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1] return self._sync_wrapper(fn, *args, **kwargs) [ 731.602325] env[61839]: ERROR nova.compute.manager [instance: fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 731.602325] env[61839]: ERROR nova.compute.manager [instance: fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1] self.wait() [ 731.602325] env[61839]: ERROR nova.compute.manager [instance: fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 731.602325] env[61839]: ERROR nova.compute.manager [instance: fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1] self[:] = self._gt.wait() [ 731.602325] env[61839]: ERROR nova.compute.manager [instance: fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 731.602325] env[61839]: ERROR nova.compute.manager [instance: fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1] return self._exit_event.wait() [ 731.602325] env[61839]: ERROR nova.compute.manager [instance: fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 731.603181] env[61839]: ERROR 
nova.compute.manager [instance: fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1] result = hub.switch() [ 731.603181] env[61839]: ERROR nova.compute.manager [instance: fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 731.603181] env[61839]: ERROR nova.compute.manager [instance: fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1] return self.greenlet.switch() [ 731.603181] env[61839]: ERROR nova.compute.manager [instance: fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 731.603181] env[61839]: ERROR nova.compute.manager [instance: fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1] result = function(*args, **kwargs) [ 731.603181] env[61839]: ERROR nova.compute.manager [instance: fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 731.603181] env[61839]: ERROR nova.compute.manager [instance: fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1] return func(*args, **kwargs) [ 731.603181] env[61839]: ERROR nova.compute.manager [instance: fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 731.603181] env[61839]: ERROR nova.compute.manager [instance: fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1] raise e [ 731.603181] env[61839]: ERROR nova.compute.manager [instance: fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 731.603181] env[61839]: ERROR nova.compute.manager [instance: fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1] nwinfo = self.network_api.allocate_for_instance( [ 731.603181] env[61839]: ERROR nova.compute.manager [instance: fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 731.603181] env[61839]: ERROR nova.compute.manager [instance: fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1] created_port_ids = self._update_ports_for_instance( [ 731.604138] env[61839]: ERROR nova.compute.manager [instance: fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 731.604138] env[61839]: ERROR nova.compute.manager [instance: fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1] with excutils.save_and_reraise_exception(): [ 731.604138] env[61839]: ERROR nova.compute.manager [instance: fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 731.604138] env[61839]: ERROR nova.compute.manager [instance: fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1] self.force_reraise() [ 731.604138] env[61839]: ERROR nova.compute.manager [instance: fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 731.604138] env[61839]: ERROR nova.compute.manager [instance: fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1] raise self.value [ 731.604138] env[61839]: ERROR nova.compute.manager [instance: fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 731.604138] env[61839]: ERROR nova.compute.manager [instance: fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1] updated_port = self._update_port( [ 731.604138] env[61839]: ERROR nova.compute.manager [instance: fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 731.604138] 
env[61839]: ERROR nova.compute.manager [instance: fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1] _ensure_no_port_binding_failure(port) [ 731.604138] env[61839]: ERROR nova.compute.manager [instance: fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 731.604138] env[61839]: ERROR nova.compute.manager [instance: fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1] raise exception.PortBindingFailed(port_id=port['id']) [ 731.604908] env[61839]: ERROR nova.compute.manager [instance: fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1] nova.exception.PortBindingFailed: Binding failed for port 7a0ef298-7bd2-4f0e-abf2-e22778afc52a, please check neutron logs for more information. [ 731.604908] env[61839]: ERROR nova.compute.manager [instance: fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1] [ 731.604908] env[61839]: INFO nova.compute.manager [None req-f0f367ff-b05e-4ad3-a843-4493b18ee6be tempest-ListImageFiltersTestJSON-1646419560 tempest-ListImageFiltersTestJSON-1646419560-project-member] [instance: fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1] Terminating instance [ 731.604908] env[61839]: DEBUG oslo_concurrency.lockutils [None req-f0f367ff-b05e-4ad3-a843-4493b18ee6be tempest-ListImageFiltersTestJSON-1646419560 tempest-ListImageFiltersTestJSON-1646419560-project-member] Acquiring lock "refresh_cache-fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 731.772637] env[61839]: DEBUG oslo_concurrency.lockutils [None req-1e1fbba5-4b37-438c-a0c7-7d09c658ba03 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 731.817805] env[61839]: INFO nova.compute.manager [None req-91f7d267-6fad-4c7d-b66a-41e5344a2deb tempest-ServersTestBootFromVolume-874886378 tempest-ServersTestBootFromVolume-874886378-project-member] [instance: eca07795-319e-401d-8f05-41a29bab2689] Took 0.57 seconds to detach 1 volumes for instance. 
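Every traceback in this section funnels through save_and_reraise_exception, which is why the __exit__ / force_reraise() / raise self.value frames keep appearing: the context manager captures the in-flight exception, lets cleanup code in its body run, and then re-raises the original so the caller still sees PortBindingFailed. A sketch of that pattern, assuming oslo.utils is installed (ValueError stands in for the real exception):

```python
from oslo_utils import excutils


def cleanup_ports():
    # Stand-in for the port cleanup Nova performs before re-raising.
    print("cleanup ran before the re-raise")


try:
    try:
        raise ValueError("Binding failed for port ...")
    except Exception:
        # __enter__ captures the in-flight ValueError; after the body runs,
        # __exit__ calls force_reraise() to raise the original value again.
        with excutils.save_and_reraise_exception():
            cleanup_ports()
except ValueError as exc:
    print(f"caller still sees the original: {exc}")
```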
[ 731.820074] env[61839]: DEBUG nova.compute.claims [None req-91f7d267-6fad-4c7d-b66a-41e5344a2deb tempest-ServersTestBootFromVolume-874886378 tempest-ServersTestBootFromVolume-874886378-project-member] [instance: eca07795-319e-401d-8f05-41a29bab2689] Aborting claim: {{(pid=61839) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 731.820254] env[61839]: DEBUG oslo_concurrency.lockutils [None req-91f7d267-6fad-4c7d-b66a-41e5344a2deb tempest-ServersTestBootFromVolume-874886378 tempest-ServersTestBootFromVolume-874886378-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 731.891578] env[61839]: DEBUG oslo_concurrency.lockutils [None req-1fbb0b67-167b-4268-b2f4-5c0b4d6a4a9c tempest-ServersNegativeTestMultiTenantJSON-1778640072 tempest-ServersNegativeTestMultiTenantJSON-1778640072-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.431s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 731.891578] env[61839]: DEBUG nova.compute.manager [None req-1fbb0b67-167b-4268-b2f4-5c0b4d6a4a9c tempest-ServersNegativeTestMultiTenantJSON-1778640072 tempest-ServersNegativeTestMultiTenantJSON-1778640072-project-member] [instance: 4b7ef74e-4018-4c6e-b540-d65c986d1ff2] Start building networks asynchronously for instance. {{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 731.893969] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a61be0be-1acd-4bdd-b910-e27219e7a065 tempest-ServersAdminNegativeTestJSON-965769092 tempest-ServersAdminNegativeTestJSON-965769092-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 16.501s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 731.902792] env[61839]: DEBUG nova.network.neutron [req-245191eb-6da6-448d-9ade-4b3b7b4a6aee req-7a39b8c0-91d6-4124-87b3-30989968ba42 service nova] [instance: fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 731.976504] env[61839]: DEBUG nova.network.neutron [req-245191eb-6da6-448d-9ade-4b3b7b4a6aee req-7a39b8c0-91d6-4124-87b3-30989968ba42 service nova] [instance: fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 732.398831] env[61839]: DEBUG nova.compute.utils [None req-1fbb0b67-167b-4268-b2f4-5c0b4d6a4a9c tempest-ServersNegativeTestMultiTenantJSON-1778640072 tempest-ServersNegativeTestMultiTenantJSON-1778640072-project-member] Using /dev/sd instead of None {{(pid=61839) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 732.403082] env[61839]: DEBUG nova.compute.manager [None req-1fbb0b67-167b-4268-b2f4-5c0b4d6a4a9c tempest-ServersNegativeTestMultiTenantJSON-1778640072 tempest-ServersNegativeTestMultiTenantJSON-1778640072-project-member] [instance: 4b7ef74e-4018-4c6e-b540-d65c986d1ff2] Allocating IP information in the background. 
{{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 732.403082] env[61839]: DEBUG nova.network.neutron [None req-1fbb0b67-167b-4268-b2f4-5c0b4d6a4a9c tempest-ServersNegativeTestMultiTenantJSON-1778640072 tempest-ServersNegativeTestMultiTenantJSON-1778640072-project-member] [instance: 4b7ef74e-4018-4c6e-b540-d65c986d1ff2] allocate_for_instance() {{(pid=61839) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 732.463813] env[61839]: DEBUG nova.policy [None req-1fbb0b67-167b-4268-b2f4-5c0b4d6a4a9c tempest-ServersNegativeTestMultiTenantJSON-1778640072 tempest-ServersNegativeTestMultiTenantJSON-1778640072-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'be420150843446d5b8435c77d36e2141', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ce18e7e906d54a2bb8a084f266c66fdc', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61839) authorize /opt/stack/nova/nova/policy.py:201}} [ 732.479036] env[61839]: DEBUG oslo_concurrency.lockutils [req-245191eb-6da6-448d-9ade-4b3b7b4a6aee req-7a39b8c0-91d6-4124-87b3-30989968ba42 service nova] Releasing lock "refresh_cache-fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 732.479911] env[61839]: DEBUG oslo_concurrency.lockutils [None req-f0f367ff-b05e-4ad3-a843-4493b18ee6be tempest-ListImageFiltersTestJSON-1646419560 tempest-ListImageFiltersTestJSON-1646419560-project-member] Acquired lock "refresh_cache-fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 732.479911] env[61839]: DEBUG nova.network.neutron [None req-f0f367ff-b05e-4ad3-a843-4493b18ee6be tempest-ListImageFiltersTestJSON-1646419560 tempest-ListImageFiltersTestJSON-1646419560-project-member] [instance: fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 732.767350] env[61839]: DEBUG nova.network.neutron [None req-1fbb0b67-167b-4268-b2f4-5c0b4d6a4a9c tempest-ServersNegativeTestMultiTenantJSON-1778640072 tempest-ServersNegativeTestMultiTenantJSON-1778640072-project-member] [instance: 4b7ef74e-4018-4c6e-b540-d65c986d1ff2] Successfully created port: ddba8534-6cd9-4257-91ab-8c0543ed5713 {{(pid=61839) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 732.770729] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9e2db28-f9fb-4e2e-b4b3-ab4f201ba16c {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.780225] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d46aacdb-1b30-41cd-ae1f-76667b7a9bf5 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.813785] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-853397ff-07e2-48d0-9232-115f78adef1d {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
732.823020] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d571764f-b815-484b-ab72-4a082feb2d07 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.834443] env[61839]: DEBUG nova.compute.provider_tree [None req-a61be0be-1acd-4bdd-b910-e27219e7a065 tempest-ServersAdminNegativeTestJSON-965769092 tempest-ServersAdminNegativeTestJSON-965769092-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 732.905831] env[61839]: DEBUG nova.compute.manager [None req-1fbb0b67-167b-4268-b2f4-5c0b4d6a4a9c tempest-ServersNegativeTestMultiTenantJSON-1778640072 tempest-ServersNegativeTestMultiTenantJSON-1778640072-project-member] [instance: 4b7ef74e-4018-4c6e-b540-d65c986d1ff2] Start building block device mappings for instance. {{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 733.002819] env[61839]: DEBUG nova.network.neutron [None req-f0f367ff-b05e-4ad3-a843-4493b18ee6be tempest-ListImageFiltersTestJSON-1646419560 tempest-ListImageFiltersTestJSON-1646419560-project-member] [instance: fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 733.112741] env[61839]: DEBUG nova.network.neutron [None req-f0f367ff-b05e-4ad3-a843-4493b18ee6be tempest-ListImageFiltersTestJSON-1646419560 tempest-ListImageFiltersTestJSON-1646419560-project-member] [instance: fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 733.338883] env[61839]: DEBUG nova.scheduler.client.report [None req-a61be0be-1acd-4bdd-b910-e27219e7a065 tempest-ServersAdminNegativeTestJSON-965769092 tempest-ServersAdminNegativeTestJSON-965769092-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 733.407392] env[61839]: DEBUG nova.compute.manager [req-f76b4d00-5507-4862-99d9-319ab000a8b3 req-291572b3-62db-4c65-927c-ac5abc681aa1 service nova] [instance: fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1] Received event network-vif-deleted-7a0ef298-7bd2-4f0e-abf2-e22778afc52a {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 733.615202] env[61839]: DEBUG oslo_concurrency.lockutils [None req-f0f367ff-b05e-4ad3-a843-4493b18ee6be tempest-ListImageFiltersTestJSON-1646419560 tempest-ListImageFiltersTestJSON-1646419560-project-member] Releasing lock "refresh_cache-fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 733.615711] env[61839]: DEBUG nova.compute.manager [None req-f0f367ff-b05e-4ad3-a843-4493b18ee6be tempest-ListImageFiltersTestJSON-1646419560 tempest-ListImageFiltersTestJSON-1646419560-project-member] 
[instance: fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1] Start destroying the instance on the hypervisor. {{(pid=61839) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 733.615909] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-f0f367ff-b05e-4ad3-a843-4493b18ee6be tempest-ListImageFiltersTestJSON-1646419560 tempest-ListImageFiltersTestJSON-1646419560-project-member] [instance: fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1] Destroying instance {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 733.616215] env[61839]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-58aca672-3e75-49f8-be6c-a055b975f375 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.625536] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66bacd42-4e74-4492-8223-958835bec724 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.652413] env[61839]: WARNING nova.virt.vmwareapi.vmops [None req-f0f367ff-b05e-4ad3-a843-4493b18ee6be tempest-ListImageFiltersTestJSON-1646419560 tempest-ListImageFiltersTestJSON-1646419560-project-member] [instance: fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1 could not be found. [ 733.652695] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-f0f367ff-b05e-4ad3-a843-4493b18ee6be tempest-ListImageFiltersTestJSON-1646419560 tempest-ListImageFiltersTestJSON-1646419560-project-member] [instance: fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1] Instance destroyed {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 733.652909] env[61839]: INFO nova.compute.manager [None req-f0f367ff-b05e-4ad3-a843-4493b18ee6be tempest-ListImageFiltersTestJSON-1646419560 tempest-ListImageFiltersTestJSON-1646419560-project-member] [instance: fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1] Took 0.04 seconds to destroy the instance on the hypervisor. [ 733.653209] env[61839]: DEBUG oslo.service.loopingcall [None req-f0f367ff-b05e-4ad3-a843-4493b18ee6be tempest-ListImageFiltersTestJSON-1646419560 tempest-ListImageFiltersTestJSON-1646419560-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 733.653408] env[61839]: DEBUG nova.compute.manager [-] [instance: fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 733.653508] env[61839]: DEBUG nova.network.neutron [-] [instance: fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 733.674424] env[61839]: DEBUG nova.network.neutron [-] [instance: fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1] Instance cache missing network info.
{{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 733.847365] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a61be0be-1acd-4bdd-b910-e27219e7a065 tempest-ServersAdminNegativeTestJSON-965769092 tempest-ServersAdminNegativeTestJSON-965769092-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.951s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 733.847365] env[61839]: ERROR nova.compute.manager [None req-a61be0be-1acd-4bdd-b910-e27219e7a065 tempest-ServersAdminNegativeTestJSON-965769092 tempest-ServersAdminNegativeTestJSON-965769092-project-member] [instance: 5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 9d6dd4ce-4303-47c2-b801-9e788af014ff, please check neutron logs for more information. [ 733.847365] env[61839]: ERROR nova.compute.manager [instance: 5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8] Traceback (most recent call last): [ 733.847365] env[61839]: ERROR nova.compute.manager [instance: 5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 733.847365] env[61839]: ERROR nova.compute.manager [instance: 5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8] self.driver.spawn(context, instance, image_meta, [ 733.847365] env[61839]: ERROR nova.compute.manager [instance: 5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 733.847365] env[61839]: ERROR nova.compute.manager [instance: 5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8] self._vmops.spawn(context, instance, image_meta, injected_files, [ 733.847365] env[61839]: ERROR nova.compute.manager [instance: 5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 733.847365] env[61839]: ERROR nova.compute.manager [instance: 5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8] vm_ref = self.build_virtual_machine(instance, [ 733.847699] env[61839]: ERROR nova.compute.manager [instance: 5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 733.847699] env[61839]: ERROR nova.compute.manager [instance: 5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8] vif_infos = vmwarevif.get_vif_info(self._session, [ 733.847699] env[61839]: ERROR nova.compute.manager [instance: 5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 733.847699] env[61839]: ERROR nova.compute.manager [instance: 5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8] for vif in network_info: [ 733.847699] env[61839]: ERROR nova.compute.manager [instance: 5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 733.847699] env[61839]: ERROR nova.compute.manager [instance: 5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8] return self._sync_wrapper(fn, *args, **kwargs) [ 733.847699] env[61839]: ERROR nova.compute.manager [instance: 5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 733.847699] env[61839]: ERROR nova.compute.manager [instance: 5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8] self.wait() [ 733.847699] env[61839]: ERROR nova.compute.manager [instance: 5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 733.847699] 
env[61839]: ERROR nova.compute.manager [instance: 5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8] self[:] = self._gt.wait() [ 733.847699] env[61839]: ERROR nova.compute.manager [instance: 5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 733.847699] env[61839]: ERROR nova.compute.manager [instance: 5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8] return self._exit_event.wait() [ 733.847699] env[61839]: ERROR nova.compute.manager [instance: 5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 733.848146] env[61839]: ERROR nova.compute.manager [instance: 5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8] result = hub.switch() [ 733.848146] env[61839]: ERROR nova.compute.manager [instance: 5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 733.848146] env[61839]: ERROR nova.compute.manager [instance: 5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8] return self.greenlet.switch() [ 733.848146] env[61839]: ERROR nova.compute.manager [instance: 5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 733.848146] env[61839]: ERROR nova.compute.manager [instance: 5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8] result = function(*args, **kwargs) [ 733.848146] env[61839]: ERROR nova.compute.manager [instance: 5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 733.848146] env[61839]: ERROR nova.compute.manager [instance: 5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8] return func(*args, **kwargs) [ 733.848146] env[61839]: ERROR nova.compute.manager [instance: 5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 733.848146] env[61839]: ERROR nova.compute.manager [instance: 5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8] raise e [ 733.848146] env[61839]: ERROR nova.compute.manager [instance: 5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 733.848146] env[61839]: ERROR nova.compute.manager [instance: 5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8] nwinfo = self.network_api.allocate_for_instance( [ 733.848146] env[61839]: ERROR nova.compute.manager [instance: 5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 733.848146] env[61839]: ERROR nova.compute.manager [instance: 5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8] created_port_ids = self._update_ports_for_instance( [ 733.848573] env[61839]: ERROR nova.compute.manager [instance: 5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 733.848573] env[61839]: ERROR nova.compute.manager [instance: 5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8] with excutils.save_and_reraise_exception(): [ 733.848573] env[61839]: ERROR nova.compute.manager [instance: 5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 733.848573] env[61839]: ERROR nova.compute.manager [instance: 5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8] self.force_reraise() [ 733.848573] env[61839]: ERROR nova.compute.manager [instance: 5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 733.848573] env[61839]: ERROR nova.compute.manager [instance: 5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8] raise self.value [ 733.848573] env[61839]: ERROR nova.compute.manager [instance: 5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 733.848573] env[61839]: ERROR nova.compute.manager [instance: 5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8] updated_port = self._update_port( [ 733.848573] env[61839]: ERROR nova.compute.manager [instance: 5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 733.848573] env[61839]: ERROR nova.compute.manager [instance: 5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8] _ensure_no_port_binding_failure(port) [ 733.848573] env[61839]: ERROR nova.compute.manager [instance: 5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 733.848573] env[61839]: ERROR nova.compute.manager [instance: 5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8] raise exception.PortBindingFailed(port_id=port['id']) [ 733.848966] env[61839]: ERROR nova.compute.manager [instance: 5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8] nova.exception.PortBindingFailed: Binding failed for port 9d6dd4ce-4303-47c2-b801-9e788af014ff, please check neutron logs for more information. [ 733.848966] env[61839]: ERROR nova.compute.manager [instance: 5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8] [ 733.848966] env[61839]: DEBUG nova.compute.utils [None req-a61be0be-1acd-4bdd-b910-e27219e7a065 tempest-ServersAdminNegativeTestJSON-965769092 tempest-ServersAdminNegativeTestJSON-965769092-project-member] [instance: 5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8] Binding failed for port 9d6dd4ce-4303-47c2-b801-9e788af014ff, please check neutron logs for more information. {{(pid=61839) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 733.848966] env[61839]: DEBUG oslo_concurrency.lockutils [None req-bfae6393-6992-4f8d-b92d-1fa0650840c0 tempest-ListImageFiltersTestJSON-1646419560 tempest-ListImageFiltersTestJSON-1646419560-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.284s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 733.848966] env[61839]: INFO nova.compute.claims [None req-bfae6393-6992-4f8d-b92d-1fa0650840c0 tempest-ListImageFiltersTestJSON-1646419560 tempest-ListImageFiltersTestJSON-1646419560-project-member] [instance: 56993a6d-de55-4648-9fd9-31d06a57f300] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 733.852027] env[61839]: DEBUG nova.compute.manager [None req-a61be0be-1acd-4bdd-b910-e27219e7a065 tempest-ServersAdminNegativeTestJSON-965769092 tempest-ServersAdminNegativeTestJSON-965769092-project-member] [instance: 5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8] Build of instance 5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8 was re-scheduled: Binding failed for port 9d6dd4ce-4303-47c2-b801-9e788af014ff, please check neutron logs for more information. 
{{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 733.853963] env[61839]: DEBUG nova.compute.manager [None req-a61be0be-1acd-4bdd-b910-e27219e7a065 tempest-ServersAdminNegativeTestJSON-965769092 tempest-ServersAdminNegativeTestJSON-965769092-project-member] [instance: 5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8] Unplugging VIFs for instance {{(pid=61839) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 733.854264] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a61be0be-1acd-4bdd-b910-e27219e7a065 tempest-ServersAdminNegativeTestJSON-965769092 tempest-ServersAdminNegativeTestJSON-965769092-project-member] Acquiring lock "refresh_cache-5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 733.854425] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a61be0be-1acd-4bdd-b910-e27219e7a065 tempest-ServersAdminNegativeTestJSON-965769092 tempest-ServersAdminNegativeTestJSON-965769092-project-member] Acquired lock "refresh_cache-5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 733.854585] env[61839]: DEBUG nova.network.neutron [None req-a61be0be-1acd-4bdd-b910-e27219e7a065 tempest-ServersAdminNegativeTestJSON-965769092 tempest-ServersAdminNegativeTestJSON-965769092-project-member] [instance: 5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 733.920499] env[61839]: DEBUG nova.compute.manager [None req-1fbb0b67-167b-4268-b2f4-5c0b4d6a4a9c tempest-ServersNegativeTestMultiTenantJSON-1778640072 tempest-ServersNegativeTestMultiTenantJSON-1778640072-project-member] [instance: 4b7ef74e-4018-4c6e-b540-d65c986d1ff2] Start spawning the instance on the hypervisor. 
{{(pid=61839) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 733.954455] env[61839]: DEBUG nova.virt.hardware [None req-1fbb0b67-167b-4268-b2f4-5c0b4d6a4a9c tempest-ServersNegativeTestMultiTenantJSON-1778640072 tempest-ServersNegativeTestMultiTenantJSON-1778640072-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-18T16:51:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-18T16:50:57Z,direct_url=<?>,disk_format='vmdk',id=e497cc62-282a-4a70-9770-22d80d8a1013,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a98e034276e44534a9feace637762da7',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-10-18T16:50:58Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 733.954455] env[61839]: DEBUG nova.virt.hardware [None req-1fbb0b67-167b-4268-b2f4-5c0b4d6a4a9c tempest-ServersNegativeTestMultiTenantJSON-1778640072 tempest-ServersNegativeTestMultiTenantJSON-1778640072-project-member] Flavor limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 733.954455] env[61839]: DEBUG nova.virt.hardware [None req-1fbb0b67-167b-4268-b2f4-5c0b4d6a4a9c tempest-ServersNegativeTestMultiTenantJSON-1778640072 tempest-ServersNegativeTestMultiTenantJSON-1778640072-project-member] Image limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 733.954657] env[61839]: DEBUG nova.virt.hardware [None req-1fbb0b67-167b-4268-b2f4-5c0b4d6a4a9c tempest-ServersNegativeTestMultiTenantJSON-1778640072 tempest-ServersNegativeTestMultiTenantJSON-1778640072-project-member] Flavor pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 733.954657] env[61839]: DEBUG nova.virt.hardware [None req-1fbb0b67-167b-4268-b2f4-5c0b4d6a4a9c tempest-ServersNegativeTestMultiTenantJSON-1778640072 tempest-ServersNegativeTestMultiTenantJSON-1778640072-project-member] Image pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 733.954789] env[61839]: DEBUG nova.virt.hardware [None req-1fbb0b67-167b-4268-b2f4-5c0b4d6a4a9c tempest-ServersNegativeTestMultiTenantJSON-1778640072 tempest-ServersNegativeTestMultiTenantJSON-1778640072-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 733.954962] env[61839]: DEBUG nova.virt.hardware [None req-1fbb0b67-167b-4268-b2f4-5c0b4d6a4a9c tempest-ServersNegativeTestMultiTenantJSON-1778640072 tempest-ServersNegativeTestMultiTenantJSON-1778640072-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 733.955129] env[61839]: DEBUG nova.virt.hardware [None req-1fbb0b67-167b-4268-b2f4-5c0b4d6a4a9c tempest-ServersNegativeTestMultiTenantJSON-1778640072 
tempest-ServersNegativeTestMultiTenantJSON-1778640072-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 733.955293] env[61839]: DEBUG nova.virt.hardware [None req-1fbb0b67-167b-4268-b2f4-5c0b4d6a4a9c tempest-ServersNegativeTestMultiTenantJSON-1778640072 tempest-ServersNegativeTestMultiTenantJSON-1778640072-project-member] Got 1 possible topologies {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 733.955459] env[61839]: DEBUG nova.virt.hardware [None req-1fbb0b67-167b-4268-b2f4-5c0b4d6a4a9c tempest-ServersNegativeTestMultiTenantJSON-1778640072 tempest-ServersNegativeTestMultiTenantJSON-1778640072-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 733.955627] env[61839]: DEBUG nova.virt.hardware [None req-1fbb0b67-167b-4268-b2f4-5c0b4d6a4a9c tempest-ServersNegativeTestMultiTenantJSON-1778640072 tempest-ServersNegativeTestMultiTenantJSON-1778640072-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 733.956835] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-214d8d85-117b-4444-98ab-681c0e905ad4 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.967692] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d17026e8-584b-4fe8-8fcb-799b9b1b0436 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.038666] env[61839]: ERROR nova.compute.manager [None req-1fbb0b67-167b-4268-b2f4-5c0b4d6a4a9c tempest-ServersNegativeTestMultiTenantJSON-1778640072 tempest-ServersNegativeTestMultiTenantJSON-1778640072-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port ddba8534-6cd9-4257-91ab-8c0543ed5713, please check neutron logs for more information. 
[ 734.038666] env[61839]: ERROR nova.compute.manager Traceback (most recent call last): [ 734.038666] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 734.038666] env[61839]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 734.038666] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 734.038666] env[61839]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 734.038666] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 734.038666] env[61839]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 734.038666] env[61839]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 734.038666] env[61839]: ERROR nova.compute.manager self.force_reraise() [ 734.038666] env[61839]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 734.038666] env[61839]: ERROR nova.compute.manager raise self.value [ 734.038666] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 734.038666] env[61839]: ERROR nova.compute.manager updated_port = self._update_port( [ 734.038666] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 734.038666] env[61839]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 734.039267] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 734.039267] env[61839]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 734.039267] env[61839]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port ddba8534-6cd9-4257-91ab-8c0543ed5713, please check neutron logs for more information. 
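The PortBindingFailed at the bottom of the traceback above is raised by _ensure_no_port_binding_failure (the nova/network/neutron.py:294 frame). A minimal sketch of that guard, assuming Neutron reports the binding outcome in the port's binding:vif_type attribute and that a failed binding is reported with the literal value 'binding_failed'; both details are assumptions consistent with this log, not quoted from the nova tree:

from nova import exception

def _ensure_no_port_binding_failure(port):
    # Neutron records the result of port binding in binding:vif_type.
    # 'binding_failed' (assumed literal) means no mechanism driver could
    # bind the port; nova converts that into the PortBindingFailed seen
    # throughout this log rather than spawning a VM with a dead VIF.
    if port.get('binding:vif_type') == 'binding_failed':
        raise exception.PortBindingFailed(port_id=port['id'])

When this fires during _build_and_run_instance, the compute manager aborts the resource claim and re-schedules the build, which matches the abort_instance_claim and "was re-scheduled" entries earlier in this section.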
[ 734.039267] env[61839]: ERROR nova.compute.manager [ 734.039267] env[61839]: Traceback (most recent call last): [ 734.039267] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 734.039267] env[61839]: listener.cb(fileno) [ 734.039267] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 734.039267] env[61839]: result = function(*args, **kwargs) [ 734.039267] env[61839]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 734.039267] env[61839]: return func(*args, **kwargs) [ 734.039267] env[61839]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 734.039267] env[61839]: raise e [ 734.039267] env[61839]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 734.039267] env[61839]: nwinfo = self.network_api.allocate_for_instance( [ 734.039267] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 734.039267] env[61839]: created_port_ids = self._update_ports_for_instance( [ 734.039267] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 734.039267] env[61839]: with excutils.save_and_reraise_exception(): [ 734.039267] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 734.039267] env[61839]: self.force_reraise() [ 734.039267] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 734.039267] env[61839]: raise self.value [ 734.039267] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 734.039267] env[61839]: updated_port = self._update_port( [ 734.039267] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 734.039267] env[61839]: _ensure_no_port_binding_failure(port) [ 734.039267] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 734.039267] env[61839]: raise exception.PortBindingFailed(port_id=port['id']) [ 734.040121] env[61839]: nova.exception.PortBindingFailed: Binding failed for port ddba8534-6cd9-4257-91ab-8c0543ed5713, please check neutron logs for more information. [ 734.040121] env[61839]: Removing descriptor: 21 [ 734.040121] env[61839]: ERROR nova.compute.manager [None req-1fbb0b67-167b-4268-b2f4-5c0b4d6a4a9c tempest-ServersNegativeTestMultiTenantJSON-1778640072 tempest-ServersNegativeTestMultiTenantJSON-1778640072-project-member] [instance: 4b7ef74e-4018-4c6e-b540-d65c986d1ff2] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port ddba8534-6cd9-4257-91ab-8c0543ed5713, please check neutron logs for more information. 
[ 734.040121] env[61839]: ERROR nova.compute.manager [instance: 4b7ef74e-4018-4c6e-b540-d65c986d1ff2] Traceback (most recent call last): [ 734.040121] env[61839]: ERROR nova.compute.manager [instance: 4b7ef74e-4018-4c6e-b540-d65c986d1ff2] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 734.040121] env[61839]: ERROR nova.compute.manager [instance: 4b7ef74e-4018-4c6e-b540-d65c986d1ff2] yield resources [ 734.040121] env[61839]: ERROR nova.compute.manager [instance: 4b7ef74e-4018-4c6e-b540-d65c986d1ff2] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 734.040121] env[61839]: ERROR nova.compute.manager [instance: 4b7ef74e-4018-4c6e-b540-d65c986d1ff2] self.driver.spawn(context, instance, image_meta, [ 734.040121] env[61839]: ERROR nova.compute.manager [instance: 4b7ef74e-4018-4c6e-b540-d65c986d1ff2] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 734.040121] env[61839]: ERROR nova.compute.manager [instance: 4b7ef74e-4018-4c6e-b540-d65c986d1ff2] self._vmops.spawn(context, instance, image_meta, injected_files, [ 734.040121] env[61839]: ERROR nova.compute.manager [instance: 4b7ef74e-4018-4c6e-b540-d65c986d1ff2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 734.040121] env[61839]: ERROR nova.compute.manager [instance: 4b7ef74e-4018-4c6e-b540-d65c986d1ff2] vm_ref = self.build_virtual_machine(instance, [ 734.040516] env[61839]: ERROR nova.compute.manager [instance: 4b7ef74e-4018-4c6e-b540-d65c986d1ff2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 734.040516] env[61839]: ERROR nova.compute.manager [instance: 4b7ef74e-4018-4c6e-b540-d65c986d1ff2] vif_infos = vmwarevif.get_vif_info(self._session, [ 734.040516] env[61839]: ERROR nova.compute.manager [instance: 4b7ef74e-4018-4c6e-b540-d65c986d1ff2] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 734.040516] env[61839]: ERROR nova.compute.manager [instance: 4b7ef74e-4018-4c6e-b540-d65c986d1ff2] for vif in network_info: [ 734.040516] env[61839]: ERROR nova.compute.manager [instance: 4b7ef74e-4018-4c6e-b540-d65c986d1ff2] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 734.040516] env[61839]: ERROR nova.compute.manager [instance: 4b7ef74e-4018-4c6e-b540-d65c986d1ff2] return self._sync_wrapper(fn, *args, **kwargs) [ 734.040516] env[61839]: ERROR nova.compute.manager [instance: 4b7ef74e-4018-4c6e-b540-d65c986d1ff2] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 734.040516] env[61839]: ERROR nova.compute.manager [instance: 4b7ef74e-4018-4c6e-b540-d65c986d1ff2] self.wait() [ 734.040516] env[61839]: ERROR nova.compute.manager [instance: 4b7ef74e-4018-4c6e-b540-d65c986d1ff2] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 734.040516] env[61839]: ERROR nova.compute.manager [instance: 4b7ef74e-4018-4c6e-b540-d65c986d1ff2] self[:] = self._gt.wait() [ 734.040516] env[61839]: ERROR nova.compute.manager [instance: 4b7ef74e-4018-4c6e-b540-d65c986d1ff2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 734.040516] env[61839]: ERROR nova.compute.manager [instance: 4b7ef74e-4018-4c6e-b540-d65c986d1ff2] return self._exit_event.wait() [ 734.040516] env[61839]: ERROR nova.compute.manager [instance: 4b7ef74e-4018-4c6e-b540-d65c986d1ff2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 734.040876] env[61839]: ERROR 
nova.compute.manager [instance: 4b7ef74e-4018-4c6e-b540-d65c986d1ff2] result = hub.switch() [ 734.040876] env[61839]: ERROR nova.compute.manager [instance: 4b7ef74e-4018-4c6e-b540-d65c986d1ff2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 734.040876] env[61839]: ERROR nova.compute.manager [instance: 4b7ef74e-4018-4c6e-b540-d65c986d1ff2] return self.greenlet.switch() [ 734.040876] env[61839]: ERROR nova.compute.manager [instance: 4b7ef74e-4018-4c6e-b540-d65c986d1ff2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 734.040876] env[61839]: ERROR nova.compute.manager [instance: 4b7ef74e-4018-4c6e-b540-d65c986d1ff2] result = function(*args, **kwargs) [ 734.040876] env[61839]: ERROR nova.compute.manager [instance: 4b7ef74e-4018-4c6e-b540-d65c986d1ff2] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 734.040876] env[61839]: ERROR nova.compute.manager [instance: 4b7ef74e-4018-4c6e-b540-d65c986d1ff2] return func(*args, **kwargs) [ 734.040876] env[61839]: ERROR nova.compute.manager [instance: 4b7ef74e-4018-4c6e-b540-d65c986d1ff2] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 734.040876] env[61839]: ERROR nova.compute.manager [instance: 4b7ef74e-4018-4c6e-b540-d65c986d1ff2] raise e [ 734.040876] env[61839]: ERROR nova.compute.manager [instance: 4b7ef74e-4018-4c6e-b540-d65c986d1ff2] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 734.040876] env[61839]: ERROR nova.compute.manager [instance: 4b7ef74e-4018-4c6e-b540-d65c986d1ff2] nwinfo = self.network_api.allocate_for_instance( [ 734.040876] env[61839]: ERROR nova.compute.manager [instance: 4b7ef74e-4018-4c6e-b540-d65c986d1ff2] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 734.040876] env[61839]: ERROR nova.compute.manager [instance: 4b7ef74e-4018-4c6e-b540-d65c986d1ff2] created_port_ids = self._update_ports_for_instance( [ 734.041272] env[61839]: ERROR nova.compute.manager [instance: 4b7ef74e-4018-4c6e-b540-d65c986d1ff2] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 734.041272] env[61839]: ERROR nova.compute.manager [instance: 4b7ef74e-4018-4c6e-b540-d65c986d1ff2] with excutils.save_and_reraise_exception(): [ 734.041272] env[61839]: ERROR nova.compute.manager [instance: 4b7ef74e-4018-4c6e-b540-d65c986d1ff2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 734.041272] env[61839]: ERROR nova.compute.manager [instance: 4b7ef74e-4018-4c6e-b540-d65c986d1ff2] self.force_reraise() [ 734.041272] env[61839]: ERROR nova.compute.manager [instance: 4b7ef74e-4018-4c6e-b540-d65c986d1ff2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 734.041272] env[61839]: ERROR nova.compute.manager [instance: 4b7ef74e-4018-4c6e-b540-d65c986d1ff2] raise self.value [ 734.041272] env[61839]: ERROR nova.compute.manager [instance: 4b7ef74e-4018-4c6e-b540-d65c986d1ff2] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 734.041272] env[61839]: ERROR nova.compute.manager [instance: 4b7ef74e-4018-4c6e-b540-d65c986d1ff2] updated_port = self._update_port( [ 734.041272] env[61839]: ERROR nova.compute.manager [instance: 4b7ef74e-4018-4c6e-b540-d65c986d1ff2] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 734.041272] 
env[61839]: ERROR nova.compute.manager [instance: 4b7ef74e-4018-4c6e-b540-d65c986d1ff2] _ensure_no_port_binding_failure(port) [ 734.041272] env[61839]: ERROR nova.compute.manager [instance: 4b7ef74e-4018-4c6e-b540-d65c986d1ff2] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 734.041272] env[61839]: ERROR nova.compute.manager [instance: 4b7ef74e-4018-4c6e-b540-d65c986d1ff2] raise exception.PortBindingFailed(port_id=port['id']) [ 734.041610] env[61839]: ERROR nova.compute.manager [instance: 4b7ef74e-4018-4c6e-b540-d65c986d1ff2] nova.exception.PortBindingFailed: Binding failed for port ddba8534-6cd9-4257-91ab-8c0543ed5713, please check neutron logs for more information. [ 734.041610] env[61839]: ERROR nova.compute.manager [instance: 4b7ef74e-4018-4c6e-b540-d65c986d1ff2] [ 734.041610] env[61839]: INFO nova.compute.manager [None req-1fbb0b67-167b-4268-b2f4-5c0b4d6a4a9c tempest-ServersNegativeTestMultiTenantJSON-1778640072 tempest-ServersNegativeTestMultiTenantJSON-1778640072-project-member] [instance: 4b7ef74e-4018-4c6e-b540-d65c986d1ff2] Terminating instance [ 734.041610] env[61839]: DEBUG oslo_concurrency.lockutils [None req-1fbb0b67-167b-4268-b2f4-5c0b4d6a4a9c tempest-ServersNegativeTestMultiTenantJSON-1778640072 tempest-ServersNegativeTestMultiTenantJSON-1778640072-project-member] Acquiring lock "refresh_cache-4b7ef74e-4018-4c6e-b540-d65c986d1ff2" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 734.041610] env[61839]: DEBUG oslo_concurrency.lockutils [None req-1fbb0b67-167b-4268-b2f4-5c0b4d6a4a9c tempest-ServersNegativeTestMultiTenantJSON-1778640072 tempest-ServersNegativeTestMultiTenantJSON-1778640072-project-member] Acquired lock "refresh_cache-4b7ef74e-4018-4c6e-b540-d65c986d1ff2" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 734.041610] env[61839]: DEBUG nova.network.neutron [None req-1fbb0b67-167b-4268-b2f4-5c0b4d6a4a9c tempest-ServersNegativeTestMultiTenantJSON-1778640072 tempest-ServersNegativeTestMultiTenantJSON-1778640072-project-member] [instance: 4b7ef74e-4018-4c6e-b540-d65c986d1ff2] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 734.174674] env[61839]: DEBUG nova.network.neutron [-] [instance: fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 734.379324] env[61839]: DEBUG nova.network.neutron [None req-a61be0be-1acd-4bdd-b910-e27219e7a065 tempest-ServersAdminNegativeTestJSON-965769092 tempest-ServersAdminNegativeTestJSON-965769092-project-member] [instance: 5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8] Instance cache missing network info. 
{{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 734.454591] env[61839]: DEBUG nova.network.neutron [None req-a61be0be-1acd-4bdd-b910-e27219e7a065 tempest-ServersAdminNegativeTestJSON-965769092 tempest-ServersAdminNegativeTestJSON-965769092-project-member] [instance: 5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 734.570122] env[61839]: DEBUG nova.network.neutron [None req-1fbb0b67-167b-4268-b2f4-5c0b4d6a4a9c tempest-ServersNegativeTestMultiTenantJSON-1778640072 tempest-ServersNegativeTestMultiTenantJSON-1778640072-project-member] [instance: 4b7ef74e-4018-4c6e-b540-d65c986d1ff2] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 734.677233] env[61839]: INFO nova.compute.manager [-] [instance: fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1] Took 1.02 seconds to deallocate network for instance. [ 734.682610] env[61839]: DEBUG nova.compute.claims [None req-f0f367ff-b05e-4ad3-a843-4493b18ee6be tempest-ListImageFiltersTestJSON-1646419560 tempest-ListImageFiltersTestJSON-1646419560-project-member] [instance: fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1] Aborting claim: {{(pid=61839) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 734.682801] env[61839]: DEBUG oslo_concurrency.lockutils [None req-f0f367ff-b05e-4ad3-a843-4493b18ee6be tempest-ListImageFiltersTestJSON-1646419560 tempest-ListImageFiltersTestJSON-1646419560-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 734.724761] env[61839]: DEBUG nova.network.neutron [None req-1fbb0b67-167b-4268-b2f4-5c0b4d6a4a9c tempest-ServersNegativeTestMultiTenantJSON-1778640072 tempest-ServersNegativeTestMultiTenantJSON-1778640072-project-member] [instance: 4b7ef74e-4018-4c6e-b540-d65c986d1ff2] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 734.958046] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a61be0be-1acd-4bdd-b910-e27219e7a065 tempest-ServersAdminNegativeTestJSON-965769092 tempest-ServersAdminNegativeTestJSON-965769092-project-member] Releasing lock "refresh_cache-5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 734.958482] env[61839]: DEBUG nova.compute.manager [None req-a61be0be-1acd-4bdd-b910-e27219e7a065 tempest-ServersAdminNegativeTestJSON-965769092 tempest-ServersAdminNegativeTestJSON-965769092-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61839) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 734.958687] env[61839]: DEBUG nova.compute.manager [None req-a61be0be-1acd-4bdd-b910-e27219e7a065 tempest-ServersAdminNegativeTestJSON-965769092 tempest-ServersAdminNegativeTestJSON-965769092-project-member] [instance: 5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 734.958883] env[61839]: DEBUG nova.network.neutron [None req-a61be0be-1acd-4bdd-b910-e27219e7a065 tempest-ServersAdminNegativeTestJSON-965769092 tempest-ServersAdminNegativeTestJSON-965769092-project-member] [instance: 5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 734.990014] env[61839]: DEBUG nova.network.neutron [None req-a61be0be-1acd-4bdd-b910-e27219e7a065 tempest-ServersAdminNegativeTestJSON-965769092 tempest-ServersAdminNegativeTestJSON-965769092-project-member] [instance: 5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 735.198168] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32a0b3de-4747-48ea-a418-aaf7bf20696b {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.206428] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ccb0d8f-0b3f-426c-b29d-e4f9c8d544cc {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.237931] env[61839]: DEBUG oslo_concurrency.lockutils [None req-1fbb0b67-167b-4268-b2f4-5c0b4d6a4a9c tempest-ServersNegativeTestMultiTenantJSON-1778640072 tempest-ServersNegativeTestMultiTenantJSON-1778640072-project-member] Releasing lock "refresh_cache-4b7ef74e-4018-4c6e-b540-d65c986d1ff2" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 735.238392] env[61839]: DEBUG nova.compute.manager [None req-1fbb0b67-167b-4268-b2f4-5c0b4d6a4a9c tempest-ServersNegativeTestMultiTenantJSON-1778640072 tempest-ServersNegativeTestMultiTenantJSON-1778640072-project-member] [instance: 4b7ef74e-4018-4c6e-b540-d65c986d1ff2] Start destroying the instance on the hypervisor. 
{{(pid=61839) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 735.238589] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-1fbb0b67-167b-4268-b2f4-5c0b4d6a4a9c tempest-ServersNegativeTestMultiTenantJSON-1778640072 tempest-ServersNegativeTestMultiTenantJSON-1778640072-project-member] [instance: 4b7ef74e-4018-4c6e-b540-d65c986d1ff2] Destroying instance {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 735.239030] env[61839]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-88a6f688-f775-42f3-95fb-47ed3bfcca0e {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.241281] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a65a4528-440e-4f64-82ff-8a33686784db {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.250419] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45499229-8a99-44f8-a8ee-22e840cf2789 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.258738] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52198b7e-5d71-4619-bd12-bb7896c157a1 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.280792] env[61839]: DEBUG nova.compute.provider_tree [None req-bfae6393-6992-4f8d-b92d-1fa0650840c0 tempest-ListImageFiltersTestJSON-1646419560 tempest-ListImageFiltersTestJSON-1646419560-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 735.289681] env[61839]: WARNING nova.virt.vmwareapi.vmops [None req-1fbb0b67-167b-4268-b2f4-5c0b4d6a4a9c tempest-ServersNegativeTestMultiTenantJSON-1778640072 tempest-ServersNegativeTestMultiTenantJSON-1778640072-project-member] [instance: 4b7ef74e-4018-4c6e-b540-d65c986d1ff2] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 4b7ef74e-4018-4c6e-b540-d65c986d1ff2 could not be found. [ 735.289910] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-1fbb0b67-167b-4268-b2f4-5c0b4d6a4a9c tempest-ServersNegativeTestMultiTenantJSON-1778640072 tempest-ServersNegativeTestMultiTenantJSON-1778640072-project-member] [instance: 4b7ef74e-4018-4c6e-b540-d65c986d1ff2] Instance destroyed {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 735.290104] env[61839]: INFO nova.compute.manager [None req-1fbb0b67-167b-4268-b2f4-5c0b4d6a4a9c tempest-ServersNegativeTestMultiTenantJSON-1778640072 tempest-ServersNegativeTestMultiTenantJSON-1778640072-project-member] [instance: 4b7ef74e-4018-4c6e-b540-d65c986d1ff2] Took 0.05 seconds to destroy the instance on the hypervisor. [ 735.290354] env[61839]: DEBUG oslo.service.loopingcall [None req-1fbb0b67-167b-4268-b2f4-5c0b4d6a4a9c tempest-ServersNegativeTestMultiTenantJSON-1778640072 tempest-ServersNegativeTestMultiTenantJSON-1778640072-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 735.290785] env[61839]: DEBUG nova.compute.manager [-] [instance: 4b7ef74e-4018-4c6e-b540-d65c986d1ff2] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 735.290883] env[61839]: DEBUG nova.network.neutron [-] [instance: 4b7ef74e-4018-4c6e-b540-d65c986d1ff2] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 735.324752] env[61839]: DEBUG nova.network.neutron [-] [instance: 4b7ef74e-4018-4c6e-b540-d65c986d1ff2] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 735.439859] env[61839]: DEBUG nova.compute.manager [req-7a49b343-370e-4cdf-98ae-8a314961652d req-90adf863-a6e8-4b0a-aa09-f6ab54b64297 service nova] [instance: 4b7ef74e-4018-4c6e-b540-d65c986d1ff2] Received event network-changed-ddba8534-6cd9-4257-91ab-8c0543ed5713 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 735.440068] env[61839]: DEBUG nova.compute.manager [req-7a49b343-370e-4cdf-98ae-8a314961652d req-90adf863-a6e8-4b0a-aa09-f6ab54b64297 service nova] [instance: 4b7ef74e-4018-4c6e-b540-d65c986d1ff2] Refreshing instance network info cache due to event network-changed-ddba8534-6cd9-4257-91ab-8c0543ed5713. {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 735.440281] env[61839]: DEBUG oslo_concurrency.lockutils [req-7a49b343-370e-4cdf-98ae-8a314961652d req-90adf863-a6e8-4b0a-aa09-f6ab54b64297 service nova] Acquiring lock "refresh_cache-4b7ef74e-4018-4c6e-b540-d65c986d1ff2" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 735.440425] env[61839]: DEBUG oslo_concurrency.lockutils [req-7a49b343-370e-4cdf-98ae-8a314961652d req-90adf863-a6e8-4b0a-aa09-f6ab54b64297 service nova] Acquired lock "refresh_cache-4b7ef74e-4018-4c6e-b540-d65c986d1ff2" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 735.440663] env[61839]: DEBUG nova.network.neutron [req-7a49b343-370e-4cdf-98ae-8a314961652d req-90adf863-a6e8-4b0a-aa09-f6ab54b64297 service nova] [instance: 4b7ef74e-4018-4c6e-b540-d65c986d1ff2] Refreshing network info cache for port ddba8534-6cd9-4257-91ab-8c0543ed5713 {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 735.492678] env[61839]: DEBUG nova.network.neutron [None req-a61be0be-1acd-4bdd-b910-e27219e7a065 tempest-ServersAdminNegativeTestJSON-965769092 tempest-ServersAdminNegativeTestJSON-965769092-project-member] [instance: 5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 735.792195] env[61839]: DEBUG nova.scheduler.client.report [None req-bfae6393-6992-4f8d-b92d-1fa0650840c0 tempest-ListImageFiltersTestJSON-1646419560 tempest-ListImageFiltersTestJSON-1646419560-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 
135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 735.827886] env[61839]: DEBUG nova.network.neutron [-] [instance: 4b7ef74e-4018-4c6e-b540-d65c986d1ff2] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 735.961952] env[61839]: DEBUG nova.network.neutron [req-7a49b343-370e-4cdf-98ae-8a314961652d req-90adf863-a6e8-4b0a-aa09-f6ab54b64297 service nova] [instance: 4b7ef74e-4018-4c6e-b540-d65c986d1ff2] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 735.998949] env[61839]: INFO nova.compute.manager [None req-a61be0be-1acd-4bdd-b910-e27219e7a065 tempest-ServersAdminNegativeTestJSON-965769092 tempest-ServersAdminNegativeTestJSON-965769092-project-member] [instance: 5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8] Took 1.04 seconds to deallocate network for instance. [ 736.131850] env[61839]: DEBUG nova.network.neutron [req-7a49b343-370e-4cdf-98ae-8a314961652d req-90adf863-a6e8-4b0a-aa09-f6ab54b64297 service nova] [instance: 4b7ef74e-4018-4c6e-b540-d65c986d1ff2] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 736.297212] env[61839]: DEBUG oslo_concurrency.lockutils [None req-bfae6393-6992-4f8d-b92d-1fa0650840c0 tempest-ListImageFiltersTestJSON-1646419560 tempest-ListImageFiltersTestJSON-1646419560-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.450s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 736.298234] env[61839]: DEBUG nova.compute.manager [None req-bfae6393-6992-4f8d-b92d-1fa0650840c0 tempest-ListImageFiltersTestJSON-1646419560 tempest-ListImageFiltersTestJSON-1646419560-project-member] [instance: 56993a6d-de55-4648-9fd9-31d06a57f300] Start building networks asynchronously for instance. {{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 736.300348] env[61839]: DEBUG oslo_concurrency.lockutils [None req-21cfb57d-6c01-48f6-81e9-529dd1bafa3c tempest-ServersAdminTestJSON-399292093 tempest-ServersAdminTestJSON-399292093-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.802s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 736.302039] env[61839]: INFO nova.compute.claims [None req-21cfb57d-6c01-48f6-81e9-529dd1bafa3c tempest-ServersAdminTestJSON-399292093 tempest-ServersAdminTestJSON-399292093-project-member] [instance: 50140f35-6282-41dc-a66c-f041f33769d7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 736.330865] env[61839]: INFO nova.compute.manager [-] [instance: 4b7ef74e-4018-4c6e-b540-d65c986d1ff2] Took 1.04 seconds to deallocate network for instance. 
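The claim and inventory entries around here are easier to read with the arithmetic spelled out: Placement capacity per resource class is (total - reserved) * allocation_ratio. A small worked sketch using the exact values from the inventory report logged above (the formula is standard Placement behavior; the script itself is illustrative):

# Values copied from the inventory report for provider
# cef329e6-1ccd-42a8-bbc4-109a06d1c908 logged above.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(rc, capacity)
# VCPU 192.0 (48 cores oversubscribed 4x)
# MEMORY_MB 196078.0
# DISK_GB 400.0

Against those limits an m1.nano claim (1 vCPU, 192 MB RAM, 1 GB root disk per the flavor logged earlier) is tiny, which is why the "Claim successful on node domain-c8..." entries keep succeeding even while port binding fails.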
[ 736.333761] env[61839]: DEBUG nova.compute.claims [None req-1fbb0b67-167b-4268-b2f4-5c0b4d6a4a9c tempest-ServersNegativeTestMultiTenantJSON-1778640072 tempest-ServersNegativeTestMultiTenantJSON-1778640072-project-member] [instance: 4b7ef74e-4018-4c6e-b540-d65c986d1ff2] Aborting claim: {{(pid=61839) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 736.333761] env[61839]: DEBUG oslo_concurrency.lockutils [None req-1fbb0b67-167b-4268-b2f4-5c0b4d6a4a9c tempest-ServersNegativeTestMultiTenantJSON-1778640072 tempest-ServersNegativeTestMultiTenantJSON-1778640072-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 736.634751] env[61839]: DEBUG oslo_concurrency.lockutils [req-7a49b343-370e-4cdf-98ae-8a314961652d req-90adf863-a6e8-4b0a-aa09-f6ab54b64297 service nova] Releasing lock "refresh_cache-4b7ef74e-4018-4c6e-b540-d65c986d1ff2" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 736.634751] env[61839]: DEBUG nova.compute.manager [req-7a49b343-370e-4cdf-98ae-8a314961652d req-90adf863-a6e8-4b0a-aa09-f6ab54b64297 service nova] [instance: 4b7ef74e-4018-4c6e-b540-d65c986d1ff2] Received event network-vif-deleted-ddba8534-6cd9-4257-91ab-8c0543ed5713 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 736.807124] env[61839]: DEBUG nova.compute.utils [None req-bfae6393-6992-4f8d-b92d-1fa0650840c0 tempest-ListImageFiltersTestJSON-1646419560 tempest-ListImageFiltersTestJSON-1646419560-project-member] Using /dev/sd instead of None {{(pid=61839) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 736.812699] env[61839]: DEBUG nova.compute.manager [None req-bfae6393-6992-4f8d-b92d-1fa0650840c0 tempest-ListImageFiltersTestJSON-1646419560 tempest-ListImageFiltersTestJSON-1646419560-project-member] [instance: 56993a6d-de55-4648-9fd9-31d06a57f300] Allocating IP information in the background. 
{{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 736.812970] env[61839]: DEBUG nova.network.neutron [None req-bfae6393-6992-4f8d-b92d-1fa0650840c0 tempest-ListImageFiltersTestJSON-1646419560 tempest-ListImageFiltersTestJSON-1646419560-project-member] [instance: 56993a6d-de55-4648-9fd9-31d06a57f300] allocate_for_instance() {{(pid=61839) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 736.873607] env[61839]: DEBUG nova.policy [None req-bfae6393-6992-4f8d-b92d-1fa0650840c0 tempest-ListImageFiltersTestJSON-1646419560 tempest-ListImageFiltersTestJSON-1646419560-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd61b7cb2fae040c19ddded799dd6b56b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd6b1a2f51ff2483a9656a635aedabeae', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61839) authorize /opt/stack/nova/nova/policy.py:201}} [ 737.038542] env[61839]: INFO nova.scheduler.client.report [None req-a61be0be-1acd-4bdd-b910-e27219e7a065 tempest-ServersAdminNegativeTestJSON-965769092 tempest-ServersAdminNegativeTestJSON-965769092-project-member] Deleted allocations for instance 5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8 [ 737.203040] env[61839]: DEBUG nova.network.neutron [None req-bfae6393-6992-4f8d-b92d-1fa0650840c0 tempest-ListImageFiltersTestJSON-1646419560 tempest-ListImageFiltersTestJSON-1646419560-project-member] [instance: 56993a6d-de55-4648-9fd9-31d06a57f300] Successfully created port: e04f298e-523d-4c71-859e-994f85a6983d {{(pid=61839) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 737.313964] env[61839]: DEBUG nova.compute.manager [None req-bfae6393-6992-4f8d-b92d-1fa0650840c0 tempest-ListImageFiltersTestJSON-1646419560 tempest-ListImageFiltersTestJSON-1646419560-project-member] [instance: 56993a6d-de55-4648-9fd9-31d06a57f300] Start building block device mappings for instance. 
{{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 737.550883] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a61be0be-1acd-4bdd-b910-e27219e7a065 tempest-ServersAdminNegativeTestJSON-965769092 tempest-ServersAdminNegativeTestJSON-965769092-project-member] Lock "5d203cc0-ecf7-4b19-bf96-6ad1d6a695f8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 152.243s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 737.675794] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a38df3a-cfc8-4f71-8985-9df637bd5a47 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.685602] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25811863-6e1a-4438-8f12-122a4d4b1b8c {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.721957] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2de08b14-05cc-4845-9af5-e2c621202270 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.729933] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc715014-04be-4e86-b1ac-112e1b0e90e7 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.746018] env[61839]: DEBUG nova.compute.provider_tree [None req-21cfb57d-6c01-48f6-81e9-529dd1bafa3c tempest-ServersAdminTestJSON-399292093 tempest-ServersAdminTestJSON-399292093-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 738.054457] env[61839]: DEBUG nova.compute.manager [None req-73f4029e-d3f8-4a4c-9058-31fdb7ebb120 tempest-ServerRescueTestJSONUnderV235-933831856 tempest-ServerRescueTestJSONUnderV235-933831856-project-member] [instance: 59ea60d5-7296-480c-ac03-ec0a7c021300] Starting instance... {{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 738.229472] env[61839]: DEBUG nova.compute.manager [req-ed440a92-85a5-45ab-8c33-387504ba3110 req-ef12e316-2063-4fb6-8536-d83b3293c33f service nova] [instance: 56993a6d-de55-4648-9fd9-31d06a57f300] Received event network-changed-e04f298e-523d-4c71-859e-994f85a6983d {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 738.230084] env[61839]: DEBUG nova.compute.manager [req-ed440a92-85a5-45ab-8c33-387504ba3110 req-ef12e316-2063-4fb6-8536-d83b3293c33f service nova] [instance: 56993a6d-de55-4648-9fd9-31d06a57f300] Refreshing instance network info cache due to event network-changed-e04f298e-523d-4c71-859e-994f85a6983d. 
{{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 738.230084] env[61839]: DEBUG oslo_concurrency.lockutils [req-ed440a92-85a5-45ab-8c33-387504ba3110 req-ef12e316-2063-4fb6-8536-d83b3293c33f service nova] Acquiring lock "refresh_cache-56993a6d-de55-4648-9fd9-31d06a57f300" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 738.230084] env[61839]: DEBUG oslo_concurrency.lockutils [req-ed440a92-85a5-45ab-8c33-387504ba3110 req-ef12e316-2063-4fb6-8536-d83b3293c33f service nova] Acquired lock "refresh_cache-56993a6d-de55-4648-9fd9-31d06a57f300" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 738.230217] env[61839]: DEBUG nova.network.neutron [req-ed440a92-85a5-45ab-8c33-387504ba3110 req-ef12e316-2063-4fb6-8536-d83b3293c33f service nova] [instance: 56993a6d-de55-4648-9fd9-31d06a57f300] Refreshing network info cache for port e04f298e-523d-4c71-859e-994f85a6983d {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 738.248115] env[61839]: DEBUG nova.scheduler.client.report [None req-21cfb57d-6c01-48f6-81e9-529dd1bafa3c tempest-ServersAdminTestJSON-399292093 tempest-ServersAdminTestJSON-399292093-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 738.331385] env[61839]: DEBUG nova.compute.manager [None req-bfae6393-6992-4f8d-b92d-1fa0650840c0 tempest-ListImageFiltersTestJSON-1646419560 tempest-ListImageFiltersTestJSON-1646419560-project-member] [instance: 56993a6d-de55-4648-9fd9-31d06a57f300] Start spawning the instance on the hypervisor. 
{{(pid=61839) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 738.370695] env[61839]: DEBUG nova.virt.hardware [None req-bfae6393-6992-4f8d-b92d-1fa0650840c0 tempest-ListImageFiltersTestJSON-1646419560 tempest-ListImageFiltersTestJSON-1646419560-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-18T16:51:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-18T16:50:57Z,direct_url=<?>,disk_format='vmdk',id=e497cc62-282a-4a70-9770-22d80d8a1013,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a98e034276e44534a9feace637762da7',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-10-18T16:50:58Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 738.370695] env[61839]: DEBUG nova.virt.hardware [None req-bfae6393-6992-4f8d-b92d-1fa0650840c0 tempest-ListImageFiltersTestJSON-1646419560 tempest-ListImageFiltersTestJSON-1646419560-project-member] Flavor limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 738.370695] env[61839]: DEBUG nova.virt.hardware [None req-bfae6393-6992-4f8d-b92d-1fa0650840c0 tempest-ListImageFiltersTestJSON-1646419560 tempest-ListImageFiltersTestJSON-1646419560-project-member] Image limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 738.371245] env[61839]: DEBUG nova.virt.hardware [None req-bfae6393-6992-4f8d-b92d-1fa0650840c0 tempest-ListImageFiltersTestJSON-1646419560 tempest-ListImageFiltersTestJSON-1646419560-project-member] Flavor pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 738.371245] env[61839]: DEBUG nova.virt.hardware [None req-bfae6393-6992-4f8d-b92d-1fa0650840c0 tempest-ListImageFiltersTestJSON-1646419560 tempest-ListImageFiltersTestJSON-1646419560-project-member] Image pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 738.371245] env[61839]: DEBUG nova.virt.hardware [None req-bfae6393-6992-4f8d-b92d-1fa0650840c0 tempest-ListImageFiltersTestJSON-1646419560 tempest-ListImageFiltersTestJSON-1646419560-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 738.371245] env[61839]: DEBUG nova.virt.hardware [None req-bfae6393-6992-4f8d-b92d-1fa0650840c0 tempest-ListImageFiltersTestJSON-1646419560 tempest-ListImageFiltersTestJSON-1646419560-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 738.371245] env[61839]: DEBUG nova.virt.hardware [None req-bfae6393-6992-4f8d-b92d-1fa0650840c0 tempest-ListImageFiltersTestJSON-1646419560 tempest-ListImageFiltersTestJSON-1646419560-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 738.371449] 
env[61839]: DEBUG nova.virt.hardware [None req-bfae6393-6992-4f8d-b92d-1fa0650840c0 tempest-ListImageFiltersTestJSON-1646419560 tempest-ListImageFiltersTestJSON-1646419560-project-member] Got 1 possible topologies {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 738.374026] env[61839]: DEBUG nova.virt.hardware [None req-bfae6393-6992-4f8d-b92d-1fa0650840c0 tempest-ListImageFiltersTestJSON-1646419560 tempest-ListImageFiltersTestJSON-1646419560-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 738.374026] env[61839]: DEBUG nova.virt.hardware [None req-bfae6393-6992-4f8d-b92d-1fa0650840c0 tempest-ListImageFiltersTestJSON-1646419560 tempest-ListImageFiltersTestJSON-1646419560-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 738.374026] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e5b1db3-7c5b-4435-a7c1-49a86398a9e8 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.383202] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-179f1092-6d7b-4e50-b86a-bf04dd0d90fc {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.496609] env[61839]: ERROR nova.compute.manager [None req-bfae6393-6992-4f8d-b92d-1fa0650840c0 tempest-ListImageFiltersTestJSON-1646419560 tempest-ListImageFiltersTestJSON-1646419560-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port e04f298e-523d-4c71-859e-994f85a6983d, please check neutron logs for more information. 
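The PortBindingFailed above is Nova reacting to the port state Neutron handed back: the bind attempt for port e04f298e-523d-4c71-859e-994f85a6983d left the port's binding:vif_type set to binding_failed, so the port can never be wired to a VIF and the build has to fail. A minimal sketch of the check behind the _ensure_no_port_binding_failure frame in the traceback below; the exception class and dict shape are illustrative stand-ins, not the exact Nova source:

    # Hedged sketch: how a "Binding failed for port ..." error is produced.
    # Neutron marks a port whose bind attempt failed with
    # binding:vif_type = "binding_failed"; the check below refuses it.
    VIF_TYPE_BINDING_FAILED = "binding_failed"

    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(
                "Binding failed for port %s, please check neutron logs "
                "for more information." % port_id)

    def ensure_no_port_binding_failure(port):
        # 'port' is the dict Neutron's API returns for a port, e.g.
        # {"id": "e04f298e-...", "binding:vif_type": "binding_failed", ...}
        if port.get("binding:vif_type") == VIF_TYPE_BINDING_FAILED:
            raise PortBindingFailed(port_id=port["id"])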
[ 738.496609] env[61839]: ERROR nova.compute.manager Traceback (most recent call last): [ 738.496609] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 738.496609] env[61839]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 738.496609] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 738.496609] env[61839]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 738.496609] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 738.496609] env[61839]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 738.496609] env[61839]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 738.496609] env[61839]: ERROR nova.compute.manager self.force_reraise() [ 738.496609] env[61839]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 738.496609] env[61839]: ERROR nova.compute.manager raise self.value [ 738.496609] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 738.496609] env[61839]: ERROR nova.compute.manager updated_port = self._update_port( [ 738.496609] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 738.496609] env[61839]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 738.497383] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 738.497383] env[61839]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 738.497383] env[61839]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port e04f298e-523d-4c71-859e-994f85a6983d, please check neutron logs for more information. 
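Every hop in the traceback above funnels through oslo_utils.excutils.save_and_reraise_exception (the __exit__ / force_reraise / "raise self.value" frames): cleanup runs inside the except path, then the original exception propagates unchanged. A simplified, self-contained reimplementation of that pattern, for illustration only:

    import contextlib

    @contextlib.contextmanager
    def save_and_reraise_exception():
        # Stand-in for oslo_utils.excutils.save_and_reraise_exception:
        # give the caller a chance to clean up, then re-raise the
        # original exception exactly as force_reraise() does above.
        try:
            yield
        except Exception:
            # e.g. delete half-created ports here before re-raising
            raise

    def update_port(port):
        with save_and_reraise_exception():
            ensure_no_port_binding_failure(port)  # sketch above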
[ 738.497383] env[61839]: ERROR nova.compute.manager [ 738.497383] env[61839]: Traceback (most recent call last): [ 738.497383] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 738.497383] env[61839]: listener.cb(fileno) [ 738.497383] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 738.497383] env[61839]: result = function(*args, **kwargs) [ 738.497383] env[61839]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 738.497383] env[61839]: return func(*args, **kwargs) [ 738.497383] env[61839]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 738.497383] env[61839]: raise e [ 738.497383] env[61839]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 738.497383] env[61839]: nwinfo = self.network_api.allocate_for_instance( [ 738.497383] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 738.497383] env[61839]: created_port_ids = self._update_ports_for_instance( [ 738.497383] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 738.497383] env[61839]: with excutils.save_and_reraise_exception(): [ 738.497383] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 738.497383] env[61839]: self.force_reraise() [ 738.497383] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 738.497383] env[61839]: raise self.value [ 738.497383] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 738.497383] env[61839]: updated_port = self._update_port( [ 738.497383] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 738.497383] env[61839]: _ensure_no_port_binding_failure(port) [ 738.497383] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 738.497383] env[61839]: raise exception.PortBindingFailed(port_id=port['id']) [ 738.498263] env[61839]: nova.exception.PortBindingFailed: Binding failed for port e04f298e-523d-4c71-859e-994f85a6983d, please check neutron logs for more information. [ 738.498263] env[61839]: Removing descriptor: 21 [ 738.498263] env[61839]: ERROR nova.compute.manager [None req-bfae6393-6992-4f8d-b92d-1fa0650840c0 tempest-ListImageFiltersTestJSON-1646419560 tempest-ListImageFiltersTestJSON-1646419560-project-member] [instance: 56993a6d-de55-4648-9fd9-31d06a57f300] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port e04f298e-523d-4c71-859e-994f85a6983d, please check neutron logs for more information. 
[ 738.498263] env[61839]: ERROR nova.compute.manager [instance: 56993a6d-de55-4648-9fd9-31d06a57f300] Traceback (most recent call last): [ 738.498263] env[61839]: ERROR nova.compute.manager [instance: 56993a6d-de55-4648-9fd9-31d06a57f300] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 738.498263] env[61839]: ERROR nova.compute.manager [instance: 56993a6d-de55-4648-9fd9-31d06a57f300] yield resources [ 738.498263] env[61839]: ERROR nova.compute.manager [instance: 56993a6d-de55-4648-9fd9-31d06a57f300] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 738.498263] env[61839]: ERROR nova.compute.manager [instance: 56993a6d-de55-4648-9fd9-31d06a57f300] self.driver.spawn(context, instance, image_meta, [ 738.498263] env[61839]: ERROR nova.compute.manager [instance: 56993a6d-de55-4648-9fd9-31d06a57f300] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 738.498263] env[61839]: ERROR nova.compute.manager [instance: 56993a6d-de55-4648-9fd9-31d06a57f300] self._vmops.spawn(context, instance, image_meta, injected_files, [ 738.498263] env[61839]: ERROR nova.compute.manager [instance: 56993a6d-de55-4648-9fd9-31d06a57f300] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 738.498263] env[61839]: ERROR nova.compute.manager [instance: 56993a6d-de55-4648-9fd9-31d06a57f300] vm_ref = self.build_virtual_machine(instance, [ 738.498659] env[61839]: ERROR nova.compute.manager [instance: 56993a6d-de55-4648-9fd9-31d06a57f300] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 738.498659] env[61839]: ERROR nova.compute.manager [instance: 56993a6d-de55-4648-9fd9-31d06a57f300] vif_infos = vmwarevif.get_vif_info(self._session, [ 738.498659] env[61839]: ERROR nova.compute.manager [instance: 56993a6d-de55-4648-9fd9-31d06a57f300] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 738.498659] env[61839]: ERROR nova.compute.manager [instance: 56993a6d-de55-4648-9fd9-31d06a57f300] for vif in network_info: [ 738.498659] env[61839]: ERROR nova.compute.manager [instance: 56993a6d-de55-4648-9fd9-31d06a57f300] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 738.498659] env[61839]: ERROR nova.compute.manager [instance: 56993a6d-de55-4648-9fd9-31d06a57f300] return self._sync_wrapper(fn, *args, **kwargs) [ 738.498659] env[61839]: ERROR nova.compute.manager [instance: 56993a6d-de55-4648-9fd9-31d06a57f300] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 738.498659] env[61839]: ERROR nova.compute.manager [instance: 56993a6d-de55-4648-9fd9-31d06a57f300] self.wait() [ 738.498659] env[61839]: ERROR nova.compute.manager [instance: 56993a6d-de55-4648-9fd9-31d06a57f300] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 738.498659] env[61839]: ERROR nova.compute.manager [instance: 56993a6d-de55-4648-9fd9-31d06a57f300] self[:] = self._gt.wait() [ 738.498659] env[61839]: ERROR nova.compute.manager [instance: 56993a6d-de55-4648-9fd9-31d06a57f300] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 738.498659] env[61839]: ERROR nova.compute.manager [instance: 56993a6d-de55-4648-9fd9-31d06a57f300] return self._exit_event.wait() [ 738.498659] env[61839]: ERROR nova.compute.manager [instance: 56993a6d-de55-4648-9fd9-31d06a57f300] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 738.499052] env[61839]: ERROR 
nova.compute.manager [instance: 56993a6d-de55-4648-9fd9-31d06a57f300] result = hub.switch() [ 738.499052] env[61839]: ERROR nova.compute.manager [instance: 56993a6d-de55-4648-9fd9-31d06a57f300] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 738.499052] env[61839]: ERROR nova.compute.manager [instance: 56993a6d-de55-4648-9fd9-31d06a57f300] return self.greenlet.switch() [ 738.499052] env[61839]: ERROR nova.compute.manager [instance: 56993a6d-de55-4648-9fd9-31d06a57f300] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 738.499052] env[61839]: ERROR nova.compute.manager [instance: 56993a6d-de55-4648-9fd9-31d06a57f300] result = function(*args, **kwargs) [ 738.499052] env[61839]: ERROR nova.compute.manager [instance: 56993a6d-de55-4648-9fd9-31d06a57f300] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 738.499052] env[61839]: ERROR nova.compute.manager [instance: 56993a6d-de55-4648-9fd9-31d06a57f300] return func(*args, **kwargs) [ 738.499052] env[61839]: ERROR nova.compute.manager [instance: 56993a6d-de55-4648-9fd9-31d06a57f300] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 738.499052] env[61839]: ERROR nova.compute.manager [instance: 56993a6d-de55-4648-9fd9-31d06a57f300] raise e [ 738.499052] env[61839]: ERROR nova.compute.manager [instance: 56993a6d-de55-4648-9fd9-31d06a57f300] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 738.499052] env[61839]: ERROR nova.compute.manager [instance: 56993a6d-de55-4648-9fd9-31d06a57f300] nwinfo = self.network_api.allocate_for_instance( [ 738.499052] env[61839]: ERROR nova.compute.manager [instance: 56993a6d-de55-4648-9fd9-31d06a57f300] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 738.499052] env[61839]: ERROR nova.compute.manager [instance: 56993a6d-de55-4648-9fd9-31d06a57f300] created_port_ids = self._update_ports_for_instance( [ 738.499443] env[61839]: ERROR nova.compute.manager [instance: 56993a6d-de55-4648-9fd9-31d06a57f300] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 738.499443] env[61839]: ERROR nova.compute.manager [instance: 56993a6d-de55-4648-9fd9-31d06a57f300] with excutils.save_and_reraise_exception(): [ 738.499443] env[61839]: ERROR nova.compute.manager [instance: 56993a6d-de55-4648-9fd9-31d06a57f300] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 738.499443] env[61839]: ERROR nova.compute.manager [instance: 56993a6d-de55-4648-9fd9-31d06a57f300] self.force_reraise() [ 738.499443] env[61839]: ERROR nova.compute.manager [instance: 56993a6d-de55-4648-9fd9-31d06a57f300] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 738.499443] env[61839]: ERROR nova.compute.manager [instance: 56993a6d-de55-4648-9fd9-31d06a57f300] raise self.value [ 738.499443] env[61839]: ERROR nova.compute.manager [instance: 56993a6d-de55-4648-9fd9-31d06a57f300] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 738.499443] env[61839]: ERROR nova.compute.manager [instance: 56993a6d-de55-4648-9fd9-31d06a57f300] updated_port = self._update_port( [ 738.499443] env[61839]: ERROR nova.compute.manager [instance: 56993a6d-de55-4648-9fd9-31d06a57f300] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 738.499443] 
env[61839]: ERROR nova.compute.manager [instance: 56993a6d-de55-4648-9fd9-31d06a57f300] _ensure_no_port_binding_failure(port) [ 738.499443] env[61839]: ERROR nova.compute.manager [instance: 56993a6d-de55-4648-9fd9-31d06a57f300] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 738.499443] env[61839]: ERROR nova.compute.manager [instance: 56993a6d-de55-4648-9fd9-31d06a57f300] raise exception.PortBindingFailed(port_id=port['id']) [ 738.499803] env[61839]: ERROR nova.compute.manager [instance: 56993a6d-de55-4648-9fd9-31d06a57f300] nova.exception.PortBindingFailed: Binding failed for port e04f298e-523d-4c71-859e-994f85a6983d, please check neutron logs for more information. [ 738.499803] env[61839]: ERROR nova.compute.manager [instance: 56993a6d-de55-4648-9fd9-31d06a57f300] [ 738.499803] env[61839]: INFO nova.compute.manager [None req-bfae6393-6992-4f8d-b92d-1fa0650840c0 tempest-ListImageFiltersTestJSON-1646419560 tempest-ListImageFiltersTestJSON-1646419560-project-member] [instance: 56993a6d-de55-4648-9fd9-31d06a57f300] Terminating instance [ 738.501179] env[61839]: DEBUG oslo_concurrency.lockutils [None req-bfae6393-6992-4f8d-b92d-1fa0650840c0 tempest-ListImageFiltersTestJSON-1646419560 tempest-ListImageFiltersTestJSON-1646419560-project-member] Acquiring lock "refresh_cache-56993a6d-de55-4648-9fd9-31d06a57f300" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 738.577671] env[61839]: DEBUG oslo_concurrency.lockutils [None req-73f4029e-d3f8-4a4c-9058-31fdb7ebb120 tempest-ServerRescueTestJSONUnderV235-933831856 tempest-ServerRescueTestJSONUnderV235-933831856-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 738.754019] env[61839]: DEBUG nova.network.neutron [req-ed440a92-85a5-45ab-8c33-387504ba3110 req-ef12e316-2063-4fb6-8536-d83b3293c33f service nova] [instance: 56993a6d-de55-4648-9fd9-31d06a57f300] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 738.758472] env[61839]: DEBUG oslo_concurrency.lockutils [None req-21cfb57d-6c01-48f6-81e9-529dd1bafa3c tempest-ServersAdminTestJSON-399292093 tempest-ServersAdminTestJSON-399292093-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.456s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 738.758472] env[61839]: DEBUG nova.compute.manager [None req-21cfb57d-6c01-48f6-81e9-529dd1bafa3c tempest-ServersAdminTestJSON-399292093 tempest-ServersAdminTestJSON-399292093-project-member] [instance: 50140f35-6282-41dc-a66c-f041f33769d7] Start building networks asynchronously for instance. 
{{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 738.760071] env[61839]: DEBUG oslo_concurrency.lockutils [None req-d67a25ce-8bd3-48bd-87b7-725457f40be1 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 18.174s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 738.856934] env[61839]: DEBUG nova.network.neutron [req-ed440a92-85a5-45ab-8c33-387504ba3110 req-ef12e316-2063-4fb6-8536-d83b3293c33f service nova] [instance: 56993a6d-de55-4648-9fd9-31d06a57f300] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 739.270917] env[61839]: DEBUG nova.compute.utils [None req-21cfb57d-6c01-48f6-81e9-529dd1bafa3c tempest-ServersAdminTestJSON-399292093 tempest-ServersAdminTestJSON-399292093-project-member] Using /dev/sd instead of None {{(pid=61839) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 739.272640] env[61839]: DEBUG nova.compute.manager [None req-21cfb57d-6c01-48f6-81e9-529dd1bafa3c tempest-ServersAdminTestJSON-399292093 tempest-ServersAdminTestJSON-399292093-project-member] [instance: 50140f35-6282-41dc-a66c-f041f33769d7] Allocating IP information in the background. {{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 739.272752] env[61839]: DEBUG nova.network.neutron [None req-21cfb57d-6c01-48f6-81e9-529dd1bafa3c tempest-ServersAdminTestJSON-399292093 tempest-ServersAdminTestJSON-399292093-project-member] [instance: 50140f35-6282-41dc-a66c-f041f33769d7] allocate_for_instance() {{(pid=61839) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 739.353962] env[61839]: DEBUG nova.policy [None req-21cfb57d-6c01-48f6-81e9-529dd1bafa3c tempest-ServersAdminTestJSON-399292093 tempest-ServersAdminTestJSON-399292093-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2a635ebf69e04d40ac9b8e7e2ee4f323', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'df1b04d9d4c84196899212910f0a120f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61839) authorize /opt/stack/nova/nova/policy.py:201}} [ 739.360704] env[61839]: DEBUG oslo_concurrency.lockutils [req-ed440a92-85a5-45ab-8c33-387504ba3110 req-ef12e316-2063-4fb6-8536-d83b3293c33f service nova] Releasing lock "refresh_cache-56993a6d-de55-4648-9fd9-31d06a57f300" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 739.363809] env[61839]: DEBUG oslo_concurrency.lockutils [None req-bfae6393-6992-4f8d-b92d-1fa0650840c0 tempest-ListImageFiltersTestJSON-1646419560 tempest-ListImageFiltersTestJSON-1646419560-project-member] Acquired lock "refresh_cache-56993a6d-de55-4648-9fd9-31d06a57f300" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 739.364015] env[61839]: DEBUG nova.network.neutron [None req-bfae6393-6992-4f8d-b92d-1fa0650840c0 tempest-ListImageFiltersTestJSON-1646419560 tempest-ListImageFiltersTestJSON-1646419560-project-member] [instance: 
56993a6d-de55-4648-9fd9-31d06a57f300] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 739.708647] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fea589d8-03b3-46a1-87e0-996b86b6963b {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.722300] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4627a51e-c28b-48da-8e07-88e804f587bc {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.755996] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-896c589c-3490-4fdc-a175-d798801d3afd {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.769738] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f15ce4d-b63b-4f1e-a2d3-a60338fb5b88 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.775760] env[61839]: DEBUG nova.compute.manager [None req-21cfb57d-6c01-48f6-81e9-529dd1bafa3c tempest-ServersAdminTestJSON-399292093 tempest-ServersAdminTestJSON-399292093-project-member] [instance: 50140f35-6282-41dc-a66c-f041f33769d7] Start building block device mappings for instance. {{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 739.788254] env[61839]: DEBUG nova.compute.provider_tree [None req-d67a25ce-8bd3-48bd-87b7-725457f40be1 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 739.907912] env[61839]: DEBUG nova.network.neutron [None req-bfae6393-6992-4f8d-b92d-1fa0650840c0 tempest-ListImageFiltersTestJSON-1646419560 tempest-ListImageFiltersTestJSON-1646419560-project-member] [instance: 56993a6d-de55-4648-9fd9-31d06a57f300] Instance cache missing network info. 
{{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 740.006067] env[61839]: DEBUG nova.network.neutron [None req-bfae6393-6992-4f8d-b92d-1fa0650840c0 tempest-ListImageFiltersTestJSON-1646419560 tempest-ListImageFiltersTestJSON-1646419560-project-member] [instance: 56993a6d-de55-4648-9fd9-31d06a57f300] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 740.065492] env[61839]: DEBUG nova.network.neutron [None req-21cfb57d-6c01-48f6-81e9-529dd1bafa3c tempest-ServersAdminTestJSON-399292093 tempest-ServersAdminTestJSON-399292093-project-member] [instance: 50140f35-6282-41dc-a66c-f041f33769d7] Successfully created port: b1d03096-6b67-405d-bb2c-0df90e9beef4 {{(pid=61839) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 740.266512] env[61839]: DEBUG nova.compute.manager [req-21a470af-5734-4d51-9ad2-01e369f98985 req-dd47702c-d67e-4fd5-b231-3b554c22074b service nova] [instance: 56993a6d-de55-4648-9fd9-31d06a57f300] Received event network-vif-deleted-e04f298e-523d-4c71-859e-994f85a6983d {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 740.291252] env[61839]: DEBUG nova.scheduler.client.report [None req-d67a25ce-8bd3-48bd-87b7-725457f40be1 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 740.508469] env[61839]: DEBUG oslo_concurrency.lockutils [None req-bfae6393-6992-4f8d-b92d-1fa0650840c0 tempest-ListImageFiltersTestJSON-1646419560 tempest-ListImageFiltersTestJSON-1646419560-project-member] Releasing lock "refresh_cache-56993a6d-de55-4648-9fd9-31d06a57f300" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 740.508915] env[61839]: DEBUG nova.compute.manager [None req-bfae6393-6992-4f8d-b92d-1fa0650840c0 tempest-ListImageFiltersTestJSON-1646419560 tempest-ListImageFiltersTestJSON-1646419560-project-member] [instance: 56993a6d-de55-4648-9fd9-31d06a57f300] Start destroying the instance on the hypervisor. 
{{(pid=61839) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 740.511504] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-bfae6393-6992-4f8d-b92d-1fa0650840c0 tempest-ListImageFiltersTestJSON-1646419560 tempest-ListImageFiltersTestJSON-1646419560-project-member] [instance: 56993a6d-de55-4648-9fd9-31d06a57f300] Destroying instance {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 740.511504] env[61839]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3e9787f8-4893-4af3-9b97-0d076c9958ac {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.523103] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8b16350-42f5-4ee8-9aef-0d4681ebe7cb {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.548944] env[61839]: WARNING nova.virt.vmwareapi.vmops [None req-bfae6393-6992-4f8d-b92d-1fa0650840c0 tempest-ListImageFiltersTestJSON-1646419560 tempest-ListImageFiltersTestJSON-1646419560-project-member] [instance: 56993a6d-de55-4648-9fd9-31d06a57f300] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 56993a6d-de55-4648-9fd9-31d06a57f300 could not be found. [ 740.549218] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-bfae6393-6992-4f8d-b92d-1fa0650840c0 tempest-ListImageFiltersTestJSON-1646419560 tempest-ListImageFiltersTestJSON-1646419560-project-member] [instance: 56993a6d-de55-4648-9fd9-31d06a57f300] Instance destroyed {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 740.549407] env[61839]: INFO nova.compute.manager [None req-bfae6393-6992-4f8d-b92d-1fa0650840c0 tempest-ListImageFiltersTestJSON-1646419560 tempest-ListImageFiltersTestJSON-1646419560-project-member] [instance: 56993a6d-de55-4648-9fd9-31d06a57f300] Took 0.04 seconds to destroy the instance on the hypervisor. [ 740.549656] env[61839]: DEBUG oslo.service.loopingcall [None req-bfae6393-6992-4f8d-b92d-1fa0650840c0 tempest-ListImageFiltersTestJSON-1646419560 tempest-ListImageFiltersTestJSON-1646419560-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 740.549927] env[61839]: DEBUG nova.compute.manager [-] [instance: 56993a6d-de55-4648-9fd9-31d06a57f300] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 740.550012] env[61839]: DEBUG nova.network.neutron [-] [instance: 56993a6d-de55-4648-9fd9-31d06a57f300] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 740.575140] env[61839]: DEBUG nova.network.neutron [-] [instance: 56993a6d-de55-4648-9fd9-31d06a57f300] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 740.787018] env[61839]: DEBUG nova.compute.manager [None req-21cfb57d-6c01-48f6-81e9-529dd1bafa3c tempest-ServersAdminTestJSON-399292093 tempest-ServersAdminTestJSON-399292093-project-member] [instance: 50140f35-6282-41dc-a66c-f041f33769d7] Start spawning the instance on the hypervisor. 
{{(pid=61839) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 740.797636] env[61839]: DEBUG oslo_concurrency.lockutils [None req-d67a25ce-8bd3-48bd-87b7-725457f40be1 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.037s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 740.798281] env[61839]: ERROR nova.compute.manager [None req-d67a25ce-8bd3-48bd-87b7-725457f40be1 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] [instance: 97560b6e-0f50-4cc0-b620-305c82938390] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 7faeac3b-d75e-4fc3-b914-89c0c9d05ae6, please check neutron logs for more information. [ 740.798281] env[61839]: ERROR nova.compute.manager [instance: 97560b6e-0f50-4cc0-b620-305c82938390] Traceback (most recent call last): [ 740.798281] env[61839]: ERROR nova.compute.manager [instance: 97560b6e-0f50-4cc0-b620-305c82938390] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 740.798281] env[61839]: ERROR nova.compute.manager [instance: 97560b6e-0f50-4cc0-b620-305c82938390] self.driver.spawn(context, instance, image_meta, [ 740.798281] env[61839]: ERROR nova.compute.manager [instance: 97560b6e-0f50-4cc0-b620-305c82938390] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 740.798281] env[61839]: ERROR nova.compute.manager [instance: 97560b6e-0f50-4cc0-b620-305c82938390] self._vmops.spawn(context, instance, image_meta, injected_files, [ 740.798281] env[61839]: ERROR nova.compute.manager [instance: 97560b6e-0f50-4cc0-b620-305c82938390] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 740.798281] env[61839]: ERROR nova.compute.manager [instance: 97560b6e-0f50-4cc0-b620-305c82938390] vm_ref = self.build_virtual_machine(instance, [ 740.798281] env[61839]: ERROR nova.compute.manager [instance: 97560b6e-0f50-4cc0-b620-305c82938390] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 740.798281] env[61839]: ERROR nova.compute.manager [instance: 97560b6e-0f50-4cc0-b620-305c82938390] vif_infos = vmwarevif.get_vif_info(self._session, [ 740.798281] env[61839]: ERROR nova.compute.manager [instance: 97560b6e-0f50-4cc0-b620-305c82938390] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 740.798826] env[61839]: ERROR nova.compute.manager [instance: 97560b6e-0f50-4cc0-b620-305c82938390] for vif in network_info: [ 740.798826] env[61839]: ERROR nova.compute.manager [instance: 97560b6e-0f50-4cc0-b620-305c82938390] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 740.798826] env[61839]: ERROR nova.compute.manager [instance: 97560b6e-0f50-4cc0-b620-305c82938390] return self._sync_wrapper(fn, *args, **kwargs) [ 740.798826] env[61839]: ERROR nova.compute.manager [instance: 97560b6e-0f50-4cc0-b620-305c82938390] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 740.798826] env[61839]: ERROR nova.compute.manager [instance: 97560b6e-0f50-4cc0-b620-305c82938390] self.wait() [ 740.798826] env[61839]: ERROR nova.compute.manager [instance: 97560b6e-0f50-4cc0-b620-305c82938390] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 740.798826] env[61839]: ERROR 
nova.compute.manager [instance: 97560b6e-0f50-4cc0-b620-305c82938390] self[:] = self._gt.wait() [ 740.798826] env[61839]: ERROR nova.compute.manager [instance: 97560b6e-0f50-4cc0-b620-305c82938390] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 740.798826] env[61839]: ERROR nova.compute.manager [instance: 97560b6e-0f50-4cc0-b620-305c82938390] return self._exit_event.wait() [ 740.798826] env[61839]: ERROR nova.compute.manager [instance: 97560b6e-0f50-4cc0-b620-305c82938390] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 740.798826] env[61839]: ERROR nova.compute.manager [instance: 97560b6e-0f50-4cc0-b620-305c82938390] result = hub.switch() [ 740.798826] env[61839]: ERROR nova.compute.manager [instance: 97560b6e-0f50-4cc0-b620-305c82938390] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 740.798826] env[61839]: ERROR nova.compute.manager [instance: 97560b6e-0f50-4cc0-b620-305c82938390] return self.greenlet.switch() [ 740.799240] env[61839]: ERROR nova.compute.manager [instance: 97560b6e-0f50-4cc0-b620-305c82938390] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 740.799240] env[61839]: ERROR nova.compute.manager [instance: 97560b6e-0f50-4cc0-b620-305c82938390] result = function(*args, **kwargs) [ 740.799240] env[61839]: ERROR nova.compute.manager [instance: 97560b6e-0f50-4cc0-b620-305c82938390] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 740.799240] env[61839]: ERROR nova.compute.manager [instance: 97560b6e-0f50-4cc0-b620-305c82938390] return func(*args, **kwargs) [ 740.799240] env[61839]: ERROR nova.compute.manager [instance: 97560b6e-0f50-4cc0-b620-305c82938390] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 740.799240] env[61839]: ERROR nova.compute.manager [instance: 97560b6e-0f50-4cc0-b620-305c82938390] raise e [ 740.799240] env[61839]: ERROR nova.compute.manager [instance: 97560b6e-0f50-4cc0-b620-305c82938390] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 740.799240] env[61839]: ERROR nova.compute.manager [instance: 97560b6e-0f50-4cc0-b620-305c82938390] nwinfo = self.network_api.allocate_for_instance( [ 740.799240] env[61839]: ERROR nova.compute.manager [instance: 97560b6e-0f50-4cc0-b620-305c82938390] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 740.799240] env[61839]: ERROR nova.compute.manager [instance: 97560b6e-0f50-4cc0-b620-305c82938390] created_port_ids = self._update_ports_for_instance( [ 740.799240] env[61839]: ERROR nova.compute.manager [instance: 97560b6e-0f50-4cc0-b620-305c82938390] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 740.799240] env[61839]: ERROR nova.compute.manager [instance: 97560b6e-0f50-4cc0-b620-305c82938390] with excutils.save_and_reraise_exception(): [ 740.799240] env[61839]: ERROR nova.compute.manager [instance: 97560b6e-0f50-4cc0-b620-305c82938390] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 740.800393] env[61839]: ERROR nova.compute.manager [instance: 97560b6e-0f50-4cc0-b620-305c82938390] self.force_reraise() [ 740.800393] env[61839]: ERROR nova.compute.manager [instance: 97560b6e-0f50-4cc0-b620-305c82938390] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 
200, in force_reraise [ 740.800393] env[61839]: ERROR nova.compute.manager [instance: 97560b6e-0f50-4cc0-b620-305c82938390] raise self.value [ 740.800393] env[61839]: ERROR nova.compute.manager [instance: 97560b6e-0f50-4cc0-b620-305c82938390] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 740.800393] env[61839]: ERROR nova.compute.manager [instance: 97560b6e-0f50-4cc0-b620-305c82938390] updated_port = self._update_port( [ 740.800393] env[61839]: ERROR nova.compute.manager [instance: 97560b6e-0f50-4cc0-b620-305c82938390] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 740.800393] env[61839]: ERROR nova.compute.manager [instance: 97560b6e-0f50-4cc0-b620-305c82938390] _ensure_no_port_binding_failure(port) [ 740.800393] env[61839]: ERROR nova.compute.manager [instance: 97560b6e-0f50-4cc0-b620-305c82938390] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 740.800393] env[61839]: ERROR nova.compute.manager [instance: 97560b6e-0f50-4cc0-b620-305c82938390] raise exception.PortBindingFailed(port_id=port['id']) [ 740.800393] env[61839]: ERROR nova.compute.manager [instance: 97560b6e-0f50-4cc0-b620-305c82938390] nova.exception.PortBindingFailed: Binding failed for port 7faeac3b-d75e-4fc3-b914-89c0c9d05ae6, please check neutron logs for more information. [ 740.800393] env[61839]: ERROR nova.compute.manager [instance: 97560b6e-0f50-4cc0-b620-305c82938390] [ 740.801053] env[61839]: DEBUG nova.compute.utils [None req-d67a25ce-8bd3-48bd-87b7-725457f40be1 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] [instance: 97560b6e-0f50-4cc0-b620-305c82938390] Binding failed for port 7faeac3b-d75e-4fc3-b914-89c0c9d05ae6, please check neutron logs for more information. {{(pid=61839) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 740.801053] env[61839]: DEBUG oslo_concurrency.lockutils [None req-871442e2-b578-4d92-b7ce-9775c2859117 tempest-ServersAdminTestJSON-399292093 tempest-ServersAdminTestJSON-399292093-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.833s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 740.801740] env[61839]: INFO nova.compute.claims [None req-871442e2-b578-4d92-b7ce-9775c2859117 tempest-ServersAdminTestJSON-399292093 tempest-ServersAdminTestJSON-399292093-project-member] [instance: a89e30e6-b727-440f-a1e8-9c86d19c796d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 740.804992] env[61839]: DEBUG nova.compute.manager [None req-d67a25ce-8bd3-48bd-87b7-725457f40be1 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] [instance: 97560b6e-0f50-4cc0-b620-305c82938390] Build of instance 97560b6e-0f50-4cc0-b620-305c82938390 was re-scheduled: Binding failed for port 7faeac3b-d75e-4fc3-b914-89c0c9d05ae6, please check neutron logs for more information. 
{{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 740.805619] env[61839]: DEBUG nova.compute.manager [None req-d67a25ce-8bd3-48bd-87b7-725457f40be1 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] [instance: 97560b6e-0f50-4cc0-b620-305c82938390] Unplugging VIFs for instance {{(pid=61839) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 740.805875] env[61839]: DEBUG oslo_concurrency.lockutils [None req-d67a25ce-8bd3-48bd-87b7-725457f40be1 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] Acquiring lock "refresh_cache-97560b6e-0f50-4cc0-b620-305c82938390" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 740.806109] env[61839]: DEBUG oslo_concurrency.lockutils [None req-d67a25ce-8bd3-48bd-87b7-725457f40be1 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] Acquired lock "refresh_cache-97560b6e-0f50-4cc0-b620-305c82938390" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 740.806208] env[61839]: DEBUG nova.network.neutron [None req-d67a25ce-8bd3-48bd-87b7-725457f40be1 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] [instance: 97560b6e-0f50-4cc0-b620-305c82938390] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 740.821205] env[61839]: DEBUG nova.virt.hardware [None req-21cfb57d-6c01-48f6-81e9-529dd1bafa3c tempest-ServersAdminTestJSON-399292093 tempest-ServersAdminTestJSON-399292093-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-18T16:51:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-18T16:50:57Z,direct_url=,disk_format='vmdk',id=e497cc62-282a-4a70-9770-22d80d8a1013,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a98e034276e44534a9feace637762da7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-18T16:50:58Z,virtual_size=,visibility=), allow threads: False {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 740.821711] env[61839]: DEBUG nova.virt.hardware [None req-21cfb57d-6c01-48f6-81e9-529dd1bafa3c tempest-ServersAdminTestJSON-399292093 tempest-ServersAdminTestJSON-399292093-project-member] Flavor limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 740.821711] env[61839]: DEBUG nova.virt.hardware [None req-21cfb57d-6c01-48f6-81e9-529dd1bafa3c tempest-ServersAdminTestJSON-399292093 tempest-ServersAdminTestJSON-399292093-project-member] Image limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 740.821795] env[61839]: DEBUG nova.virt.hardware [None req-21cfb57d-6c01-48f6-81e9-529dd1bafa3c tempest-ServersAdminTestJSON-399292093 tempest-ServersAdminTestJSON-399292093-project-member] Flavor pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:388}} [ 740.821895] env[61839]: DEBUG nova.virt.hardware [None req-21cfb57d-6c01-48f6-81e9-529dd1bafa3c tempest-ServersAdminTestJSON-399292093 tempest-ServersAdminTestJSON-399292093-project-member] Image pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 740.822532] env[61839]: DEBUG nova.virt.hardware [None req-21cfb57d-6c01-48f6-81e9-529dd1bafa3c tempest-ServersAdminTestJSON-399292093 tempest-ServersAdminTestJSON-399292093-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 740.822879] env[61839]: DEBUG nova.virt.hardware [None req-21cfb57d-6c01-48f6-81e9-529dd1bafa3c tempest-ServersAdminTestJSON-399292093 tempest-ServersAdminTestJSON-399292093-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 740.823086] env[61839]: DEBUG nova.virt.hardware [None req-21cfb57d-6c01-48f6-81e9-529dd1bafa3c tempest-ServersAdminTestJSON-399292093 tempest-ServersAdminTestJSON-399292093-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 740.823288] env[61839]: DEBUG nova.virt.hardware [None req-21cfb57d-6c01-48f6-81e9-529dd1bafa3c tempest-ServersAdminTestJSON-399292093 tempest-ServersAdminTestJSON-399292093-project-member] Got 1 possible topologies {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 740.823461] env[61839]: DEBUG nova.virt.hardware [None req-21cfb57d-6c01-48f6-81e9-529dd1bafa3c tempest-ServersAdminTestJSON-399292093 tempest-ServersAdminTestJSON-399292093-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 740.823635] env[61839]: DEBUG nova.virt.hardware [None req-21cfb57d-6c01-48f6-81e9-529dd1bafa3c tempest-ServersAdminTestJSON-399292093 tempest-ServersAdminTestJSON-399292093-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 740.824852] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12a932d2-ecb4-4f0d-82ac-6c54e8edfc49 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.835891] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27dc071d-06c0-4c46-9c9e-04fb77e1d957 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.078252] env[61839]: DEBUG nova.network.neutron [-] [instance: 56993a6d-de55-4648-9fd9-31d06a57f300] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 741.346241] env[61839]: DEBUG nova.network.neutron [None req-d67a25ce-8bd3-48bd-87b7-725457f40be1 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] [instance: 97560b6e-0f50-4cc0-b620-305c82938390] Instance cache missing network info. 
{{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 741.445214] env[61839]: DEBUG nova.network.neutron [None req-d67a25ce-8bd3-48bd-87b7-725457f40be1 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] [instance: 97560b6e-0f50-4cc0-b620-305c82938390] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 741.459619] env[61839]: ERROR nova.compute.manager [None req-21cfb57d-6c01-48f6-81e9-529dd1bafa3c tempest-ServersAdminTestJSON-399292093 tempest-ServersAdminTestJSON-399292093-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port b1d03096-6b67-405d-bb2c-0df90e9beef4, please check neutron logs for more information. [ 741.459619] env[61839]: ERROR nova.compute.manager Traceback (most recent call last): [ 741.459619] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 741.459619] env[61839]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 741.459619] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 741.459619] env[61839]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 741.459619] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 741.459619] env[61839]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 741.459619] env[61839]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 741.459619] env[61839]: ERROR nova.compute.manager self.force_reraise() [ 741.459619] env[61839]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 741.459619] env[61839]: ERROR nova.compute.manager raise self.value [ 741.459619] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 741.459619] env[61839]: ERROR nova.compute.manager updated_port = self._update_port( [ 741.459619] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 741.459619] env[61839]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 741.460240] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 741.460240] env[61839]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 741.460240] env[61839]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port b1d03096-6b67-405d-bb2c-0df90e9beef4, please check neutron logs for more information. 
[ 741.460240] env[61839]: ERROR nova.compute.manager [ 741.460240] env[61839]: Traceback (most recent call last): [ 741.460240] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 741.460240] env[61839]: listener.cb(fileno) [ 741.460240] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 741.460240] env[61839]: result = function(*args, **kwargs) [ 741.460240] env[61839]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 741.460240] env[61839]: return func(*args, **kwargs) [ 741.460240] env[61839]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 741.460240] env[61839]: raise e [ 741.460240] env[61839]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 741.460240] env[61839]: nwinfo = self.network_api.allocate_for_instance( [ 741.460240] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 741.460240] env[61839]: created_port_ids = self._update_ports_for_instance( [ 741.460240] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 741.460240] env[61839]: with excutils.save_and_reraise_exception(): [ 741.460240] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 741.460240] env[61839]: self.force_reraise() [ 741.460240] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 741.460240] env[61839]: raise self.value [ 741.460240] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 741.460240] env[61839]: updated_port = self._update_port( [ 741.460240] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 741.460240] env[61839]: _ensure_no_port_binding_failure(port) [ 741.460240] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 741.460240] env[61839]: raise exception.PortBindingFailed(port_id=port['id']) [ 741.461112] env[61839]: nova.exception.PortBindingFailed: Binding failed for port b1d03096-6b67-405d-bb2c-0df90e9beef4, please check neutron logs for more information. [ 741.461112] env[61839]: Removing descriptor: 21 [ 741.461112] env[61839]: ERROR nova.compute.manager [None req-21cfb57d-6c01-48f6-81e9-529dd1bafa3c tempest-ServersAdminTestJSON-399292093 tempest-ServersAdminTestJSON-399292093-project-member] [instance: 50140f35-6282-41dc-a66c-f041f33769d7] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port b1d03096-6b67-405d-bb2c-0df90e9beef4, please check neutron logs for more information. 
[ 741.461112] env[61839]: ERROR nova.compute.manager [instance: 50140f35-6282-41dc-a66c-f041f33769d7] Traceback (most recent call last): [ 741.461112] env[61839]: ERROR nova.compute.manager [instance: 50140f35-6282-41dc-a66c-f041f33769d7] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 741.461112] env[61839]: ERROR nova.compute.manager [instance: 50140f35-6282-41dc-a66c-f041f33769d7] yield resources [ 741.461112] env[61839]: ERROR nova.compute.manager [instance: 50140f35-6282-41dc-a66c-f041f33769d7] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 741.461112] env[61839]: ERROR nova.compute.manager [instance: 50140f35-6282-41dc-a66c-f041f33769d7] self.driver.spawn(context, instance, image_meta, [ 741.461112] env[61839]: ERROR nova.compute.manager [instance: 50140f35-6282-41dc-a66c-f041f33769d7] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 741.461112] env[61839]: ERROR nova.compute.manager [instance: 50140f35-6282-41dc-a66c-f041f33769d7] self._vmops.spawn(context, instance, image_meta, injected_files, [ 741.461112] env[61839]: ERROR nova.compute.manager [instance: 50140f35-6282-41dc-a66c-f041f33769d7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 741.461112] env[61839]: ERROR nova.compute.manager [instance: 50140f35-6282-41dc-a66c-f041f33769d7] vm_ref = self.build_virtual_machine(instance, [ 741.461545] env[61839]: ERROR nova.compute.manager [instance: 50140f35-6282-41dc-a66c-f041f33769d7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 741.461545] env[61839]: ERROR nova.compute.manager [instance: 50140f35-6282-41dc-a66c-f041f33769d7] vif_infos = vmwarevif.get_vif_info(self._session, [ 741.461545] env[61839]: ERROR nova.compute.manager [instance: 50140f35-6282-41dc-a66c-f041f33769d7] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 741.461545] env[61839]: ERROR nova.compute.manager [instance: 50140f35-6282-41dc-a66c-f041f33769d7] for vif in network_info: [ 741.461545] env[61839]: ERROR nova.compute.manager [instance: 50140f35-6282-41dc-a66c-f041f33769d7] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 741.461545] env[61839]: ERROR nova.compute.manager [instance: 50140f35-6282-41dc-a66c-f041f33769d7] return self._sync_wrapper(fn, *args, **kwargs) [ 741.461545] env[61839]: ERROR nova.compute.manager [instance: 50140f35-6282-41dc-a66c-f041f33769d7] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 741.461545] env[61839]: ERROR nova.compute.manager [instance: 50140f35-6282-41dc-a66c-f041f33769d7] self.wait() [ 741.461545] env[61839]: ERROR nova.compute.manager [instance: 50140f35-6282-41dc-a66c-f041f33769d7] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 741.461545] env[61839]: ERROR nova.compute.manager [instance: 50140f35-6282-41dc-a66c-f041f33769d7] self[:] = self._gt.wait() [ 741.461545] env[61839]: ERROR nova.compute.manager [instance: 50140f35-6282-41dc-a66c-f041f33769d7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 741.461545] env[61839]: ERROR nova.compute.manager [instance: 50140f35-6282-41dc-a66c-f041f33769d7] return self._exit_event.wait() [ 741.461545] env[61839]: ERROR nova.compute.manager [instance: 50140f35-6282-41dc-a66c-f041f33769d7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 741.461952] env[61839]: ERROR 
nova.compute.manager [instance: 50140f35-6282-41dc-a66c-f041f33769d7] result = hub.switch() [ 741.461952] env[61839]: ERROR nova.compute.manager [instance: 50140f35-6282-41dc-a66c-f041f33769d7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 741.461952] env[61839]: ERROR nova.compute.manager [instance: 50140f35-6282-41dc-a66c-f041f33769d7] return self.greenlet.switch() [ 741.461952] env[61839]: ERROR nova.compute.manager [instance: 50140f35-6282-41dc-a66c-f041f33769d7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 741.461952] env[61839]: ERROR nova.compute.manager [instance: 50140f35-6282-41dc-a66c-f041f33769d7] result = function(*args, **kwargs) [ 741.461952] env[61839]: ERROR nova.compute.manager [instance: 50140f35-6282-41dc-a66c-f041f33769d7] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 741.461952] env[61839]: ERROR nova.compute.manager [instance: 50140f35-6282-41dc-a66c-f041f33769d7] return func(*args, **kwargs) [ 741.461952] env[61839]: ERROR nova.compute.manager [instance: 50140f35-6282-41dc-a66c-f041f33769d7] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 741.461952] env[61839]: ERROR nova.compute.manager [instance: 50140f35-6282-41dc-a66c-f041f33769d7] raise e [ 741.461952] env[61839]: ERROR nova.compute.manager [instance: 50140f35-6282-41dc-a66c-f041f33769d7] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 741.461952] env[61839]: ERROR nova.compute.manager [instance: 50140f35-6282-41dc-a66c-f041f33769d7] nwinfo = self.network_api.allocate_for_instance( [ 741.461952] env[61839]: ERROR nova.compute.manager [instance: 50140f35-6282-41dc-a66c-f041f33769d7] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 741.461952] env[61839]: ERROR nova.compute.manager [instance: 50140f35-6282-41dc-a66c-f041f33769d7] created_port_ids = self._update_ports_for_instance( [ 741.462351] env[61839]: ERROR nova.compute.manager [instance: 50140f35-6282-41dc-a66c-f041f33769d7] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 741.462351] env[61839]: ERROR nova.compute.manager [instance: 50140f35-6282-41dc-a66c-f041f33769d7] with excutils.save_and_reraise_exception(): [ 741.462351] env[61839]: ERROR nova.compute.manager [instance: 50140f35-6282-41dc-a66c-f041f33769d7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 741.462351] env[61839]: ERROR nova.compute.manager [instance: 50140f35-6282-41dc-a66c-f041f33769d7] self.force_reraise() [ 741.462351] env[61839]: ERROR nova.compute.manager [instance: 50140f35-6282-41dc-a66c-f041f33769d7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 741.462351] env[61839]: ERROR nova.compute.manager [instance: 50140f35-6282-41dc-a66c-f041f33769d7] raise self.value [ 741.462351] env[61839]: ERROR nova.compute.manager [instance: 50140f35-6282-41dc-a66c-f041f33769d7] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 741.462351] env[61839]: ERROR nova.compute.manager [instance: 50140f35-6282-41dc-a66c-f041f33769d7] updated_port = self._update_port( [ 741.462351] env[61839]: ERROR nova.compute.manager [instance: 50140f35-6282-41dc-a66c-f041f33769d7] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 741.462351] 
env[61839]: ERROR nova.compute.manager [instance: 50140f35-6282-41dc-a66c-f041f33769d7] _ensure_no_port_binding_failure(port) [ 741.462351] env[61839]: ERROR nova.compute.manager [instance: 50140f35-6282-41dc-a66c-f041f33769d7] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 741.462351] env[61839]: ERROR nova.compute.manager [instance: 50140f35-6282-41dc-a66c-f041f33769d7] raise exception.PortBindingFailed(port_id=port['id']) [ 741.462735] env[61839]: ERROR nova.compute.manager [instance: 50140f35-6282-41dc-a66c-f041f33769d7] nova.exception.PortBindingFailed: Binding failed for port b1d03096-6b67-405d-bb2c-0df90e9beef4, please check neutron logs for more information. [ 741.462735] env[61839]: ERROR nova.compute.manager [instance: 50140f35-6282-41dc-a66c-f041f33769d7] [ 741.462735] env[61839]: INFO nova.compute.manager [None req-21cfb57d-6c01-48f6-81e9-529dd1bafa3c tempest-ServersAdminTestJSON-399292093 tempest-ServersAdminTestJSON-399292093-project-member] [instance: 50140f35-6282-41dc-a66c-f041f33769d7] Terminating instance [ 741.465919] env[61839]: DEBUG oslo_concurrency.lockutils [None req-21cfb57d-6c01-48f6-81e9-529dd1bafa3c tempest-ServersAdminTestJSON-399292093 tempest-ServersAdminTestJSON-399292093-project-member] Acquiring lock "refresh_cache-50140f35-6282-41dc-a66c-f041f33769d7" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 741.466099] env[61839]: DEBUG oslo_concurrency.lockutils [None req-21cfb57d-6c01-48f6-81e9-529dd1bafa3c tempest-ServersAdminTestJSON-399292093 tempest-ServersAdminTestJSON-399292093-project-member] Acquired lock "refresh_cache-50140f35-6282-41dc-a66c-f041f33769d7" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 741.466270] env[61839]: DEBUG nova.network.neutron [None req-21cfb57d-6c01-48f6-81e9-529dd1bafa3c tempest-ServersAdminTestJSON-399292093 tempest-ServersAdminTestJSON-399292093-project-member] [instance: 50140f35-6282-41dc-a66c-f041f33769d7] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 741.585018] env[61839]: INFO nova.compute.manager [-] [instance: 56993a6d-de55-4648-9fd9-31d06a57f300] Took 1.03 seconds to deallocate network for instance. 
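The traceback above bottoms out in _ensure_no_port_binding_failure: neutron could not bind port b1d03096-6b67-405d-bb2c-0df90e9beef4, so nova converts the failed binding into PortBindingFailed and the build is torn down. A minimal sketch of that check, reconstructed from the logged behavior (the marker constant and both bodies are assumptions, not nova source):

    # Names taken from the traceback; bodies reconstructed, not nova source.
    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(
                'Binding failed for port %s, please check neutron logs '
                'for more information.' % port_id)

    # Assumed marker: neutron flags a failed binding via binding:vif_type.
    VIF_TYPE_BINDING_FAILED = 'binding_failed'

    def _ensure_no_port_binding_failure(port):
        if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
            raise PortBindingFailed(port_id=port['id'])

    _ensure_no_port_binding_failure(
        {'id': 'b1d03096-6b67-405d-bb2c-0df90e9beef4',
         'binding:vif_type': 'binding_failed'})  # raises PortBindingFailed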
[ 741.589072] env[61839]: DEBUG nova.compute.claims [None req-bfae6393-6992-4f8d-b92d-1fa0650840c0 tempest-ListImageFiltersTestJSON-1646419560 tempest-ListImageFiltersTestJSON-1646419560-project-member] [instance: 56993a6d-de55-4648-9fd9-31d06a57f300] Aborting claim: {{(pid=61839) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 741.589072] env[61839]: DEBUG oslo_concurrency.lockutils [None req-bfae6393-6992-4f8d-b92d-1fa0650840c0 tempest-ListImageFiltersTestJSON-1646419560 tempest-ListImageFiltersTestJSON-1646419560-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 741.715822] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a242d3b0-5131-4888-830a-8c848380688b tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Acquiring lock "694a5d4b-3673-406b-a24a-d37fad33e549" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 741.716064] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a242d3b0-5131-4888-830a-8c848380688b tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Lock "694a5d4b-3673-406b-a24a-d37fad33e549" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 741.949214] env[61839]: DEBUG oslo_concurrency.lockutils [None req-d67a25ce-8bd3-48bd-87b7-725457f40be1 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] Releasing lock "refresh_cache-97560b6e-0f50-4cc0-b620-305c82938390" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 741.949214] env[61839]: DEBUG nova.compute.manager [None req-d67a25ce-8bd3-48bd-87b7-725457f40be1 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61839) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 741.949214] env[61839]: DEBUG nova.compute.manager [None req-d67a25ce-8bd3-48bd-87b7-725457f40be1 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] [instance: 97560b6e-0f50-4cc0-b620-305c82938390] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 741.949214] env[61839]: DEBUG nova.network.neutron [None req-d67a25ce-8bd3-48bd-87b7-725457f40be1 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] [instance: 97560b6e-0f50-4cc0-b620-305c82938390] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 741.976162] env[61839]: DEBUG nova.network.neutron [None req-d67a25ce-8bd3-48bd-87b7-725457f40be1 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] [instance: 97560b6e-0f50-4cc0-b620-305c82938390] Instance cache missing network info. 
{{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 741.996615] env[61839]: DEBUG nova.network.neutron [None req-21cfb57d-6c01-48f6-81e9-529dd1bafa3c tempest-ServersAdminTestJSON-399292093 tempest-ServersAdminTestJSON-399292093-project-member] [instance: 50140f35-6282-41dc-a66c-f041f33769d7] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 742.145122] env[61839]: DEBUG nova.network.neutron [None req-21cfb57d-6c01-48f6-81e9-529dd1bafa3c tempest-ServersAdminTestJSON-399292093 tempest-ServersAdminTestJSON-399292093-project-member] [instance: 50140f35-6282-41dc-a66c-f041f33769d7] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 742.239375] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cad01cf0-919f-47c8-b33d-c646561fb449 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.250020] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2e3eea2-fed6-4c69-a730-37b9ac6027b8 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.281042] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecb3797b-d66e-416d-8371-38361b3187c2 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.288483] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-deab1d05-d71a-4852-9de5-a3bbcbed6475 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.303648] env[61839]: DEBUG nova.compute.provider_tree [None req-871442e2-b578-4d92-b7ce-9775c2859117 tempest-ServersAdminTestJSON-399292093 tempest-ServersAdminTestJSON-399292093-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 742.328470] env[61839]: DEBUG nova.compute.manager [req-d87b9781-4f10-4f90-bc7c-3cb599a46bcc req-2cb5d086-5a75-4c9c-a92f-39c951e112eb service nova] [instance: 50140f35-6282-41dc-a66c-f041f33769d7] Received event network-changed-b1d03096-6b67-405d-bb2c-0df90e9beef4 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 742.328470] env[61839]: DEBUG nova.compute.manager [req-d87b9781-4f10-4f90-bc7c-3cb599a46bcc req-2cb5d086-5a75-4c9c-a92f-39c951e112eb service nova] [instance: 50140f35-6282-41dc-a66c-f041f33769d7] Refreshing instance network info cache due to event network-changed-b1d03096-6b67-405d-bb2c-0df90e9beef4. 
{{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 742.328928] env[61839]: DEBUG oslo_concurrency.lockutils [req-d87b9781-4f10-4f90-bc7c-3cb599a46bcc req-2cb5d086-5a75-4c9c-a92f-39c951e112eb service nova] Acquiring lock "refresh_cache-50140f35-6282-41dc-a66c-f041f33769d7" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 742.479962] env[61839]: DEBUG nova.network.neutron [None req-d67a25ce-8bd3-48bd-87b7-725457f40be1 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] [instance: 97560b6e-0f50-4cc0-b620-305c82938390] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 742.646173] env[61839]: DEBUG oslo_concurrency.lockutils [None req-21cfb57d-6c01-48f6-81e9-529dd1bafa3c tempest-ServersAdminTestJSON-399292093 tempest-ServersAdminTestJSON-399292093-project-member] Releasing lock "refresh_cache-50140f35-6282-41dc-a66c-f041f33769d7" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 742.646619] env[61839]: DEBUG nova.compute.manager [None req-21cfb57d-6c01-48f6-81e9-529dd1bafa3c tempest-ServersAdminTestJSON-399292093 tempest-ServersAdminTestJSON-399292093-project-member] [instance: 50140f35-6282-41dc-a66c-f041f33769d7] Start destroying the instance on the hypervisor. {{(pid=61839) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 742.646808] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-21cfb57d-6c01-48f6-81e9-529dd1bafa3c tempest-ServersAdminTestJSON-399292093 tempest-ServersAdminTestJSON-399292093-project-member] [instance: 50140f35-6282-41dc-a66c-f041f33769d7] Destroying instance {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 742.647263] env[61839]: DEBUG oslo_concurrency.lockutils [req-d87b9781-4f10-4f90-bc7c-3cb599a46bcc req-2cb5d086-5a75-4c9c-a92f-39c951e112eb service nova] Acquired lock "refresh_cache-50140f35-6282-41dc-a66c-f041f33769d7" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 742.647463] env[61839]: DEBUG nova.network.neutron [req-d87b9781-4f10-4f90-bc7c-3cb599a46bcc req-2cb5d086-5a75-4c9c-a92f-39c951e112eb service nova] [instance: 50140f35-6282-41dc-a66c-f041f33769d7] Refreshing network info cache for port b1d03096-6b67-405d-bb2c-0df90e9beef4 {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 742.650317] env[61839]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f221e94e-3e54-4ede-af5a-b258177ec9d3 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.659488] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4bba994-7c57-49f0-84a0-b0c6b4192531 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.681855] env[61839]: WARNING nova.virt.vmwareapi.vmops [None req-21cfb57d-6c01-48f6-81e9-529dd1bafa3c tempest-ServersAdminTestJSON-399292093 tempest-ServersAdminTestJSON-399292093-project-member] [instance: 50140f35-6282-41dc-a66c-f041f33769d7] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 50140f35-6282-41dc-a66c-f041f33769d7 could not be found. 
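The Acquiring/Acquired/Releasing triplets on "refresh_cache-<uuid>" above are oslo_concurrency.lockutils serializing rebuilds of the instance's network info cache. A minimal sketch of the same pattern; refresh_cache() here is a hypothetical stand-in for the cache rebuild:

    from oslo_concurrency import lockutils

    INSTANCE_UUID = '50140f35-6282-41dc-a66c-f041f33769d7'

    def refresh_cache(uuid):
        # Hypothetical stand-in for rebuilding the network info cache.
        print('rebuilding network info cache for %s' % uuid)

    # lockutils.lock() is the context manager behind the DEBUG lines:
    # it logs on acquire and release, and only one thread in this
    # process can hold "refresh_cache-<uuid>" at a time.
    with lockutils.lock('refresh_cache-%s' % INSTANCE_UUID):
        refresh_cache(INSTANCE_UUID)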
[ 742.682514] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-21cfb57d-6c01-48f6-81e9-529dd1bafa3c tempest-ServersAdminTestJSON-399292093 tempest-ServersAdminTestJSON-399292093-project-member] [instance: 50140f35-6282-41dc-a66c-f041f33769d7] Instance destroyed {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 742.682966] env[61839]: INFO nova.compute.manager [None req-21cfb57d-6c01-48f6-81e9-529dd1bafa3c tempest-ServersAdminTestJSON-399292093 tempest-ServersAdminTestJSON-399292093-project-member] [instance: 50140f35-6282-41dc-a66c-f041f33769d7] Took 0.04 seconds to destroy the instance on the hypervisor. [ 742.684729] env[61839]: DEBUG oslo.service.loopingcall [None req-21cfb57d-6c01-48f6-81e9-529dd1bafa3c tempest-ServersAdminTestJSON-399292093 tempest-ServersAdminTestJSON-399292093-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 742.684729] env[61839]: DEBUG nova.compute.manager [-] [instance: 50140f35-6282-41dc-a66c-f041f33769d7] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 742.684729] env[61839]: DEBUG nova.network.neutron [-] [instance: 50140f35-6282-41dc-a66c-f041f33769d7] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 742.719493] env[61839]: DEBUG nova.network.neutron [-] [instance: 50140f35-6282-41dc-a66c-f041f33769d7] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 742.807881] env[61839]: DEBUG nova.scheduler.client.report [None req-871442e2-b578-4d92-b7ce-9775c2859117 tempest-ServersAdminTestJSON-399292093 tempest-ServersAdminTestJSON-399292093-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 742.982065] env[61839]: INFO nova.compute.manager [None req-d67a25ce-8bd3-48bd-87b7-725457f40be1 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] [instance: 97560b6e-0f50-4cc0-b620-305c82938390] Took 1.03 seconds to deallocate network for instance. [ 743.174631] env[61839]: DEBUG nova.network.neutron [req-d87b9781-4f10-4f90-bc7c-3cb599a46bcc req-2cb5d086-5a75-4c9c-a92f-39c951e112eb service nova] [instance: 50140f35-6282-41dc-a66c-f041f33769d7] Instance cache missing network info. 
{{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 743.223059] env[61839]: DEBUG nova.network.neutron [-] [instance: 50140f35-6282-41dc-a66c-f041f33769d7] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 743.270206] env[61839]: DEBUG nova.network.neutron [req-d87b9781-4f10-4f90-bc7c-3cb599a46bcc req-2cb5d086-5a75-4c9c-a92f-39c951e112eb service nova] [instance: 50140f35-6282-41dc-a66c-f041f33769d7] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 743.313200] env[61839]: DEBUG oslo_concurrency.lockutils [None req-871442e2-b578-4d92-b7ce-9775c2859117 tempest-ServersAdminTestJSON-399292093 tempest-ServersAdminTestJSON-399292093-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.513s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 743.313956] env[61839]: DEBUG nova.compute.manager [None req-871442e2-b578-4d92-b7ce-9775c2859117 tempest-ServersAdminTestJSON-399292093 tempest-ServersAdminTestJSON-399292093-project-member] [instance: a89e30e6-b727-440f-a1e8-9c86d19c796d] Start building networks asynchronously for instance. {{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 743.321871] env[61839]: DEBUG oslo_concurrency.lockutils [None req-f3a9d5df-26df-4d8c-b25e-3cb525f9f793 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 16.981s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 743.570937] env[61839]: DEBUG oslo_concurrency.lockutils [None req-c09e423b-508c-42ac-8e81-438f1973d598 tempest-ServersTestManualDisk-590910019 tempest-ServersTestManualDisk-590910019-project-member] Acquiring lock "39728872-2d30-48eb-90da-412f1e45971c" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 743.570937] env[61839]: DEBUG oslo_concurrency.lockutils [None req-c09e423b-508c-42ac-8e81-438f1973d598 tempest-ServersTestManualDisk-590910019 tempest-ServersTestManualDisk-590910019-project-member] Lock "39728872-2d30-48eb-90da-412f1e45971c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 743.725506] env[61839]: INFO nova.compute.manager [-] [instance: 50140f35-6282-41dc-a66c-f041f33769d7] Took 1.04 seconds to deallocate network for instance. 
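The oslo.service.loopingcall record above ("Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.") shows network deallocation being driven through oslo's looping-call machinery so transient failures can be retried. A generic sketch of that idiom, not nova's exact retry configuration: the loop re-invokes the function until it raises LoopingCallDone, and start().wait() blocks for the result:

    from oslo_service import loopingcall

    attempts = {'count': 0}

    def _deallocate_network_with_retries():
        # Hypothetical stand-in: pretend the first two calls hit a
        # transient failure and return normally, so the loop fires again.
        attempts['count'] += 1
        if attempts['count'] < 3:
            return
        # Raising LoopingCallDone stops the loop; wait() returns retvalue.
        raise loopingcall.LoopingCallDone(retvalue=True)

    timer = loopingcall.FixedIntervalLoopingCall(_deallocate_network_with_retries)
    print(timer.start(interval=0.1).wait())  # True, after three invocations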
[ 743.727684] env[61839]: DEBUG nova.compute.claims [None req-21cfb57d-6c01-48f6-81e9-529dd1bafa3c tempest-ServersAdminTestJSON-399292093 tempest-ServersAdminTestJSON-399292093-project-member] [instance: 50140f35-6282-41dc-a66c-f041f33769d7] Aborting claim: {{(pid=61839) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 743.727858] env[61839]: DEBUG oslo_concurrency.lockutils [None req-21cfb57d-6c01-48f6-81e9-529dd1bafa3c tempest-ServersAdminTestJSON-399292093 tempest-ServersAdminTestJSON-399292093-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 743.772611] env[61839]: DEBUG oslo_concurrency.lockutils [req-d87b9781-4f10-4f90-bc7c-3cb599a46bcc req-2cb5d086-5a75-4c9c-a92f-39c951e112eb service nova] Releasing lock "refresh_cache-50140f35-6282-41dc-a66c-f041f33769d7" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 743.772892] env[61839]: DEBUG nova.compute.manager [req-d87b9781-4f10-4f90-bc7c-3cb599a46bcc req-2cb5d086-5a75-4c9c-a92f-39c951e112eb service nova] [instance: 50140f35-6282-41dc-a66c-f041f33769d7] Received event network-vif-deleted-b1d03096-6b67-405d-bb2c-0df90e9beef4 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 743.828013] env[61839]: DEBUG nova.compute.utils [None req-871442e2-b578-4d92-b7ce-9775c2859117 tempest-ServersAdminTestJSON-399292093 tempest-ServersAdminTestJSON-399292093-project-member] Using /dev/sd instead of None {{(pid=61839) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 743.832347] env[61839]: DEBUG nova.compute.manager [None req-871442e2-b578-4d92-b7ce-9775c2859117 tempest-ServersAdminTestJSON-399292093 tempest-ServersAdminTestJSON-399292093-project-member] [instance: a89e30e6-b727-440f-a1e8-9c86d19c796d] Allocating IP information in the background. 
{{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 743.832520] env[61839]: DEBUG nova.network.neutron [None req-871442e2-b578-4d92-b7ce-9775c2859117 tempest-ServersAdminTestJSON-399292093 tempest-ServersAdminTestJSON-399292093-project-member] [instance: a89e30e6-b727-440f-a1e8-9c86d19c796d] allocate_for_instance() {{(pid=61839) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 743.874411] env[61839]: DEBUG nova.policy [None req-871442e2-b578-4d92-b7ce-9775c2859117 tempest-ServersAdminTestJSON-399292093 tempest-ServersAdminTestJSON-399292093-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2a635ebf69e04d40ac9b8e7e2ee4f323', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'df1b04d9d4c84196899212910f0a120f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61839) authorize /opt/stack/nova/nova/policy.py:201}} [ 744.012203] env[61839]: INFO nova.scheduler.client.report [None req-d67a25ce-8bd3-48bd-87b7-725457f40be1 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] Deleted allocations for instance 97560b6e-0f50-4cc0-b620-305c82938390 [ 744.154025] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e282310-8519-4cbb-9d19-396825cb949e {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.164016] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de06f4ec-aaa7-48f8-93e7-93710554a44c {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.200886] env[61839]: DEBUG nova.network.neutron [None req-871442e2-b578-4d92-b7ce-9775c2859117 tempest-ServersAdminTestJSON-399292093 tempest-ServersAdminTestJSON-399292093-project-member] [instance: a89e30e6-b727-440f-a1e8-9c86d19c796d] Successfully created port: d741ef00-1358-49b5-ab14-a50a00d5024f {{(pid=61839) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 744.203348] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22eb4e3e-55dc-4940-b865-f4b26422c6f0 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.211525] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8059aa3d-0eab-4ec3-8c51-3aa5e2f2f3ff {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.224758] env[61839]: DEBUG nova.compute.provider_tree [None req-f3a9d5df-26df-4d8c-b25e-3cb525f9f793 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 744.333650] env[61839]: DEBUG nova.compute.manager [None req-871442e2-b578-4d92-b7ce-9775c2859117 tempest-ServersAdminTestJSON-399292093 tempest-ServersAdminTestJSON-399292093-project-member] [instance: a89e30e6-b727-440f-a1e8-9c86d19c796d] 
Start building block device mappings for instance. {{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 744.525225] env[61839]: DEBUG oslo_concurrency.lockutils [None req-d67a25ce-8bd3-48bd-87b7-725457f40be1 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] Lock "97560b6e-0f50-4cc0-b620-305c82938390" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 158.092s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 744.728359] env[61839]: DEBUG nova.scheduler.client.report [None req-f3a9d5df-26df-4d8c-b25e-3cb525f9f793 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 745.027662] env[61839]: DEBUG nova.compute.manager [None req-ba95565c-feba-4942-8392-448356c664a8 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] [instance: 619ec15b-463a-4daa-bffe-7d7a6022b962] Starting instance... {{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 745.234119] env[61839]: DEBUG oslo_concurrency.lockutils [None req-f3a9d5df-26df-4d8c-b25e-3cb525f9f793 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.912s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 745.234799] env[61839]: ERROR nova.compute.manager [None req-f3a9d5df-26df-4d8c-b25e-3cb525f9f793 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] [instance: d608405b-20d9-42ab-97e3-e129f9c1448b] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 6b89e10a-400d-4985-a3b9-998834980373, please check neutron logs for more information. 
[ 745.234799] env[61839]: ERROR nova.compute.manager [instance: d608405b-20d9-42ab-97e3-e129f9c1448b] Traceback (most recent call last): [ 745.234799] env[61839]: ERROR nova.compute.manager [instance: d608405b-20d9-42ab-97e3-e129f9c1448b] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 745.234799] env[61839]: ERROR nova.compute.manager [instance: d608405b-20d9-42ab-97e3-e129f9c1448b] self.driver.spawn(context, instance, image_meta, [ 745.234799] env[61839]: ERROR nova.compute.manager [instance: d608405b-20d9-42ab-97e3-e129f9c1448b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 745.234799] env[61839]: ERROR nova.compute.manager [instance: d608405b-20d9-42ab-97e3-e129f9c1448b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 745.234799] env[61839]: ERROR nova.compute.manager [instance: d608405b-20d9-42ab-97e3-e129f9c1448b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 745.234799] env[61839]: ERROR nova.compute.manager [instance: d608405b-20d9-42ab-97e3-e129f9c1448b] vm_ref = self.build_virtual_machine(instance, [ 745.234799] env[61839]: ERROR nova.compute.manager [instance: d608405b-20d9-42ab-97e3-e129f9c1448b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 745.234799] env[61839]: ERROR nova.compute.manager [instance: d608405b-20d9-42ab-97e3-e129f9c1448b] vif_infos = vmwarevif.get_vif_info(self._session, [ 745.234799] env[61839]: ERROR nova.compute.manager [instance: d608405b-20d9-42ab-97e3-e129f9c1448b] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 745.235326] env[61839]: ERROR nova.compute.manager [instance: d608405b-20d9-42ab-97e3-e129f9c1448b] for vif in network_info: [ 745.235326] env[61839]: ERROR nova.compute.manager [instance: d608405b-20d9-42ab-97e3-e129f9c1448b] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 745.235326] env[61839]: ERROR nova.compute.manager [instance: d608405b-20d9-42ab-97e3-e129f9c1448b] return self._sync_wrapper(fn, *args, **kwargs) [ 745.235326] env[61839]: ERROR nova.compute.manager [instance: d608405b-20d9-42ab-97e3-e129f9c1448b] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 745.235326] env[61839]: ERROR nova.compute.manager [instance: d608405b-20d9-42ab-97e3-e129f9c1448b] self.wait() [ 745.235326] env[61839]: ERROR nova.compute.manager [instance: d608405b-20d9-42ab-97e3-e129f9c1448b] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 745.235326] env[61839]: ERROR nova.compute.manager [instance: d608405b-20d9-42ab-97e3-e129f9c1448b] self[:] = self._gt.wait() [ 745.235326] env[61839]: ERROR nova.compute.manager [instance: d608405b-20d9-42ab-97e3-e129f9c1448b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 745.235326] env[61839]: ERROR nova.compute.manager [instance: d608405b-20d9-42ab-97e3-e129f9c1448b] return self._exit_event.wait() [ 745.235326] env[61839]: ERROR nova.compute.manager [instance: d608405b-20d9-42ab-97e3-e129f9c1448b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 745.235326] env[61839]: ERROR nova.compute.manager [instance: d608405b-20d9-42ab-97e3-e129f9c1448b] result = hub.switch() [ 745.235326] env[61839]: ERROR nova.compute.manager [instance: d608405b-20d9-42ab-97e3-e129f9c1448b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
745.235326] env[61839]: ERROR nova.compute.manager [instance: d608405b-20d9-42ab-97e3-e129f9c1448b] return self.greenlet.switch() [ 745.236424] env[61839]: ERROR nova.compute.manager [instance: d608405b-20d9-42ab-97e3-e129f9c1448b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 745.236424] env[61839]: ERROR nova.compute.manager [instance: d608405b-20d9-42ab-97e3-e129f9c1448b] result = function(*args, **kwargs) [ 745.236424] env[61839]: ERROR nova.compute.manager [instance: d608405b-20d9-42ab-97e3-e129f9c1448b] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 745.236424] env[61839]: ERROR nova.compute.manager [instance: d608405b-20d9-42ab-97e3-e129f9c1448b] return func(*args, **kwargs) [ 745.236424] env[61839]: ERROR nova.compute.manager [instance: d608405b-20d9-42ab-97e3-e129f9c1448b] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 745.236424] env[61839]: ERROR nova.compute.manager [instance: d608405b-20d9-42ab-97e3-e129f9c1448b] raise e [ 745.236424] env[61839]: ERROR nova.compute.manager [instance: d608405b-20d9-42ab-97e3-e129f9c1448b] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 745.236424] env[61839]: ERROR nova.compute.manager [instance: d608405b-20d9-42ab-97e3-e129f9c1448b] nwinfo = self.network_api.allocate_for_instance( [ 745.236424] env[61839]: ERROR nova.compute.manager [instance: d608405b-20d9-42ab-97e3-e129f9c1448b] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 745.236424] env[61839]: ERROR nova.compute.manager [instance: d608405b-20d9-42ab-97e3-e129f9c1448b] created_port_ids = self._update_ports_for_instance( [ 745.236424] env[61839]: ERROR nova.compute.manager [instance: d608405b-20d9-42ab-97e3-e129f9c1448b] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 745.236424] env[61839]: ERROR nova.compute.manager [instance: d608405b-20d9-42ab-97e3-e129f9c1448b] with excutils.save_and_reraise_exception(): [ 745.236424] env[61839]: ERROR nova.compute.manager [instance: d608405b-20d9-42ab-97e3-e129f9c1448b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 745.237044] env[61839]: ERROR nova.compute.manager [instance: d608405b-20d9-42ab-97e3-e129f9c1448b] self.force_reraise() [ 745.237044] env[61839]: ERROR nova.compute.manager [instance: d608405b-20d9-42ab-97e3-e129f9c1448b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 745.237044] env[61839]: ERROR nova.compute.manager [instance: d608405b-20d9-42ab-97e3-e129f9c1448b] raise self.value [ 745.237044] env[61839]: ERROR nova.compute.manager [instance: d608405b-20d9-42ab-97e3-e129f9c1448b] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 745.237044] env[61839]: ERROR nova.compute.manager [instance: d608405b-20d9-42ab-97e3-e129f9c1448b] updated_port = self._update_port( [ 745.237044] env[61839]: ERROR nova.compute.manager [instance: d608405b-20d9-42ab-97e3-e129f9c1448b] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 745.237044] env[61839]: ERROR nova.compute.manager [instance: d608405b-20d9-42ab-97e3-e129f9c1448b] _ensure_no_port_binding_failure(port) [ 745.237044] env[61839]: ERROR nova.compute.manager [instance: d608405b-20d9-42ab-97e3-e129f9c1448b] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 745.237044] env[61839]: ERROR nova.compute.manager [instance: d608405b-20d9-42ab-97e3-e129f9c1448b] raise exception.PortBindingFailed(port_id=port['id']) [ 745.237044] env[61839]: ERROR nova.compute.manager [instance: d608405b-20d9-42ab-97e3-e129f9c1448b] nova.exception.PortBindingFailed: Binding failed for port 6b89e10a-400d-4985-a3b9-998834980373, please check neutron logs for more information. [ 745.237044] env[61839]: ERROR nova.compute.manager [instance: d608405b-20d9-42ab-97e3-e129f9c1448b] [ 745.237502] env[61839]: DEBUG nova.compute.utils [None req-f3a9d5df-26df-4d8c-b25e-3cb525f9f793 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] [instance: d608405b-20d9-42ab-97e3-e129f9c1448b] Binding failed for port 6b89e10a-400d-4985-a3b9-998834980373, please check neutron logs for more information. {{(pid=61839) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 745.237502] env[61839]: DEBUG oslo_concurrency.lockutils [None req-1e1fbba5-4b37-438c-a0c7-7d09c658ba03 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.464s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 745.238278] env[61839]: INFO nova.compute.claims [None req-1e1fbba5-4b37-438c-a0c7-7d09c658ba03 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] [instance: 337b31e7-a6c9-4f35-9936-62cff06fe2a1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 745.241836] env[61839]: DEBUG nova.compute.manager [None req-f3a9d5df-26df-4d8c-b25e-3cb525f9f793 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] [instance: d608405b-20d9-42ab-97e3-e129f9c1448b] Build of instance d608405b-20d9-42ab-97e3-e129f9c1448b was re-scheduled: Binding failed for port 6b89e10a-400d-4985-a3b9-998834980373, please check neutron logs for more information. 
{{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 745.242298] env[61839]: DEBUG nova.compute.manager [None req-f3a9d5df-26df-4d8c-b25e-3cb525f9f793 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] [instance: d608405b-20d9-42ab-97e3-e129f9c1448b] Unplugging VIFs for instance {{(pid=61839) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 745.242534] env[61839]: DEBUG oslo_concurrency.lockutils [None req-f3a9d5df-26df-4d8c-b25e-3cb525f9f793 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] Acquiring lock "refresh_cache-d608405b-20d9-42ab-97e3-e129f9c1448b" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 745.242669] env[61839]: DEBUG oslo_concurrency.lockutils [None req-f3a9d5df-26df-4d8c-b25e-3cb525f9f793 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] Acquired lock "refresh_cache-d608405b-20d9-42ab-97e3-e129f9c1448b" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 745.242824] env[61839]: DEBUG nova.network.neutron [None req-f3a9d5df-26df-4d8c-b25e-3cb525f9f793 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] [instance: d608405b-20d9-42ab-97e3-e129f9c1448b] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 745.342779] env[61839]: DEBUG nova.compute.manager [None req-871442e2-b578-4d92-b7ce-9775c2859117 tempest-ServersAdminTestJSON-399292093 tempest-ServersAdminTestJSON-399292093-project-member] [instance: a89e30e6-b727-440f-a1e8-9c86d19c796d] Start spawning the instance on the hypervisor. 
{{(pid=61839) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 745.370785] env[61839]: DEBUG nova.virt.hardware [None req-871442e2-b578-4d92-b7ce-9775c2859117 tempest-ServersAdminTestJSON-399292093 tempest-ServersAdminTestJSON-399292093-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-18T16:51:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-18T16:50:57Z,direct_url=<?>,disk_format='vmdk',id=e497cc62-282a-4a70-9770-22d80d8a1013,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a98e034276e44534a9feace637762da7',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-10-18T16:50:58Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 745.372627] env[61839]: DEBUG nova.virt.hardware [None req-871442e2-b578-4d92-b7ce-9775c2859117 tempest-ServersAdminTestJSON-399292093 tempest-ServersAdminTestJSON-399292093-project-member] Flavor limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 745.372627] env[61839]: DEBUG nova.virt.hardware [None req-871442e2-b578-4d92-b7ce-9775c2859117 tempest-ServersAdminTestJSON-399292093 tempest-ServersAdminTestJSON-399292093-project-member] Image limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 745.372627] env[61839]: DEBUG nova.virt.hardware [None req-871442e2-b578-4d92-b7ce-9775c2859117 tempest-ServersAdminTestJSON-399292093 tempest-ServersAdminTestJSON-399292093-project-member] Flavor pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 745.372627] env[61839]: DEBUG nova.virt.hardware [None req-871442e2-b578-4d92-b7ce-9775c2859117 tempest-ServersAdminTestJSON-399292093 tempest-ServersAdminTestJSON-399292093-project-member] Image pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 745.372627] env[61839]: DEBUG nova.virt.hardware [None req-871442e2-b578-4d92-b7ce-9775c2859117 tempest-ServersAdminTestJSON-399292093 tempest-ServersAdminTestJSON-399292093-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 745.372907] env[61839]: DEBUG nova.virt.hardware [None req-871442e2-b578-4d92-b7ce-9775c2859117 tempest-ServersAdminTestJSON-399292093 tempest-ServersAdminTestJSON-399292093-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 745.372907] env[61839]: DEBUG nova.virt.hardware [None req-871442e2-b578-4d92-b7ce-9775c2859117 tempest-ServersAdminTestJSON-399292093 tempest-ServersAdminTestJSON-399292093-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 745.372907] env[61839]: DEBUG nova.virt.hardware [None req-871442e2-b578-4d92-b7ce-9775c2859117 
tempest-ServersAdminTestJSON-399292093 tempest-ServersAdminTestJSON-399292093-project-member] Got 1 possible topologies {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 745.372907] env[61839]: DEBUG nova.virt.hardware [None req-871442e2-b578-4d92-b7ce-9775c2859117 tempest-ServersAdminTestJSON-399292093 tempest-ServersAdminTestJSON-399292093-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 745.372907] env[61839]: DEBUG nova.virt.hardware [None req-871442e2-b578-4d92-b7ce-9775c2859117 tempest-ServersAdminTestJSON-399292093 tempest-ServersAdminTestJSON-399292093-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 745.374068] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54e9c523-69cc-4f10-9b05-60957e7525ac {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.381598] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80113688-370c-40a6-b670-c28b5e7e4e5a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.419950] env[61839]: DEBUG nova.compute.manager [req-cae1209c-fdda-400e-b1c8-c1c9d1d9fc49 req-afd73a5a-eb3f-440f-9caa-6249c46e510c service nova] [instance: a89e30e6-b727-440f-a1e8-9c86d19c796d] Received event network-changed-d741ef00-1358-49b5-ab14-a50a00d5024f {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 745.420341] env[61839]: DEBUG nova.compute.manager [req-cae1209c-fdda-400e-b1c8-c1c9d1d9fc49 req-afd73a5a-eb3f-440f-9caa-6249c46e510c service nova] [instance: a89e30e6-b727-440f-a1e8-9c86d19c796d] Refreshing instance network info cache due to event network-changed-d741ef00-1358-49b5-ab14-a50a00d5024f. 
{{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 745.420414] env[61839]: DEBUG oslo_concurrency.lockutils [req-cae1209c-fdda-400e-b1c8-c1c9d1d9fc49 req-afd73a5a-eb3f-440f-9caa-6249c46e510c service nova] Acquiring lock "refresh_cache-a89e30e6-b727-440f-a1e8-9c86d19c796d" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 745.420570] env[61839]: DEBUG oslo_concurrency.lockutils [req-cae1209c-fdda-400e-b1c8-c1c9d1d9fc49 req-afd73a5a-eb3f-440f-9caa-6249c46e510c service nova] Acquired lock "refresh_cache-a89e30e6-b727-440f-a1e8-9c86d19c796d" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 745.420666] env[61839]: DEBUG nova.network.neutron [req-cae1209c-fdda-400e-b1c8-c1c9d1d9fc49 req-afd73a5a-eb3f-440f-9caa-6249c46e510c service nova] [instance: a89e30e6-b727-440f-a1e8-9c86d19c796d] Refreshing network info cache for port d741ef00-1358-49b5-ab14-a50a00d5024f {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 745.547992] env[61839]: DEBUG oslo_concurrency.lockutils [None req-ba95565c-feba-4942-8392-448356c664a8 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 745.592261] env[61839]: ERROR nova.compute.manager [None req-871442e2-b578-4d92-b7ce-9775c2859117 tempest-ServersAdminTestJSON-399292093 tempest-ServersAdminTestJSON-399292093-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port d741ef00-1358-49b5-ab14-a50a00d5024f, please check neutron logs for more information. 
[ 745.592261] env[61839]: ERROR nova.compute.manager Traceback (most recent call last): [ 745.592261] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 745.592261] env[61839]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 745.592261] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 745.592261] env[61839]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 745.592261] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 745.592261] env[61839]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 745.592261] env[61839]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 745.592261] env[61839]: ERROR nova.compute.manager self.force_reraise() [ 745.592261] env[61839]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 745.592261] env[61839]: ERROR nova.compute.manager raise self.value [ 745.592261] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 745.592261] env[61839]: ERROR nova.compute.manager updated_port = self._update_port( [ 745.592261] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 745.592261] env[61839]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 745.592926] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 745.592926] env[61839]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 745.592926] env[61839]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port d741ef00-1358-49b5-ab14-a50a00d5024f, please check neutron logs for more information. 
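Every traceback in this burst routes through oslo_utils.excutils.save_and_reraise_exception (the __exit__ and force_reraise frames): cleanup code runs while the original exception is held, then the exception is re-raised unchanged. A minimal self-contained sketch of the idiom; update_port() and the rollback print are hypothetical stand-ins:

    from oslo_utils import excutils

    def update_port(port_id):
        # Hypothetical stand-in for nova.network.neutron._update_port.
        raise RuntimeError('binding failed for %s' % port_id)

    def update_ports(port_ids):
        created = []
        for port_id in port_ids:
            try:
                update_port(port_id)
                created.append(port_id)
            except Exception:
                with excutils.save_and_reraise_exception():
                    # Cleanup runs here; leaving the block calls
                    # force_reraise() on the saved exception, which is
                    # why __exit__ appears in the tracebacks above.
                    print('rolling back %d created port(s)' % len(created))

    try:
        update_ports(['d741ef00-1358-49b5-ab14-a50a00d5024f'])
    except RuntimeError as exc:
        print('re-raised unchanged: %s' % exc)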
[ 745.592926] env[61839]: ERROR nova.compute.manager [ 745.592926] env[61839]: Traceback (most recent call last): [ 745.592926] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 745.592926] env[61839]: listener.cb(fileno) [ 745.592926] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 745.592926] env[61839]: result = function(*args, **kwargs) [ 745.592926] env[61839]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 745.592926] env[61839]: return func(*args, **kwargs) [ 745.592926] env[61839]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 745.592926] env[61839]: raise e [ 745.592926] env[61839]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 745.592926] env[61839]: nwinfo = self.network_api.allocate_for_instance( [ 745.592926] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 745.592926] env[61839]: created_port_ids = self._update_ports_for_instance( [ 745.592926] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 745.592926] env[61839]: with excutils.save_and_reraise_exception(): [ 745.592926] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 745.592926] env[61839]: self.force_reraise() [ 745.592926] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 745.592926] env[61839]: raise self.value [ 745.592926] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 745.592926] env[61839]: updated_port = self._update_port( [ 745.592926] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 745.592926] env[61839]: _ensure_no_port_binding_failure(port) [ 745.592926] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 745.592926] env[61839]: raise exception.PortBindingFailed(port_id=port['id']) [ 745.594318] env[61839]: nova.exception.PortBindingFailed: Binding failed for port d741ef00-1358-49b5-ab14-a50a00d5024f, please check neutron logs for more information. [ 745.594318] env[61839]: Removing descriptor: 17 [ 745.594318] env[61839]: ERROR nova.compute.manager [None req-871442e2-b578-4d92-b7ce-9775c2859117 tempest-ServersAdminTestJSON-399292093 tempest-ServersAdminTestJSON-399292093-project-member] [instance: a89e30e6-b727-440f-a1e8-9c86d19c796d] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port d741ef00-1358-49b5-ab14-a50a00d5024f, please check neutron logs for more information. 
[ 745.594318] env[61839]: ERROR nova.compute.manager [instance: a89e30e6-b727-440f-a1e8-9c86d19c796d] Traceback (most recent call last): [ 745.594318] env[61839]: ERROR nova.compute.manager [instance: a89e30e6-b727-440f-a1e8-9c86d19c796d] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 745.594318] env[61839]: ERROR nova.compute.manager [instance: a89e30e6-b727-440f-a1e8-9c86d19c796d] yield resources [ 745.594318] env[61839]: ERROR nova.compute.manager [instance: a89e30e6-b727-440f-a1e8-9c86d19c796d] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 745.594318] env[61839]: ERROR nova.compute.manager [instance: a89e30e6-b727-440f-a1e8-9c86d19c796d] self.driver.spawn(context, instance, image_meta, [ 745.594318] env[61839]: ERROR nova.compute.manager [instance: a89e30e6-b727-440f-a1e8-9c86d19c796d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 745.594318] env[61839]: ERROR nova.compute.manager [instance: a89e30e6-b727-440f-a1e8-9c86d19c796d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 745.594318] env[61839]: ERROR nova.compute.manager [instance: a89e30e6-b727-440f-a1e8-9c86d19c796d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 745.594318] env[61839]: ERROR nova.compute.manager [instance: a89e30e6-b727-440f-a1e8-9c86d19c796d] vm_ref = self.build_virtual_machine(instance, [ 745.594788] env[61839]: ERROR nova.compute.manager [instance: a89e30e6-b727-440f-a1e8-9c86d19c796d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 745.594788] env[61839]: ERROR nova.compute.manager [instance: a89e30e6-b727-440f-a1e8-9c86d19c796d] vif_infos = vmwarevif.get_vif_info(self._session, [ 745.594788] env[61839]: ERROR nova.compute.manager [instance: a89e30e6-b727-440f-a1e8-9c86d19c796d] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 745.594788] env[61839]: ERROR nova.compute.manager [instance: a89e30e6-b727-440f-a1e8-9c86d19c796d] for vif in network_info: [ 745.594788] env[61839]: ERROR nova.compute.manager [instance: a89e30e6-b727-440f-a1e8-9c86d19c796d] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 745.594788] env[61839]: ERROR nova.compute.manager [instance: a89e30e6-b727-440f-a1e8-9c86d19c796d] return self._sync_wrapper(fn, *args, **kwargs) [ 745.594788] env[61839]: ERROR nova.compute.manager [instance: a89e30e6-b727-440f-a1e8-9c86d19c796d] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 745.594788] env[61839]: ERROR nova.compute.manager [instance: a89e30e6-b727-440f-a1e8-9c86d19c796d] self.wait() [ 745.594788] env[61839]: ERROR nova.compute.manager [instance: a89e30e6-b727-440f-a1e8-9c86d19c796d] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 745.594788] env[61839]: ERROR nova.compute.manager [instance: a89e30e6-b727-440f-a1e8-9c86d19c796d] self[:] = self._gt.wait() [ 745.594788] env[61839]: ERROR nova.compute.manager [instance: a89e30e6-b727-440f-a1e8-9c86d19c796d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 745.594788] env[61839]: ERROR nova.compute.manager [instance: a89e30e6-b727-440f-a1e8-9c86d19c796d] return self._exit_event.wait() [ 745.594788] env[61839]: ERROR nova.compute.manager [instance: a89e30e6-b727-440f-a1e8-9c86d19c796d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 745.595240] env[61839]: ERROR 
nova.compute.manager [instance: a89e30e6-b727-440f-a1e8-9c86d19c796d] result = hub.switch() [ 745.595240] env[61839]: ERROR nova.compute.manager [instance: a89e30e6-b727-440f-a1e8-9c86d19c796d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 745.595240] env[61839]: ERROR nova.compute.manager [instance: a89e30e6-b727-440f-a1e8-9c86d19c796d] return self.greenlet.switch() [ 745.595240] env[61839]: ERROR nova.compute.manager [instance: a89e30e6-b727-440f-a1e8-9c86d19c796d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 745.595240] env[61839]: ERROR nova.compute.manager [instance: a89e30e6-b727-440f-a1e8-9c86d19c796d] result = function(*args, **kwargs) [ 745.595240] env[61839]: ERROR nova.compute.manager [instance: a89e30e6-b727-440f-a1e8-9c86d19c796d] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 745.595240] env[61839]: ERROR nova.compute.manager [instance: a89e30e6-b727-440f-a1e8-9c86d19c796d] return func(*args, **kwargs) [ 745.595240] env[61839]: ERROR nova.compute.manager [instance: a89e30e6-b727-440f-a1e8-9c86d19c796d] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 745.595240] env[61839]: ERROR nova.compute.manager [instance: a89e30e6-b727-440f-a1e8-9c86d19c796d] raise e [ 745.595240] env[61839]: ERROR nova.compute.manager [instance: a89e30e6-b727-440f-a1e8-9c86d19c796d] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 745.595240] env[61839]: ERROR nova.compute.manager [instance: a89e30e6-b727-440f-a1e8-9c86d19c796d] nwinfo = self.network_api.allocate_for_instance( [ 745.595240] env[61839]: ERROR nova.compute.manager [instance: a89e30e6-b727-440f-a1e8-9c86d19c796d] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 745.595240] env[61839]: ERROR nova.compute.manager [instance: a89e30e6-b727-440f-a1e8-9c86d19c796d] created_port_ids = self._update_ports_for_instance( [ 745.595720] env[61839]: ERROR nova.compute.manager [instance: a89e30e6-b727-440f-a1e8-9c86d19c796d] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 745.595720] env[61839]: ERROR nova.compute.manager [instance: a89e30e6-b727-440f-a1e8-9c86d19c796d] with excutils.save_and_reraise_exception(): [ 745.595720] env[61839]: ERROR nova.compute.manager [instance: a89e30e6-b727-440f-a1e8-9c86d19c796d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 745.595720] env[61839]: ERROR nova.compute.manager [instance: a89e30e6-b727-440f-a1e8-9c86d19c796d] self.force_reraise() [ 745.595720] env[61839]: ERROR nova.compute.manager [instance: a89e30e6-b727-440f-a1e8-9c86d19c796d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 745.595720] env[61839]: ERROR nova.compute.manager [instance: a89e30e6-b727-440f-a1e8-9c86d19c796d] raise self.value [ 745.595720] env[61839]: ERROR nova.compute.manager [instance: a89e30e6-b727-440f-a1e8-9c86d19c796d] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 745.595720] env[61839]: ERROR nova.compute.manager [instance: a89e30e6-b727-440f-a1e8-9c86d19c796d] updated_port = self._update_port( [ 745.595720] env[61839]: ERROR nova.compute.manager [instance: a89e30e6-b727-440f-a1e8-9c86d19c796d] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 745.595720] 
env[61839]: ERROR nova.compute.manager [instance: a89e30e6-b727-440f-a1e8-9c86d19c796d] _ensure_no_port_binding_failure(port) [ 745.595720] env[61839]: ERROR nova.compute.manager [instance: a89e30e6-b727-440f-a1e8-9c86d19c796d] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 745.595720] env[61839]: ERROR nova.compute.manager [instance: a89e30e6-b727-440f-a1e8-9c86d19c796d] raise exception.PortBindingFailed(port_id=port['id']) [ 745.596173] env[61839]: ERROR nova.compute.manager [instance: a89e30e6-b727-440f-a1e8-9c86d19c796d] nova.exception.PortBindingFailed: Binding failed for port d741ef00-1358-49b5-ab14-a50a00d5024f, please check neutron logs for more information. [ 745.596173] env[61839]: ERROR nova.compute.manager [instance: a89e30e6-b727-440f-a1e8-9c86d19c796d] [ 745.596173] env[61839]: INFO nova.compute.manager [None req-871442e2-b578-4d92-b7ce-9775c2859117 tempest-ServersAdminTestJSON-399292093 tempest-ServersAdminTestJSON-399292093-project-member] [instance: a89e30e6-b727-440f-a1e8-9c86d19c796d] Terminating instance [ 745.596173] env[61839]: DEBUG oslo_concurrency.lockutils [None req-871442e2-b578-4d92-b7ce-9775c2859117 tempest-ServersAdminTestJSON-399292093 tempest-ServersAdminTestJSON-399292093-project-member] Acquiring lock "refresh_cache-a89e30e6-b727-440f-a1e8-9c86d19c796d" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 745.768614] env[61839]: DEBUG nova.network.neutron [None req-f3a9d5df-26df-4d8c-b25e-3cb525f9f793 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] [instance: d608405b-20d9-42ab-97e3-e129f9c1448b] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 745.867124] env[61839]: DEBUG nova.network.neutron [None req-f3a9d5df-26df-4d8c-b25e-3cb525f9f793 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] [instance: d608405b-20d9-42ab-97e3-e129f9c1448b] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 745.939311] env[61839]: DEBUG nova.network.neutron [req-cae1209c-fdda-400e-b1c8-c1c9d1d9fc49 req-afd73a5a-eb3f-440f-9caa-6249c46e510c service nova] [instance: a89e30e6-b727-440f-a1e8-9c86d19c796d] Instance cache missing network info. 
{{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 745.974295] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3b56f212-0cbc-4766-86f9-61b8fddca264 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] Acquiring lock "49d4720b-83e3-47d9-b727-5bb255de2e7c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 745.974525] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3b56f212-0cbc-4766-86f9-61b8fddca264 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] Lock "49d4720b-83e3-47d9-b727-5bb255de2e7c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 746.016136] env[61839]: DEBUG nova.network.neutron [req-cae1209c-fdda-400e-b1c8-c1c9d1d9fc49 req-afd73a5a-eb3f-440f-9caa-6249c46e510c service nova] [instance: a89e30e6-b727-440f-a1e8-9c86d19c796d] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 746.370629] env[61839]: DEBUG oslo_concurrency.lockutils [None req-f3a9d5df-26df-4d8c-b25e-3cb525f9f793 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] Releasing lock "refresh_cache-d608405b-20d9-42ab-97e3-e129f9c1448b" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 746.370919] env[61839]: DEBUG nova.compute.manager [None req-f3a9d5df-26df-4d8c-b25e-3cb525f9f793 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61839) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 746.371060] env[61839]: DEBUG nova.compute.manager [None req-f3a9d5df-26df-4d8c-b25e-3cb525f9f793 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] [instance: d608405b-20d9-42ab-97e3-e129f9c1448b] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 746.371264] env[61839]: DEBUG nova.network.neutron [None req-f3a9d5df-26df-4d8c-b25e-3cb525f9f793 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] [instance: d608405b-20d9-42ab-97e3-e129f9c1448b] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 746.394076] env[61839]: DEBUG nova.network.neutron [None req-f3a9d5df-26df-4d8c-b25e-3cb525f9f793 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] [instance: d608405b-20d9-42ab-97e3-e129f9c1448b] Instance cache missing network info. 
{{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 746.520865] env[61839]: DEBUG oslo_concurrency.lockutils [req-cae1209c-fdda-400e-b1c8-c1c9d1d9fc49 req-afd73a5a-eb3f-440f-9caa-6249c46e510c service nova] Releasing lock "refresh_cache-a89e30e6-b727-440f-a1e8-9c86d19c796d" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 746.521729] env[61839]: DEBUG oslo_concurrency.lockutils [None req-871442e2-b578-4d92-b7ce-9775c2859117 tempest-ServersAdminTestJSON-399292093 tempest-ServersAdminTestJSON-399292093-project-member] Acquired lock "refresh_cache-a89e30e6-b727-440f-a1e8-9c86d19c796d" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 746.521729] env[61839]: DEBUG nova.network.neutron [None req-871442e2-b578-4d92-b7ce-9775c2859117 tempest-ServersAdminTestJSON-399292093 tempest-ServersAdminTestJSON-399292093-project-member] [instance: a89e30e6-b727-440f-a1e8-9c86d19c796d] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 746.531598] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7eb6301-280a-4cea-906b-3039c41dfd61 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.539700] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7daaad4-4b5d-44a7-bb50-96af966d2a2a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.570534] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b6ad6cd-287b-45f6-bc12-1a7e218395d7 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.577936] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02ac2c43-e2ee-401c-9a8d-040c0f6e1e2c {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.591420] env[61839]: DEBUG nova.compute.provider_tree [None req-1e1fbba5-4b37-438c-a0c7-7d09c658ba03 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 746.898432] env[61839]: DEBUG nova.network.neutron [None req-f3a9d5df-26df-4d8c-b25e-3cb525f9f793 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] [instance: d608405b-20d9-42ab-97e3-e129f9c1448b] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 747.039619] env[61839]: DEBUG nova.network.neutron [None req-871442e2-b578-4d92-b7ce-9775c2859117 tempest-ServersAdminTestJSON-399292093 tempest-ServersAdminTestJSON-399292093-project-member] [instance: a89e30e6-b727-440f-a1e8-9c86d19c796d] Instance cache missing network info. 
{{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 747.094063] env[61839]: DEBUG nova.scheduler.client.report [None req-1e1fbba5-4b37-438c-a0c7-7d09c658ba03 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 747.120018] env[61839]: DEBUG nova.network.neutron [None req-871442e2-b578-4d92-b7ce-9775c2859117 tempest-ServersAdminTestJSON-399292093 tempest-ServersAdminTestJSON-399292093-project-member] [instance: a89e30e6-b727-440f-a1e8-9c86d19c796d] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 747.401789] env[61839]: INFO nova.compute.manager [None req-f3a9d5df-26df-4d8c-b25e-3cb525f9f793 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] [instance: d608405b-20d9-42ab-97e3-e129f9c1448b] Took 1.03 seconds to deallocate network for instance. [ 747.447386] env[61839]: DEBUG nova.compute.manager [req-09bb2314-4279-4c47-af35-45d8c1a969d5 req-98161529-8582-4171-9976-61506bc92061 service nova] [instance: a89e30e6-b727-440f-a1e8-9c86d19c796d] Received event network-vif-deleted-d741ef00-1358-49b5-ab14-a50a00d5024f {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 747.599355] env[61839]: DEBUG oslo_concurrency.lockutils [None req-1e1fbba5-4b37-438c-a0c7-7d09c658ba03 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.362s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 747.599823] env[61839]: DEBUG nova.compute.manager [None req-1e1fbba5-4b37-438c-a0c7-7d09c658ba03 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] [instance: 337b31e7-a6c9-4f35-9936-62cff06fe2a1] Start building networks asynchronously for instance. 
{{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 747.603287] env[61839]: DEBUG oslo_concurrency.lockutils [None req-91f7d267-6fad-4c7d-b66a-41e5344a2deb tempest-ServersTestBootFromVolume-874886378 tempest-ServersTestBootFromVolume-874886378-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.783s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 747.623198] env[61839]: DEBUG oslo_concurrency.lockutils [None req-871442e2-b578-4d92-b7ce-9775c2859117 tempest-ServersAdminTestJSON-399292093 tempest-ServersAdminTestJSON-399292093-project-member] Releasing lock "refresh_cache-a89e30e6-b727-440f-a1e8-9c86d19c796d" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 747.623624] env[61839]: DEBUG nova.compute.manager [None req-871442e2-b578-4d92-b7ce-9775c2859117 tempest-ServersAdminTestJSON-399292093 tempest-ServersAdminTestJSON-399292093-project-member] [instance: a89e30e6-b727-440f-a1e8-9c86d19c796d] Start destroying the instance on the hypervisor. {{(pid=61839) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 747.623822] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-871442e2-b578-4d92-b7ce-9775c2859117 tempest-ServersAdminTestJSON-399292093 tempest-ServersAdminTestJSON-399292093-project-member] [instance: a89e30e6-b727-440f-a1e8-9c86d19c796d] Destroying instance {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 747.624126] env[61839]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5697b54a-f421-45c7-8b3f-69fe01a69115 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.633426] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b7079c9-8c8a-4b75-8171-fcac7bb5bd7d {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.655470] env[61839]: WARNING nova.virt.vmwareapi.vmops [None req-871442e2-b578-4d92-b7ce-9775c2859117 tempest-ServersAdminTestJSON-399292093 tempest-ServersAdminTestJSON-399292093-project-member] [instance: a89e30e6-b727-440f-a1e8-9c86d19c796d] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance a89e30e6-b727-440f-a1e8-9c86d19c796d could not be found. [ 747.655584] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-871442e2-b578-4d92-b7ce-9775c2859117 tempest-ServersAdminTestJSON-399292093 tempest-ServersAdminTestJSON-399292093-project-member] [instance: a89e30e6-b727-440f-a1e8-9c86d19c796d] Instance destroyed {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 747.656322] env[61839]: INFO nova.compute.manager [None req-871442e2-b578-4d92-b7ce-9775c2859117 tempest-ServersAdminTestJSON-399292093 tempest-ServersAdminTestJSON-399292093-project-member] [instance: a89e30e6-b727-440f-a1e8-9c86d19c796d] Took 0.03 seconds to destroy the instance on the hypervisor. [ 747.656322] env[61839]: DEBUG oslo.service.loopingcall [None req-871442e2-b578-4d92-b7ce-9775c2859117 tempest-ServersAdminTestJSON-399292093 tempest-ServersAdminTestJSON-399292093-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 747.656322] env[61839]: DEBUG nova.compute.manager [-] [instance: a89e30e6-b727-440f-a1e8-9c86d19c796d] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 747.656322] env[61839]: DEBUG nova.network.neutron [-] [instance: a89e30e6-b727-440f-a1e8-9c86d19c796d] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 747.675262] env[61839]: DEBUG nova.network.neutron [-] [instance: a89e30e6-b727-440f-a1e8-9c86d19c796d] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 748.104814] env[61839]: DEBUG nova.compute.utils [None req-1e1fbba5-4b37-438c-a0c7-7d09c658ba03 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Using /dev/sd instead of None {{(pid=61839) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 748.106518] env[61839]: DEBUG nova.compute.manager [None req-1e1fbba5-4b37-438c-a0c7-7d09c658ba03 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] [instance: 337b31e7-a6c9-4f35-9936-62cff06fe2a1] Allocating IP information in the background. {{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 748.106704] env[61839]: DEBUG nova.network.neutron [None req-1e1fbba5-4b37-438c-a0c7-7d09c658ba03 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] [instance: 337b31e7-a6c9-4f35-9936-62cff06fe2a1] allocate_for_instance() {{(pid=61839) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 748.148642] env[61839]: DEBUG nova.policy [None req-1e1fbba5-4b37-438c-a0c7-7d09c658ba03 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ef80f7db912a4e33a5a50e7432a01ee7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9d28bf7713204dfb9682d9c002cb5449', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61839) authorize /opt/stack/nova/nova/policy.py:201}} [ 748.178099] env[61839]: DEBUG nova.network.neutron [-] [instance: a89e30e6-b727-440f-a1e8-9c86d19c796d] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 748.398056] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d5e786a-f26c-458a-9353-42e7397e405b {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.407672] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25928820-cb57-421d-991c-6dc54c38351d {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.435791] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-066d4f41-b695-4008-ba77-c89dee5c45b4 {{(pid=61839) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.442999] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47ea186a-da14-48f3-be8e-c5876286321d {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.447573] env[61839]: DEBUG nova.network.neutron [None req-1e1fbba5-4b37-438c-a0c7-7d09c658ba03 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] [instance: 337b31e7-a6c9-4f35-9936-62cff06fe2a1] Successfully created port: 499ebbdf-24ab-4dda-b4db-a9c532f944e3 {{(pid=61839) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 748.450797] env[61839]: INFO nova.scheduler.client.report [None req-f3a9d5df-26df-4d8c-b25e-3cb525f9f793 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] Deleted allocations for instance d608405b-20d9-42ab-97e3-e129f9c1448b [ 748.465687] env[61839]: DEBUG nova.compute.provider_tree [None req-91f7d267-6fad-4c7d-b66a-41e5344a2deb tempest-ServersTestBootFromVolume-874886378 tempest-ServersTestBootFromVolume-874886378-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 748.609839] env[61839]: DEBUG nova.compute.manager [None req-1e1fbba5-4b37-438c-a0c7-7d09c658ba03 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] [instance: 337b31e7-a6c9-4f35-9936-62cff06fe2a1] Start building block device mappings for instance. {{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 748.683052] env[61839]: INFO nova.compute.manager [-] [instance: a89e30e6-b727-440f-a1e8-9c86d19c796d] Took 1.03 seconds to deallocate network for instance. 
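[editor's note] Every failed-build traceback in this log passes through NetworkInfoAsyncWrapper._sync_wrapper: port allocation runs in a green thread while the build continues ("Successfully created port ..." above happens in the background), and the result is only awaited when the VIF list is first iterated inside the VMware driver. A minimal sketch of that deferred-wait pattern, assuming eventlet is installed; the class and helper names are illustrative, not Nova's actual code:

```python
import eventlet

class AsyncNetworkInfo:
    """Illustrative stand-in for the deferred network-info pattern: the
    allocation runs in a green thread and is only awaited when the result
    is first iterated, so an allocation error surfaces far from its cause."""

    def __init__(self, allocate_fn, *args):
        self._gt = eventlet.spawn(allocate_fn, *args)  # background allocation
        self._result = None

    def __iter__(self):
        if self._result is None:
            # wait() re-raises any exception from the green thread, which is
            # why PortBindingFailed pops out of the driver's VIF loop above.
            self._result = self._gt.wait()
        return iter(self._result)

def allocate(port_id):
    # Stand-in for network_api.allocate_for_instance(); may raise instead.
    return [{'port_id': port_id}]

vifs = AsyncNetworkInfo(allocate, 'd741ef00-1358-49b5-ab14-a50a00d5024f')
for vif in vifs:  # a Neutron-side failure would surface at this point
    print(vif)
```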
[ 748.685537] env[61839]: DEBUG nova.compute.claims [None req-871442e2-b578-4d92-b7ce-9775c2859117 tempest-ServersAdminTestJSON-399292093 tempest-ServersAdminTestJSON-399292093-project-member] [instance: a89e30e6-b727-440f-a1e8-9c86d19c796d] Aborting claim: {{(pid=61839) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 748.685716] env[61839]: DEBUG oslo_concurrency.lockutils [None req-871442e2-b578-4d92-b7ce-9775c2859117 tempest-ServersAdminTestJSON-399292093 tempest-ServersAdminTestJSON-399292093-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 748.970054] env[61839]: DEBUG nova.scheduler.client.report [None req-91f7d267-6fad-4c7d-b66a-41e5344a2deb tempest-ServersTestBootFromVolume-874886378 tempest-ServersTestBootFromVolume-874886378-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 748.974359] env[61839]: DEBUG oslo_concurrency.lockutils [None req-f3a9d5df-26df-4d8c-b25e-3cb525f9f793 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] Lock "d608405b-20d9-42ab-97e3-e129f9c1448b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 158.814s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 749.474752] env[61839]: DEBUG oslo_concurrency.lockutils [None req-91f7d267-6fad-4c7d-b66a-41e5344a2deb tempest-ServersTestBootFromVolume-874886378 tempest-ServersTestBootFromVolume-874886378-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.872s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 749.478018] env[61839]: ERROR nova.compute.manager [None req-91f7d267-6fad-4c7d-b66a-41e5344a2deb tempest-ServersTestBootFromVolume-874886378 tempest-ServersTestBootFromVolume-874886378-project-member] [instance: eca07795-319e-401d-8f05-41a29bab2689] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 25e558e4-fb7a-4a32-887a-7b2c34e1f130, please check neutron logs for more information. 
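[editor's note] The inventory record above ("Inventory has not changed for provider cef329e6-...") is repeated verbatim for each claim. A rough reading of those numbers, on the usual assumption that effective schedulable capacity per resource class is (total - reserved) * allocation_ratio — an illustrative calculation, not code from Nova or Placement:

```python
# Inventory as reported in the log record above.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}
for rc, inv in inventory.items():
    capacity = int((inv['total'] - inv['reserved']) * inv['allocation_ratio'])
    print(f"{rc}: schedulable capacity {capacity}")
# VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400
```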
[ 749.478018] env[61839]: ERROR nova.compute.manager [instance: eca07795-319e-401d-8f05-41a29bab2689] Traceback (most recent call last): [ 749.478018] env[61839]: ERROR nova.compute.manager [instance: eca07795-319e-401d-8f05-41a29bab2689] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 749.478018] env[61839]: ERROR nova.compute.manager [instance: eca07795-319e-401d-8f05-41a29bab2689] self.driver.spawn(context, instance, image_meta, [ 749.478018] env[61839]: ERROR nova.compute.manager [instance: eca07795-319e-401d-8f05-41a29bab2689] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 749.478018] env[61839]: ERROR nova.compute.manager [instance: eca07795-319e-401d-8f05-41a29bab2689] self._vmops.spawn(context, instance, image_meta, injected_files, [ 749.478018] env[61839]: ERROR nova.compute.manager [instance: eca07795-319e-401d-8f05-41a29bab2689] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 749.478018] env[61839]: ERROR nova.compute.manager [instance: eca07795-319e-401d-8f05-41a29bab2689] vm_ref = self.build_virtual_machine(instance, [ 749.478018] env[61839]: ERROR nova.compute.manager [instance: eca07795-319e-401d-8f05-41a29bab2689] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 749.478018] env[61839]: ERROR nova.compute.manager [instance: eca07795-319e-401d-8f05-41a29bab2689] vif_infos = vmwarevif.get_vif_info(self._session, [ 749.478018] env[61839]: ERROR nova.compute.manager [instance: eca07795-319e-401d-8f05-41a29bab2689] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 749.478556] env[61839]: ERROR nova.compute.manager [instance: eca07795-319e-401d-8f05-41a29bab2689] for vif in network_info: [ 749.478556] env[61839]: ERROR nova.compute.manager [instance: eca07795-319e-401d-8f05-41a29bab2689] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 749.478556] env[61839]: ERROR nova.compute.manager [instance: eca07795-319e-401d-8f05-41a29bab2689] return self._sync_wrapper(fn, *args, **kwargs) [ 749.478556] env[61839]: ERROR nova.compute.manager [instance: eca07795-319e-401d-8f05-41a29bab2689] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 749.478556] env[61839]: ERROR nova.compute.manager [instance: eca07795-319e-401d-8f05-41a29bab2689] self.wait() [ 749.478556] env[61839]: ERROR nova.compute.manager [instance: eca07795-319e-401d-8f05-41a29bab2689] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 749.478556] env[61839]: ERROR nova.compute.manager [instance: eca07795-319e-401d-8f05-41a29bab2689] self[:] = self._gt.wait() [ 749.478556] env[61839]: ERROR nova.compute.manager [instance: eca07795-319e-401d-8f05-41a29bab2689] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 749.478556] env[61839]: ERROR nova.compute.manager [instance: eca07795-319e-401d-8f05-41a29bab2689] return self._exit_event.wait() [ 749.478556] env[61839]: ERROR nova.compute.manager [instance: eca07795-319e-401d-8f05-41a29bab2689] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 749.478556] env[61839]: ERROR nova.compute.manager [instance: eca07795-319e-401d-8f05-41a29bab2689] current.throw(*self._exc) [ 749.478556] env[61839]: ERROR nova.compute.manager [instance: eca07795-319e-401d-8f05-41a29bab2689] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
749.478556] env[61839]: ERROR nova.compute.manager [instance: eca07795-319e-401d-8f05-41a29bab2689] result = function(*args, **kwargs) [ 749.479033] env[61839]: ERROR nova.compute.manager [instance: eca07795-319e-401d-8f05-41a29bab2689] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 749.479033] env[61839]: ERROR nova.compute.manager [instance: eca07795-319e-401d-8f05-41a29bab2689] return func(*args, **kwargs) [ 749.479033] env[61839]: ERROR nova.compute.manager [instance: eca07795-319e-401d-8f05-41a29bab2689] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 749.479033] env[61839]: ERROR nova.compute.manager [instance: eca07795-319e-401d-8f05-41a29bab2689] raise e [ 749.479033] env[61839]: ERROR nova.compute.manager [instance: eca07795-319e-401d-8f05-41a29bab2689] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 749.479033] env[61839]: ERROR nova.compute.manager [instance: eca07795-319e-401d-8f05-41a29bab2689] nwinfo = self.network_api.allocate_for_instance( [ 749.479033] env[61839]: ERROR nova.compute.manager [instance: eca07795-319e-401d-8f05-41a29bab2689] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 749.479033] env[61839]: ERROR nova.compute.manager [instance: eca07795-319e-401d-8f05-41a29bab2689] created_port_ids = self._update_ports_for_instance( [ 749.479033] env[61839]: ERROR nova.compute.manager [instance: eca07795-319e-401d-8f05-41a29bab2689] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 749.479033] env[61839]: ERROR nova.compute.manager [instance: eca07795-319e-401d-8f05-41a29bab2689] with excutils.save_and_reraise_exception(): [ 749.479033] env[61839]: ERROR nova.compute.manager [instance: eca07795-319e-401d-8f05-41a29bab2689] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 749.479033] env[61839]: ERROR nova.compute.manager [instance: eca07795-319e-401d-8f05-41a29bab2689] self.force_reraise() [ 749.479033] env[61839]: ERROR nova.compute.manager [instance: eca07795-319e-401d-8f05-41a29bab2689] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 749.479568] env[61839]: ERROR nova.compute.manager [instance: eca07795-319e-401d-8f05-41a29bab2689] raise self.value [ 749.479568] env[61839]: ERROR nova.compute.manager [instance: eca07795-319e-401d-8f05-41a29bab2689] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 749.479568] env[61839]: ERROR nova.compute.manager [instance: eca07795-319e-401d-8f05-41a29bab2689] updated_port = self._update_port( [ 749.479568] env[61839]: ERROR nova.compute.manager [instance: eca07795-319e-401d-8f05-41a29bab2689] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 749.479568] env[61839]: ERROR nova.compute.manager [instance: eca07795-319e-401d-8f05-41a29bab2689] _ensure_no_port_binding_failure(port) [ 749.479568] env[61839]: ERROR nova.compute.manager [instance: eca07795-319e-401d-8f05-41a29bab2689] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 749.479568] env[61839]: ERROR nova.compute.manager [instance: eca07795-319e-401d-8f05-41a29bab2689] raise exception.PortBindingFailed(port_id=port['id']) [ 749.479568] env[61839]: ERROR nova.compute.manager [instance: eca07795-319e-401d-8f05-41a29bab2689] nova.exception.PortBindingFailed: Binding failed for 
port 25e558e4-fb7a-4a32-887a-7b2c34e1f130, please check neutron logs for more information. [ 749.479568] env[61839]: ERROR nova.compute.manager [instance: eca07795-319e-401d-8f05-41a29bab2689] [ 749.479568] env[61839]: DEBUG nova.compute.utils [None req-91f7d267-6fad-4c7d-b66a-41e5344a2deb tempest-ServersTestBootFromVolume-874886378 tempest-ServersTestBootFromVolume-874886378-project-member] [instance: eca07795-319e-401d-8f05-41a29bab2689] Binding failed for port 25e558e4-fb7a-4a32-887a-7b2c34e1f130, please check neutron logs for more information. {{(pid=61839) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 749.484078] env[61839]: DEBUG oslo_concurrency.lockutils [None req-f0f367ff-b05e-4ad3-a843-4493b18ee6be tempest-ListImageFiltersTestJSON-1646419560 tempest-ListImageFiltersTestJSON-1646419560-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.795s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 749.484078] env[61839]: DEBUG nova.compute.manager [None req-b5bdc0dc-c0c3-4680-812b-c7e0bf9d39a6 tempest-InstanceActionsNegativeTestJSON-917154709 tempest-InstanceActionsNegativeTestJSON-917154709-project-member] [instance: 821b784d-dc69-4c54-bccf-76693c34e19d] Starting instance... {{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 749.488016] env[61839]: DEBUG nova.compute.manager [None req-91f7d267-6fad-4c7d-b66a-41e5344a2deb tempest-ServersTestBootFromVolume-874886378 tempest-ServersTestBootFromVolume-874886378-project-member] [instance: eca07795-319e-401d-8f05-41a29bab2689] Build of instance eca07795-319e-401d-8f05-41a29bab2689 was re-scheduled: Binding failed for port 25e558e4-fb7a-4a32-887a-7b2c34e1f130, please check neutron logs for more information. 
{{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 749.488016] env[61839]: DEBUG nova.compute.manager [None req-91f7d267-6fad-4c7d-b66a-41e5344a2deb tempest-ServersTestBootFromVolume-874886378 tempest-ServersTestBootFromVolume-874886378-project-member] [instance: eca07795-319e-401d-8f05-41a29bab2689] Unplugging VIFs for instance {{(pid=61839) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 749.489843] env[61839]: DEBUG oslo_concurrency.lockutils [None req-91f7d267-6fad-4c7d-b66a-41e5344a2deb tempest-ServersTestBootFromVolume-874886378 tempest-ServersTestBootFromVolume-874886378-project-member] Acquiring lock "refresh_cache-eca07795-319e-401d-8f05-41a29bab2689" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 749.489843] env[61839]: DEBUG oslo_concurrency.lockutils [None req-91f7d267-6fad-4c7d-b66a-41e5344a2deb tempest-ServersTestBootFromVolume-874886378 tempest-ServersTestBootFromVolume-874886378-project-member] Acquired lock "refresh_cache-eca07795-319e-401d-8f05-41a29bab2689" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 749.489843] env[61839]: DEBUG nova.network.neutron [None req-91f7d267-6fad-4c7d-b66a-41e5344a2deb tempest-ServersTestBootFromVolume-874886378 tempest-ServersTestBootFromVolume-874886378-project-member] [instance: eca07795-319e-401d-8f05-41a29bab2689] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 749.604843] env[61839]: DEBUG nova.compute.manager [req-8b23af29-1006-4cc8-a65f-d69b8f7aac6e req-796c36f3-952f-45b7-b883-add8bb92e217 service nova] [instance: 337b31e7-a6c9-4f35-9936-62cff06fe2a1] Received event network-changed-499ebbdf-24ab-4dda-b4db-a9c532f944e3 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 749.605050] env[61839]: DEBUG nova.compute.manager [req-8b23af29-1006-4cc8-a65f-d69b8f7aac6e req-796c36f3-952f-45b7-b883-add8bb92e217 service nova] [instance: 337b31e7-a6c9-4f35-9936-62cff06fe2a1] Refreshing instance network info cache due to event network-changed-499ebbdf-24ab-4dda-b4db-a9c532f944e3. 
{{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 749.605269] env[61839]: DEBUG oslo_concurrency.lockutils [req-8b23af29-1006-4cc8-a65f-d69b8f7aac6e req-796c36f3-952f-45b7-b883-add8bb92e217 service nova] Acquiring lock "refresh_cache-337b31e7-a6c9-4f35-9936-62cff06fe2a1" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 749.605418] env[61839]: DEBUG oslo_concurrency.lockutils [req-8b23af29-1006-4cc8-a65f-d69b8f7aac6e req-796c36f3-952f-45b7-b883-add8bb92e217 service nova] Acquired lock "refresh_cache-337b31e7-a6c9-4f35-9936-62cff06fe2a1" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 749.605695] env[61839]: DEBUG nova.network.neutron [req-8b23af29-1006-4cc8-a65f-d69b8f7aac6e req-796c36f3-952f-45b7-b883-add8bb92e217 service nova] [instance: 337b31e7-a6c9-4f35-9936-62cff06fe2a1] Refreshing network info cache for port 499ebbdf-24ab-4dda-b4db-a9c532f944e3 {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 749.619848] env[61839]: DEBUG nova.compute.manager [None req-1e1fbba5-4b37-438c-a0c7-7d09c658ba03 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] [instance: 337b31e7-a6c9-4f35-9936-62cff06fe2a1] Start spawning the instance on the hypervisor. {{(pid=61839) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 749.647388] env[61839]: DEBUG nova.virt.hardware [None req-1e1fbba5-4b37-438c-a0c7-7d09c658ba03 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-18T16:51:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-18T16:50:57Z,direct_url=,disk_format='vmdk',id=e497cc62-282a-4a70-9770-22d80d8a1013,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a98e034276e44534a9feace637762da7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-18T16:50:58Z,virtual_size=,visibility=), allow threads: False {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 749.647629] env[61839]: DEBUG nova.virt.hardware [None req-1e1fbba5-4b37-438c-a0c7-7d09c658ba03 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Flavor limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 749.647784] env[61839]: DEBUG nova.virt.hardware [None req-1e1fbba5-4b37-438c-a0c7-7d09c658ba03 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Image limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 749.647964] env[61839]: DEBUG nova.virt.hardware [None req-1e1fbba5-4b37-438c-a0c7-7d09c658ba03 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Flavor pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 749.648128] env[61839]: DEBUG nova.virt.hardware [None 
req-1e1fbba5-4b37-438c-a0c7-7d09c658ba03 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Image pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 749.648277] env[61839]: DEBUG nova.virt.hardware [None req-1e1fbba5-4b37-438c-a0c7-7d09c658ba03 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 749.648477] env[61839]: DEBUG nova.virt.hardware [None req-1e1fbba5-4b37-438c-a0c7-7d09c658ba03 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 749.648634] env[61839]: DEBUG nova.virt.hardware [None req-1e1fbba5-4b37-438c-a0c7-7d09c658ba03 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 749.648798] env[61839]: DEBUG nova.virt.hardware [None req-1e1fbba5-4b37-438c-a0c7-7d09c658ba03 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Got 1 possible topologies {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 749.648956] env[61839]: DEBUG nova.virt.hardware [None req-1e1fbba5-4b37-438c-a0c7-7d09c658ba03 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 749.649142] env[61839]: DEBUG nova.virt.hardware [None req-1e1fbba5-4b37-438c-a0c7-7d09c658ba03 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 749.650206] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8c3bd68-c458-4367-8182-a6679be8f0b9 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.661014] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28e0a5b0-c6e0-4863-a030-728071a82996 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.838437] env[61839]: ERROR nova.compute.manager [None req-1e1fbba5-4b37-438c-a0c7-7d09c658ba03 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 499ebbdf-24ab-4dda-b4db-a9c532f944e3, please check neutron logs for more information. 
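[editor's note] The nova.virt.hardware lines above walk through CPU-topology selection: with no flavor or image constraints (preferred 0:0:0, limits 65536 each), the only topology whose sockets x cores x threads product equals the 1 requested vCPU is 1:1:1. A hypothetical enumeration of that search, not Nova's internal implementation:

```python
def possible_topologies(vcpus, max_sockets, max_cores, max_threads):
    """Yield (sockets, cores, threads) triples whose product is vcpus,
    within the given limits -- mirroring the 'Got 1 possible topologies'
    line above for a 1-vCPU flavor."""
    for s in range(1, min(max_sockets, vcpus) + 1):
        if vcpus % s:
            continue
        rem = vcpus // s
        for c in range(1, min(max_cores, rem) + 1):
            if rem % c:
                continue
            t = rem // c
            if t <= max_threads:
                yield (s, c, t)

print(list(possible_topologies(1, 65536, 65536, 65536)))  # [(1, 1, 1)]
```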
[ 749.838437] env[61839]: ERROR nova.compute.manager Traceback (most recent call last): [ 749.838437] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 749.838437] env[61839]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 749.838437] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 749.838437] env[61839]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 749.838437] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 749.838437] env[61839]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 749.838437] env[61839]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 749.838437] env[61839]: ERROR nova.compute.manager self.force_reraise() [ 749.838437] env[61839]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 749.838437] env[61839]: ERROR nova.compute.manager raise self.value [ 749.838437] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 749.838437] env[61839]: ERROR nova.compute.manager updated_port = self._update_port( [ 749.838437] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 749.838437] env[61839]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 749.839138] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 749.839138] env[61839]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 749.839138] env[61839]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 499ebbdf-24ab-4dda-b4db-a9c532f944e3, please check neutron logs for more information. 
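[editor's note] Each of these tracebacks terminates in the same guard (nova/network/neutron.py line 294 in the frames above). A paraphrased sketch of that check, assuming Neutron marks an unbindable port with vif_type 'binding_failed'; the exception class here is a simplified stand-in for nova.exception.PortBindingFailed:

```python
class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__(f"Binding failed for port {port_id}, "
                         "please check neutron logs for more information.")

def ensure_no_port_binding_failure(port):
    # Neutron reports a port it could not bind with vif_type 'binding_failed';
    # rather than hand a dead port to the virt driver, Nova raises here.
    if port.get('binding:vif_type') == 'binding_failed':
        raise PortBindingFailed(port_id=port['id'])

try:
    ensure_no_port_binding_failure(
        {'id': '499ebbdf-24ab-4dda-b4db-a9c532f944e3',
         'binding:vif_type': 'binding_failed'})
except PortBindingFailed as exc:
    print(exc)  # matches the message format seen throughout this log
```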
[ 749.839138] env[61839]: ERROR nova.compute.manager [ 749.839138] env[61839]: Traceback (most recent call last): [ 749.839138] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 749.839138] env[61839]: listener.cb(fileno) [ 749.839138] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 749.839138] env[61839]: result = function(*args, **kwargs) [ 749.839138] env[61839]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 749.839138] env[61839]: return func(*args, **kwargs) [ 749.839138] env[61839]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 749.839138] env[61839]: raise e [ 749.839138] env[61839]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 749.839138] env[61839]: nwinfo = self.network_api.allocate_for_instance( [ 749.839138] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 749.839138] env[61839]: created_port_ids = self._update_ports_for_instance( [ 749.839138] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 749.839138] env[61839]: with excutils.save_and_reraise_exception(): [ 749.839138] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 749.839138] env[61839]: self.force_reraise() [ 749.839138] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 749.839138] env[61839]: raise self.value [ 749.839138] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 749.839138] env[61839]: updated_port = self._update_port( [ 749.839138] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 749.839138] env[61839]: _ensure_no_port_binding_failure(port) [ 749.839138] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 749.839138] env[61839]: raise exception.PortBindingFailed(port_id=port['id']) [ 749.840383] env[61839]: nova.exception.PortBindingFailed: Binding failed for port 499ebbdf-24ab-4dda-b4db-a9c532f944e3, please check neutron logs for more information. [ 749.840383] env[61839]: Removing descriptor: 17 [ 749.840383] env[61839]: ERROR nova.compute.manager [None req-1e1fbba5-4b37-438c-a0c7-7d09c658ba03 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] [instance: 337b31e7-a6c9-4f35-9936-62cff06fe2a1] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 499ebbdf-24ab-4dda-b4db-a9c532f944e3, please check neutron logs for more information. 
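[editor's note] The "self.force_reraise() ... raise self.value" frames that interleave every traceback come from oslo_utils.excutils.save_and_reraise_exception, which is entered from inside an except block so cleanup can run before the original exception is re-raised. A toy version illustrating that semantic, not the real oslo implementation:

```python
import sys

class save_and_reraise_exception:
    """Toy sketch: save the exception active in the enclosing except block,
    let cleanup run in the with-body, then re-raise the original on exit."""

    def __enter__(self):
        self.value = sys.exc_info()[1]  # the in-flight exception, if any
        self.reraise = True
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        if exc_type is None and self.reraise and self.value is not None:
            raise self.value  # the "raise self.value" frame in the log
        return False  # if cleanup itself raised, let that propagate instead

try:
    try:
        raise RuntimeError("port update failed")
    except RuntimeError:
        with save_and_reraise_exception():
            pass  # cleanup (e.g. unbinding ports) would go here
except RuntimeError as exc:
    print("re-raised:", exc)  # re-raised: port update failed
```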
[ 749.840383] env[61839]: ERROR nova.compute.manager [instance: 337b31e7-a6c9-4f35-9936-62cff06fe2a1] Traceback (most recent call last): [ 749.840383] env[61839]: ERROR nova.compute.manager [instance: 337b31e7-a6c9-4f35-9936-62cff06fe2a1] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 749.840383] env[61839]: ERROR nova.compute.manager [instance: 337b31e7-a6c9-4f35-9936-62cff06fe2a1] yield resources [ 749.840383] env[61839]: ERROR nova.compute.manager [instance: 337b31e7-a6c9-4f35-9936-62cff06fe2a1] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 749.840383] env[61839]: ERROR nova.compute.manager [instance: 337b31e7-a6c9-4f35-9936-62cff06fe2a1] self.driver.spawn(context, instance, image_meta, [ 749.840383] env[61839]: ERROR nova.compute.manager [instance: 337b31e7-a6c9-4f35-9936-62cff06fe2a1] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 749.840383] env[61839]: ERROR nova.compute.manager [instance: 337b31e7-a6c9-4f35-9936-62cff06fe2a1] self._vmops.spawn(context, instance, image_meta, injected_files, [ 749.840383] env[61839]: ERROR nova.compute.manager [instance: 337b31e7-a6c9-4f35-9936-62cff06fe2a1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 749.840383] env[61839]: ERROR nova.compute.manager [instance: 337b31e7-a6c9-4f35-9936-62cff06fe2a1] vm_ref = self.build_virtual_machine(instance, [ 749.840833] env[61839]: ERROR nova.compute.manager [instance: 337b31e7-a6c9-4f35-9936-62cff06fe2a1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 749.840833] env[61839]: ERROR nova.compute.manager [instance: 337b31e7-a6c9-4f35-9936-62cff06fe2a1] vif_infos = vmwarevif.get_vif_info(self._session, [ 749.840833] env[61839]: ERROR nova.compute.manager [instance: 337b31e7-a6c9-4f35-9936-62cff06fe2a1] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 749.840833] env[61839]: ERROR nova.compute.manager [instance: 337b31e7-a6c9-4f35-9936-62cff06fe2a1] for vif in network_info: [ 749.840833] env[61839]: ERROR nova.compute.manager [instance: 337b31e7-a6c9-4f35-9936-62cff06fe2a1] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 749.840833] env[61839]: ERROR nova.compute.manager [instance: 337b31e7-a6c9-4f35-9936-62cff06fe2a1] return self._sync_wrapper(fn, *args, **kwargs) [ 749.840833] env[61839]: ERROR nova.compute.manager [instance: 337b31e7-a6c9-4f35-9936-62cff06fe2a1] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 749.840833] env[61839]: ERROR nova.compute.manager [instance: 337b31e7-a6c9-4f35-9936-62cff06fe2a1] self.wait() [ 749.840833] env[61839]: ERROR nova.compute.manager [instance: 337b31e7-a6c9-4f35-9936-62cff06fe2a1] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 749.840833] env[61839]: ERROR nova.compute.manager [instance: 337b31e7-a6c9-4f35-9936-62cff06fe2a1] self[:] = self._gt.wait() [ 749.840833] env[61839]: ERROR nova.compute.manager [instance: 337b31e7-a6c9-4f35-9936-62cff06fe2a1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 749.840833] env[61839]: ERROR nova.compute.manager [instance: 337b31e7-a6c9-4f35-9936-62cff06fe2a1] return self._exit_event.wait() [ 749.840833] env[61839]: ERROR nova.compute.manager [instance: 337b31e7-a6c9-4f35-9936-62cff06fe2a1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 749.841334] env[61839]: ERROR 
nova.compute.manager [instance: 337b31e7-a6c9-4f35-9936-62cff06fe2a1] result = hub.switch() [ 749.841334] env[61839]: ERROR nova.compute.manager [instance: 337b31e7-a6c9-4f35-9936-62cff06fe2a1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 749.841334] env[61839]: ERROR nova.compute.manager [instance: 337b31e7-a6c9-4f35-9936-62cff06fe2a1] return self.greenlet.switch() [ 749.841334] env[61839]: ERROR nova.compute.manager [instance: 337b31e7-a6c9-4f35-9936-62cff06fe2a1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 749.841334] env[61839]: ERROR nova.compute.manager [instance: 337b31e7-a6c9-4f35-9936-62cff06fe2a1] result = function(*args, **kwargs) [ 749.841334] env[61839]: ERROR nova.compute.manager [instance: 337b31e7-a6c9-4f35-9936-62cff06fe2a1] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 749.841334] env[61839]: ERROR nova.compute.manager [instance: 337b31e7-a6c9-4f35-9936-62cff06fe2a1] return func(*args, **kwargs) [ 749.841334] env[61839]: ERROR nova.compute.manager [instance: 337b31e7-a6c9-4f35-9936-62cff06fe2a1] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 749.841334] env[61839]: ERROR nova.compute.manager [instance: 337b31e7-a6c9-4f35-9936-62cff06fe2a1] raise e [ 749.841334] env[61839]: ERROR nova.compute.manager [instance: 337b31e7-a6c9-4f35-9936-62cff06fe2a1] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 749.841334] env[61839]: ERROR nova.compute.manager [instance: 337b31e7-a6c9-4f35-9936-62cff06fe2a1] nwinfo = self.network_api.allocate_for_instance( [ 749.841334] env[61839]: ERROR nova.compute.manager [instance: 337b31e7-a6c9-4f35-9936-62cff06fe2a1] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 749.841334] env[61839]: ERROR nova.compute.manager [instance: 337b31e7-a6c9-4f35-9936-62cff06fe2a1] created_port_ids = self._update_ports_for_instance( [ 749.841829] env[61839]: ERROR nova.compute.manager [instance: 337b31e7-a6c9-4f35-9936-62cff06fe2a1] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 749.841829] env[61839]: ERROR nova.compute.manager [instance: 337b31e7-a6c9-4f35-9936-62cff06fe2a1] with excutils.save_and_reraise_exception(): [ 749.841829] env[61839]: ERROR nova.compute.manager [instance: 337b31e7-a6c9-4f35-9936-62cff06fe2a1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 749.841829] env[61839]: ERROR nova.compute.manager [instance: 337b31e7-a6c9-4f35-9936-62cff06fe2a1] self.force_reraise() [ 749.841829] env[61839]: ERROR nova.compute.manager [instance: 337b31e7-a6c9-4f35-9936-62cff06fe2a1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 749.841829] env[61839]: ERROR nova.compute.manager [instance: 337b31e7-a6c9-4f35-9936-62cff06fe2a1] raise self.value [ 749.841829] env[61839]: ERROR nova.compute.manager [instance: 337b31e7-a6c9-4f35-9936-62cff06fe2a1] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 749.841829] env[61839]: ERROR nova.compute.manager [instance: 337b31e7-a6c9-4f35-9936-62cff06fe2a1] updated_port = self._update_port( [ 749.841829] env[61839]: ERROR nova.compute.manager [instance: 337b31e7-a6c9-4f35-9936-62cff06fe2a1] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 749.841829] 
env[61839]: ERROR nova.compute.manager [instance: 337b31e7-a6c9-4f35-9936-62cff06fe2a1] _ensure_no_port_binding_failure(port) [ 749.841829] env[61839]: ERROR nova.compute.manager [instance: 337b31e7-a6c9-4f35-9936-62cff06fe2a1] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 749.841829] env[61839]: ERROR nova.compute.manager [instance: 337b31e7-a6c9-4f35-9936-62cff06fe2a1] raise exception.PortBindingFailed(port_id=port['id']) [ 749.842640] env[61839]: ERROR nova.compute.manager [instance: 337b31e7-a6c9-4f35-9936-62cff06fe2a1] nova.exception.PortBindingFailed: Binding failed for port 499ebbdf-24ab-4dda-b4db-a9c532f944e3, please check neutron logs for more information. [ 749.842640] env[61839]: ERROR nova.compute.manager [instance: 337b31e7-a6c9-4f35-9936-62cff06fe2a1] [ 749.842640] env[61839]: INFO nova.compute.manager [None req-1e1fbba5-4b37-438c-a0c7-7d09c658ba03 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] [instance: 337b31e7-a6c9-4f35-9936-62cff06fe2a1] Terminating instance [ 749.842640] env[61839]: DEBUG oslo_concurrency.lockutils [None req-1e1fbba5-4b37-438c-a0c7-7d09c658ba03 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Acquiring lock "refresh_cache-337b31e7-a6c9-4f35-9936-62cff06fe2a1" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 750.013364] env[61839]: DEBUG oslo_concurrency.lockutils [None req-b5bdc0dc-c0c3-4680-812b-c7e0bf9d39a6 tempest-InstanceActionsNegativeTestJSON-917154709 tempest-InstanceActionsNegativeTestJSON-917154709-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 750.018172] env[61839]: DEBUG nova.network.neutron [None req-91f7d267-6fad-4c7d-b66a-41e5344a2deb tempest-ServersTestBootFromVolume-874886378 tempest-ServersTestBootFromVolume-874886378-project-member] [instance: eca07795-319e-401d-8f05-41a29bab2689] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 750.139439] env[61839]: DEBUG nova.network.neutron [req-8b23af29-1006-4cc8-a65f-d69b8f7aac6e req-796c36f3-952f-45b7-b883-add8bb92e217 service nova] [instance: 337b31e7-a6c9-4f35-9936-62cff06fe2a1] Instance cache missing network info. 
{{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 750.179723] env[61839]: DEBUG nova.network.neutron [None req-91f7d267-6fad-4c7d-b66a-41e5344a2deb tempest-ServersTestBootFromVolume-874886378 tempest-ServersTestBootFromVolume-874886378-project-member] [instance: eca07795-319e-401d-8f05-41a29bab2689] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 750.262799] env[61839]: DEBUG nova.network.neutron [req-8b23af29-1006-4cc8-a65f-d69b8f7aac6e req-796c36f3-952f-45b7-b883-add8bb92e217 service nova] [instance: 337b31e7-a6c9-4f35-9936-62cff06fe2a1] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 750.341671] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88d82444-3c74-4c88-9ffe-3ba0e37908b3 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.349679] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f94bade-c60b-40d8-9ca1-ee0657702c68 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.380817] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9432bffc-e967-442d-8c01-4e849c6097ce {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.388062] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-385985e5-6d8b-435c-bc26-b933754296fa {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.403923] env[61839]: DEBUG nova.compute.provider_tree [None req-f0f367ff-b05e-4ad3-a843-4493b18ee6be tempest-ListImageFiltersTestJSON-1646419560 tempest-ListImageFiltersTestJSON-1646419560-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 750.681217] env[61839]: DEBUG oslo_concurrency.lockutils [None req-91f7d267-6fad-4c7d-b66a-41e5344a2deb tempest-ServersTestBootFromVolume-874886378 tempest-ServersTestBootFromVolume-874886378-project-member] Releasing lock "refresh_cache-eca07795-319e-401d-8f05-41a29bab2689" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 750.681548] env[61839]: DEBUG nova.compute.manager [None req-91f7d267-6fad-4c7d-b66a-41e5344a2deb tempest-ServersTestBootFromVolume-874886378 tempest-ServersTestBootFromVolume-874886378-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61839) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 750.681715] env[61839]: DEBUG nova.compute.manager [None req-91f7d267-6fad-4c7d-b66a-41e5344a2deb tempest-ServersTestBootFromVolume-874886378 tempest-ServersTestBootFromVolume-874886378-project-member] [instance: eca07795-319e-401d-8f05-41a29bab2689] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 750.681885] env[61839]: DEBUG nova.network.neutron [None req-91f7d267-6fad-4c7d-b66a-41e5344a2deb tempest-ServersTestBootFromVolume-874886378 tempest-ServersTestBootFromVolume-874886378-project-member] [instance: eca07795-319e-401d-8f05-41a29bab2689] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 750.708641] env[61839]: DEBUG nova.network.neutron [None req-91f7d267-6fad-4c7d-b66a-41e5344a2deb tempest-ServersTestBootFromVolume-874886378 tempest-ServersTestBootFromVolume-874886378-project-member] [instance: eca07795-319e-401d-8f05-41a29bab2689] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 750.763610] env[61839]: DEBUG oslo_concurrency.lockutils [req-8b23af29-1006-4cc8-a65f-d69b8f7aac6e req-796c36f3-952f-45b7-b883-add8bb92e217 service nova] Releasing lock "refresh_cache-337b31e7-a6c9-4f35-9936-62cff06fe2a1" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 750.764047] env[61839]: DEBUG oslo_concurrency.lockutils [None req-1e1fbba5-4b37-438c-a0c7-7d09c658ba03 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Acquired lock "refresh_cache-337b31e7-a6c9-4f35-9936-62cff06fe2a1" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 750.764241] env[61839]: DEBUG nova.network.neutron [None req-1e1fbba5-4b37-438c-a0c7-7d09c658ba03 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] [instance: 337b31e7-a6c9-4f35-9936-62cff06fe2a1] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 750.907503] env[61839]: DEBUG nova.scheduler.client.report [None req-f0f367ff-b05e-4ad3-a843-4493b18ee6be tempest-ListImageFiltersTestJSON-1646419560 tempest-ListImageFiltersTestJSON-1646419560-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 751.211492] env[61839]: DEBUG nova.network.neutron [None req-91f7d267-6fad-4c7d-b66a-41e5344a2deb tempest-ServersTestBootFromVolume-874886378 tempest-ServersTestBootFromVolume-874886378-project-member] [instance: eca07795-319e-401d-8f05-41a29bab2689] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 751.296847] env[61839]: DEBUG nova.network.neutron [None req-1e1fbba5-4b37-438c-a0c7-7d09c658ba03 
tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] [instance: 337b31e7-a6c9-4f35-9936-62cff06fe2a1] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 751.412464] env[61839]: DEBUG oslo_concurrency.lockutils [None req-f0f367ff-b05e-4ad3-a843-4493b18ee6be tempest-ListImageFiltersTestJSON-1646419560 tempest-ListImageFiltersTestJSON-1646419560-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.934s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 751.413094] env[61839]: ERROR nova.compute.manager [None req-f0f367ff-b05e-4ad3-a843-4493b18ee6be tempest-ListImageFiltersTestJSON-1646419560 tempest-ListImageFiltersTestJSON-1646419560-project-member] [instance: fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 7a0ef298-7bd2-4f0e-abf2-e22778afc52a, please check neutron logs for more information. [ 751.413094] env[61839]: ERROR nova.compute.manager [instance: fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1] Traceback (most recent call last): [ 751.413094] env[61839]: ERROR nova.compute.manager [instance: fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 751.413094] env[61839]: ERROR nova.compute.manager [instance: fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1] self.driver.spawn(context, instance, image_meta, [ 751.413094] env[61839]: ERROR nova.compute.manager [instance: fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 751.413094] env[61839]: ERROR nova.compute.manager [instance: fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1] self._vmops.spawn(context, instance, image_meta, injected_files, [ 751.413094] env[61839]: ERROR nova.compute.manager [instance: fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 751.413094] env[61839]: ERROR nova.compute.manager [instance: fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1] vm_ref = self.build_virtual_machine(instance, [ 751.413094] env[61839]: ERROR nova.compute.manager [instance: fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 751.413094] env[61839]: ERROR nova.compute.manager [instance: fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1] vif_infos = vmwarevif.get_vif_info(self._session, [ 751.413094] env[61839]: ERROR nova.compute.manager [instance: fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 751.413494] env[61839]: ERROR nova.compute.manager [instance: fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1] for vif in network_info: [ 751.413494] env[61839]: ERROR nova.compute.manager [instance: fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 751.413494] env[61839]: ERROR nova.compute.manager [instance: fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1] return self._sync_wrapper(fn, *args, **kwargs) [ 751.413494] env[61839]: ERROR nova.compute.manager [instance: fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 751.413494] env[61839]: ERROR nova.compute.manager [instance: fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1] self.wait() [ 751.413494] 
env[61839]: ERROR nova.compute.manager [instance: fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 751.413494] env[61839]: ERROR nova.compute.manager [instance: fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1] self[:] = self._gt.wait() [ 751.413494] env[61839]: ERROR nova.compute.manager [instance: fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 751.413494] env[61839]: ERROR nova.compute.manager [instance: fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1] return self._exit_event.wait() [ 751.413494] env[61839]: ERROR nova.compute.manager [instance: fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 751.413494] env[61839]: ERROR nova.compute.manager [instance: fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1] result = hub.switch() [ 751.413494] env[61839]: ERROR nova.compute.manager [instance: fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 751.413494] env[61839]: ERROR nova.compute.manager [instance: fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1] return self.greenlet.switch() [ 751.413892] env[61839]: ERROR nova.compute.manager [instance: fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 751.413892] env[61839]: ERROR nova.compute.manager [instance: fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1] result = function(*args, **kwargs) [ 751.413892] env[61839]: ERROR nova.compute.manager [instance: fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 751.413892] env[61839]: ERROR nova.compute.manager [instance: fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1] return func(*args, **kwargs) [ 751.413892] env[61839]: ERROR nova.compute.manager [instance: fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 751.413892] env[61839]: ERROR nova.compute.manager [instance: fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1] raise e [ 751.413892] env[61839]: ERROR nova.compute.manager [instance: fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 751.413892] env[61839]: ERROR nova.compute.manager [instance: fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1] nwinfo = self.network_api.allocate_for_instance( [ 751.413892] env[61839]: ERROR nova.compute.manager [instance: fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 751.413892] env[61839]: ERROR nova.compute.manager [instance: fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1] created_port_ids = self._update_ports_for_instance( [ 751.413892] env[61839]: ERROR nova.compute.manager [instance: fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 751.413892] env[61839]: ERROR nova.compute.manager [instance: fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1] with excutils.save_and_reraise_exception(): [ 751.413892] env[61839]: ERROR nova.compute.manager [instance: fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 751.414325] env[61839]: ERROR nova.compute.manager [instance: fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1] self.force_reraise() [ 
751.414325] env[61839]: ERROR nova.compute.manager [instance: fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 751.414325] env[61839]: ERROR nova.compute.manager [instance: fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1] raise self.value [ 751.414325] env[61839]: ERROR nova.compute.manager [instance: fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 751.414325] env[61839]: ERROR nova.compute.manager [instance: fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1] updated_port = self._update_port( [ 751.414325] env[61839]: ERROR nova.compute.manager [instance: fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 751.414325] env[61839]: ERROR nova.compute.manager [instance: fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1] _ensure_no_port_binding_failure(port) [ 751.414325] env[61839]: ERROR nova.compute.manager [instance: fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 751.414325] env[61839]: ERROR nova.compute.manager [instance: fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1] raise exception.PortBindingFailed(port_id=port['id']) [ 751.414325] env[61839]: ERROR nova.compute.manager [instance: fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1] nova.exception.PortBindingFailed: Binding failed for port 7a0ef298-7bd2-4f0e-abf2-e22778afc52a, please check neutron logs for more information. [ 751.414325] env[61839]: ERROR nova.compute.manager [instance: fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1] [ 751.414652] env[61839]: DEBUG nova.compute.utils [None req-f0f367ff-b05e-4ad3-a843-4493b18ee6be tempest-ListImageFiltersTestJSON-1646419560 tempest-ListImageFiltersTestJSON-1646419560-project-member] [instance: fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1] Binding failed for port 7a0ef298-7bd2-4f0e-abf2-e22778afc52a, please check neutron logs for more information. {{(pid=61839) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 751.416494] env[61839]: DEBUG oslo_concurrency.lockutils [None req-1fbb0b67-167b-4268-b2f4-5c0b4d6a4a9c tempest-ServersNegativeTestMultiTenantJSON-1778640072 tempest-ServersNegativeTestMultiTenantJSON-1778640072-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.082s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 751.418778] env[61839]: DEBUG nova.compute.manager [None req-f0f367ff-b05e-4ad3-a843-4493b18ee6be tempest-ListImageFiltersTestJSON-1646419560 tempest-ListImageFiltersTestJSON-1646419560-project-member] [instance: fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1] Build of instance fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1 was re-scheduled: Binding failed for port 7a0ef298-7bd2-4f0e-abf2-e22778afc52a, please check neutron logs for more information. 
{{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 751.419234] env[61839]: DEBUG nova.compute.manager [None req-f0f367ff-b05e-4ad3-a843-4493b18ee6be tempest-ListImageFiltersTestJSON-1646419560 tempest-ListImageFiltersTestJSON-1646419560-project-member] [instance: fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1] Unplugging VIFs for instance {{(pid=61839) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 751.419517] env[61839]: DEBUG oslo_concurrency.lockutils [None req-f0f367ff-b05e-4ad3-a843-4493b18ee6be tempest-ListImageFiltersTestJSON-1646419560 tempest-ListImageFiltersTestJSON-1646419560-project-member] Acquiring lock "refresh_cache-fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 751.419705] env[61839]: DEBUG oslo_concurrency.lockutils [None req-f0f367ff-b05e-4ad3-a843-4493b18ee6be tempest-ListImageFiltersTestJSON-1646419560 tempest-ListImageFiltersTestJSON-1646419560-project-member] Acquired lock "refresh_cache-fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 751.419926] env[61839]: DEBUG nova.network.neutron [None req-f0f367ff-b05e-4ad3-a843-4493b18ee6be tempest-ListImageFiltersTestJSON-1646419560 tempest-ListImageFiltersTestJSON-1646419560-project-member] [instance: fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 751.421530] env[61839]: DEBUG nova.network.neutron [None req-1e1fbba5-4b37-438c-a0c7-7d09c658ba03 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] [instance: 337b31e7-a6c9-4f35-9936-62cff06fe2a1] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 751.714901] env[61839]: INFO nova.compute.manager [None req-91f7d267-6fad-4c7d-b66a-41e5344a2deb tempest-ServersTestBootFromVolume-874886378 tempest-ServersTestBootFromVolume-874886378-project-member] [instance: eca07795-319e-401d-8f05-41a29bab2689] Took 1.03 seconds to deallocate network for instance. [ 751.749692] env[61839]: DEBUG nova.compute.manager [req-b854207e-c3bc-46cf-b6bd-513462cbfd03 req-9604b0b4-48ee-4a8c-b19b-9a122ee12c71 service nova] [instance: 337b31e7-a6c9-4f35-9936-62cff06fe2a1] Received event network-vif-deleted-499ebbdf-24ab-4dda-b4db-a9c532f944e3 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 751.928504] env[61839]: DEBUG oslo_concurrency.lockutils [None req-1e1fbba5-4b37-438c-a0c7-7d09c658ba03 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Releasing lock "refresh_cache-337b31e7-a6c9-4f35-9936-62cff06fe2a1" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 751.928911] env[61839]: DEBUG nova.compute.manager [None req-1e1fbba5-4b37-438c-a0c7-7d09c658ba03 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] [instance: 337b31e7-a6c9-4f35-9936-62cff06fe2a1] Start destroying the instance on the hypervisor. 
{{(pid=61839) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 751.929115] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-1e1fbba5-4b37-438c-a0c7-7d09c658ba03 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] [instance: 337b31e7-a6c9-4f35-9936-62cff06fe2a1] Destroying instance {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 751.929432] env[61839]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-28180333-c581-42d9-aae5-48653cd998d5 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.939082] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1238dc58-0ae1-4825-92a1-f8a04b96cb79 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.952288] env[61839]: DEBUG nova.network.neutron [None req-f0f367ff-b05e-4ad3-a843-4493b18ee6be tempest-ListImageFiltersTestJSON-1646419560 tempest-ListImageFiltersTestJSON-1646419560-project-member] [instance: fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 751.964635] env[61839]: WARNING nova.virt.vmwareapi.vmops [None req-1e1fbba5-4b37-438c-a0c7-7d09c658ba03 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] [instance: 337b31e7-a6c9-4f35-9936-62cff06fe2a1] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 337b31e7-a6c9-4f35-9936-62cff06fe2a1 could not be found. [ 751.964816] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-1e1fbba5-4b37-438c-a0c7-7d09c658ba03 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] [instance: 337b31e7-a6c9-4f35-9936-62cff06fe2a1] Instance destroyed {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 751.965022] env[61839]: INFO nova.compute.manager [None req-1e1fbba5-4b37-438c-a0c7-7d09c658ba03 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] [instance: 337b31e7-a6c9-4f35-9936-62cff06fe2a1] Took 0.04 seconds to destroy the instance on the hypervisor. [ 751.965418] env[61839]: DEBUG oslo.service.loopingcall [None req-1e1fbba5-4b37-438c-a0c7-7d09c658ba03 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 751.969137] env[61839]: DEBUG nova.compute.manager [-] [instance: 337b31e7-a6c9-4f35-9936-62cff06fe2a1] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 751.969244] env[61839]: DEBUG nova.network.neutron [-] [instance: 337b31e7-a6c9-4f35-9936-62cff06fe2a1] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 751.989020] env[61839]: DEBUG nova.network.neutron [-] [instance: 337b31e7-a6c9-4f35-9936-62cff06fe2a1] Instance cache missing network info. 
{{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 752.034014] env[61839]: DEBUG nova.network.neutron [None req-f0f367ff-b05e-4ad3-a843-4493b18ee6be tempest-ListImageFiltersTestJSON-1646419560 tempest-ListImageFiltersTestJSON-1646419560-project-member] [instance: fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 752.224454] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34dc1004-a061-42e1-8ea4-74acda8091dd {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.234117] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba42df28-63cf-4883-a653-cfbf85f3897e {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.275195] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74b0ddfc-b14f-42ef-a9b3-1966e6a511f9 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.285795] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2649f74-57f8-49db-8708-09a26dd39515 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.303854] env[61839]: DEBUG nova.compute.provider_tree [None req-1fbb0b67-167b-4268-b2f4-5c0b4d6a4a9c tempest-ServersNegativeTestMultiTenantJSON-1778640072 tempest-ServersNegativeTestMultiTenantJSON-1778640072-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 752.339696] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3697bd91-57ab-4763-82ae-06bd5d1d7501 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] Acquiring lock "62959833-5834-4c0a-bf4e-3ac1157b3b0c" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 752.339942] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3697bd91-57ab-4763-82ae-06bd5d1d7501 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] Lock "62959833-5834-4c0a-bf4e-3ac1157b3b0c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 752.491192] env[61839]: DEBUG nova.network.neutron [-] [instance: 337b31e7-a6c9-4f35-9936-62cff06fe2a1] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 752.537837] env[61839]: DEBUG oslo_concurrency.lockutils [None req-f0f367ff-b05e-4ad3-a843-4493b18ee6be tempest-ListImageFiltersTestJSON-1646419560 tempest-ListImageFiltersTestJSON-1646419560-project-member] Releasing lock "refresh_cache-fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1" {{(pid=61839) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 752.537837] env[61839]: DEBUG nova.compute.manager [None req-f0f367ff-b05e-4ad3-a843-4493b18ee6be tempest-ListImageFiltersTestJSON-1646419560 tempest-ListImageFiltersTestJSON-1646419560-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61839) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 752.537837] env[61839]: DEBUG nova.compute.manager [None req-f0f367ff-b05e-4ad3-a843-4493b18ee6be tempest-ListImageFiltersTestJSON-1646419560 tempest-ListImageFiltersTestJSON-1646419560-project-member] [instance: fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 752.537837] env[61839]: DEBUG nova.network.neutron [None req-f0f367ff-b05e-4ad3-a843-4493b18ee6be tempest-ListImageFiltersTestJSON-1646419560 tempest-ListImageFiltersTestJSON-1646419560-project-member] [instance: fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 752.551255] env[61839]: DEBUG nova.network.neutron [None req-f0f367ff-b05e-4ad3-a843-4493b18ee6be tempest-ListImageFiltersTestJSON-1646419560 tempest-ListImageFiltersTestJSON-1646419560-project-member] [instance: fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 752.746178] env[61839]: INFO nova.scheduler.client.report [None req-91f7d267-6fad-4c7d-b66a-41e5344a2deb tempest-ServersTestBootFromVolume-874886378 tempest-ServersTestBootFromVolume-874886378-project-member] Deleted allocations for instance eca07795-319e-401d-8f05-41a29bab2689 [ 752.810510] env[61839]: DEBUG nova.scheduler.client.report [None req-1fbb0b67-167b-4268-b2f4-5c0b4d6a4a9c tempest-ServersNegativeTestMultiTenantJSON-1778640072 tempest-ServersNegativeTestMultiTenantJSON-1778640072-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 752.995068] env[61839]: INFO nova.compute.manager [-] [instance: 337b31e7-a6c9-4f35-9936-62cff06fe2a1] Took 1.02 seconds to deallocate network for instance. 
[ 752.997831] env[61839]: DEBUG nova.compute.claims [None req-1e1fbba5-4b37-438c-a0c7-7d09c658ba03 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] [instance: 337b31e7-a6c9-4f35-9936-62cff06fe2a1] Aborting claim: {{(pid=61839) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 752.997831] env[61839]: DEBUG oslo_concurrency.lockutils [None req-1e1fbba5-4b37-438c-a0c7-7d09c658ba03 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 753.053970] env[61839]: DEBUG nova.network.neutron [None req-f0f367ff-b05e-4ad3-a843-4493b18ee6be tempest-ListImageFiltersTestJSON-1646419560 tempest-ListImageFiltersTestJSON-1646419560-project-member] [instance: fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 753.254526] env[61839]: DEBUG oslo_concurrency.lockutils [None req-91f7d267-6fad-4c7d-b66a-41e5344a2deb tempest-ServersTestBootFromVolume-874886378 tempest-ServersTestBootFromVolume-874886378-project-member] Lock "eca07795-319e-401d-8f05-41a29bab2689" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 161.913s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 753.315760] env[61839]: DEBUG oslo_concurrency.lockutils [None req-1fbb0b67-167b-4268-b2f4-5c0b4d6a4a9c tempest-ServersNegativeTestMultiTenantJSON-1778640072 tempest-ServersNegativeTestMultiTenantJSON-1778640072-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.900s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 753.316423] env[61839]: ERROR nova.compute.manager [None req-1fbb0b67-167b-4268-b2f4-5c0b4d6a4a9c tempest-ServersNegativeTestMultiTenantJSON-1778640072 tempest-ServersNegativeTestMultiTenantJSON-1778640072-project-member] [instance: 4b7ef74e-4018-4c6e-b540-d65c986d1ff2] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port ddba8534-6cd9-4257-91ab-8c0543ed5713, please check neutron logs for more information. 
[ 753.316423] env[61839]: ERROR nova.compute.manager [instance: 4b7ef74e-4018-4c6e-b540-d65c986d1ff2] Traceback (most recent call last): [ 753.316423] env[61839]: ERROR nova.compute.manager [instance: 4b7ef74e-4018-4c6e-b540-d65c986d1ff2] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 753.316423] env[61839]: ERROR nova.compute.manager [instance: 4b7ef74e-4018-4c6e-b540-d65c986d1ff2] self.driver.spawn(context, instance, image_meta, [ 753.316423] env[61839]: ERROR nova.compute.manager [instance: 4b7ef74e-4018-4c6e-b540-d65c986d1ff2] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 753.316423] env[61839]: ERROR nova.compute.manager [instance: 4b7ef74e-4018-4c6e-b540-d65c986d1ff2] self._vmops.spawn(context, instance, image_meta, injected_files, [ 753.316423] env[61839]: ERROR nova.compute.manager [instance: 4b7ef74e-4018-4c6e-b540-d65c986d1ff2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 753.316423] env[61839]: ERROR nova.compute.manager [instance: 4b7ef74e-4018-4c6e-b540-d65c986d1ff2] vm_ref = self.build_virtual_machine(instance, [ 753.316423] env[61839]: ERROR nova.compute.manager [instance: 4b7ef74e-4018-4c6e-b540-d65c986d1ff2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 753.316423] env[61839]: ERROR nova.compute.manager [instance: 4b7ef74e-4018-4c6e-b540-d65c986d1ff2] vif_infos = vmwarevif.get_vif_info(self._session, [ 753.316423] env[61839]: ERROR nova.compute.manager [instance: 4b7ef74e-4018-4c6e-b540-d65c986d1ff2] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 753.316830] env[61839]: ERROR nova.compute.manager [instance: 4b7ef74e-4018-4c6e-b540-d65c986d1ff2] for vif in network_info: [ 753.316830] env[61839]: ERROR nova.compute.manager [instance: 4b7ef74e-4018-4c6e-b540-d65c986d1ff2] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 753.316830] env[61839]: ERROR nova.compute.manager [instance: 4b7ef74e-4018-4c6e-b540-d65c986d1ff2] return self._sync_wrapper(fn, *args, **kwargs) [ 753.316830] env[61839]: ERROR nova.compute.manager [instance: 4b7ef74e-4018-4c6e-b540-d65c986d1ff2] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 753.316830] env[61839]: ERROR nova.compute.manager [instance: 4b7ef74e-4018-4c6e-b540-d65c986d1ff2] self.wait() [ 753.316830] env[61839]: ERROR nova.compute.manager [instance: 4b7ef74e-4018-4c6e-b540-d65c986d1ff2] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 753.316830] env[61839]: ERROR nova.compute.manager [instance: 4b7ef74e-4018-4c6e-b540-d65c986d1ff2] self[:] = self._gt.wait() [ 753.316830] env[61839]: ERROR nova.compute.manager [instance: 4b7ef74e-4018-4c6e-b540-d65c986d1ff2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 753.316830] env[61839]: ERROR nova.compute.manager [instance: 4b7ef74e-4018-4c6e-b540-d65c986d1ff2] return self._exit_event.wait() [ 753.316830] env[61839]: ERROR nova.compute.manager [instance: 4b7ef74e-4018-4c6e-b540-d65c986d1ff2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 753.316830] env[61839]: ERROR nova.compute.manager [instance: 4b7ef74e-4018-4c6e-b540-d65c986d1ff2] result = hub.switch() [ 753.316830] env[61839]: ERROR nova.compute.manager [instance: 4b7ef74e-4018-4c6e-b540-d65c986d1ff2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
753.316830] env[61839]: ERROR nova.compute.manager [instance: 4b7ef74e-4018-4c6e-b540-d65c986d1ff2] return self.greenlet.switch() [ 753.317308] env[61839]: ERROR nova.compute.manager [instance: 4b7ef74e-4018-4c6e-b540-d65c986d1ff2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 753.317308] env[61839]: ERROR nova.compute.manager [instance: 4b7ef74e-4018-4c6e-b540-d65c986d1ff2] result = function(*args, **kwargs) [ 753.317308] env[61839]: ERROR nova.compute.manager [instance: 4b7ef74e-4018-4c6e-b540-d65c986d1ff2] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 753.317308] env[61839]: ERROR nova.compute.manager [instance: 4b7ef74e-4018-4c6e-b540-d65c986d1ff2] return func(*args, **kwargs) [ 753.317308] env[61839]: ERROR nova.compute.manager [instance: 4b7ef74e-4018-4c6e-b540-d65c986d1ff2] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 753.317308] env[61839]: ERROR nova.compute.manager [instance: 4b7ef74e-4018-4c6e-b540-d65c986d1ff2] raise e [ 753.317308] env[61839]: ERROR nova.compute.manager [instance: 4b7ef74e-4018-4c6e-b540-d65c986d1ff2] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 753.317308] env[61839]: ERROR nova.compute.manager [instance: 4b7ef74e-4018-4c6e-b540-d65c986d1ff2] nwinfo = self.network_api.allocate_for_instance( [ 753.317308] env[61839]: ERROR nova.compute.manager [instance: 4b7ef74e-4018-4c6e-b540-d65c986d1ff2] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 753.317308] env[61839]: ERROR nova.compute.manager [instance: 4b7ef74e-4018-4c6e-b540-d65c986d1ff2] created_port_ids = self._update_ports_for_instance( [ 753.317308] env[61839]: ERROR nova.compute.manager [instance: 4b7ef74e-4018-4c6e-b540-d65c986d1ff2] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 753.317308] env[61839]: ERROR nova.compute.manager [instance: 4b7ef74e-4018-4c6e-b540-d65c986d1ff2] with excutils.save_and_reraise_exception(): [ 753.317308] env[61839]: ERROR nova.compute.manager [instance: 4b7ef74e-4018-4c6e-b540-d65c986d1ff2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 753.317760] env[61839]: ERROR nova.compute.manager [instance: 4b7ef74e-4018-4c6e-b540-d65c986d1ff2] self.force_reraise() [ 753.317760] env[61839]: ERROR nova.compute.manager [instance: 4b7ef74e-4018-4c6e-b540-d65c986d1ff2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 753.317760] env[61839]: ERROR nova.compute.manager [instance: 4b7ef74e-4018-4c6e-b540-d65c986d1ff2] raise self.value [ 753.317760] env[61839]: ERROR nova.compute.manager [instance: 4b7ef74e-4018-4c6e-b540-d65c986d1ff2] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 753.317760] env[61839]: ERROR nova.compute.manager [instance: 4b7ef74e-4018-4c6e-b540-d65c986d1ff2] updated_port = self._update_port( [ 753.317760] env[61839]: ERROR nova.compute.manager [instance: 4b7ef74e-4018-4c6e-b540-d65c986d1ff2] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 753.317760] env[61839]: ERROR nova.compute.manager [instance: 4b7ef74e-4018-4c6e-b540-d65c986d1ff2] _ensure_no_port_binding_failure(port) [ 753.317760] env[61839]: ERROR nova.compute.manager [instance: 4b7ef74e-4018-4c6e-b540-d65c986d1ff2] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 753.317760] env[61839]: ERROR nova.compute.manager [instance: 4b7ef74e-4018-4c6e-b540-d65c986d1ff2] raise exception.PortBindingFailed(port_id=port['id']) [ 753.317760] env[61839]: ERROR nova.compute.manager [instance: 4b7ef74e-4018-4c6e-b540-d65c986d1ff2] nova.exception.PortBindingFailed: Binding failed for port ddba8534-6cd9-4257-91ab-8c0543ed5713, please check neutron logs for more information. [ 753.317760] env[61839]: ERROR nova.compute.manager [instance: 4b7ef74e-4018-4c6e-b540-d65c986d1ff2] [ 753.318179] env[61839]: DEBUG nova.compute.utils [None req-1fbb0b67-167b-4268-b2f4-5c0b4d6a4a9c tempest-ServersNegativeTestMultiTenantJSON-1778640072 tempest-ServersNegativeTestMultiTenantJSON-1778640072-project-member] [instance: 4b7ef74e-4018-4c6e-b540-d65c986d1ff2] Binding failed for port ddba8534-6cd9-4257-91ab-8c0543ed5713, please check neutron logs for more information. {{(pid=61839) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 753.318363] env[61839]: DEBUG oslo_concurrency.lockutils [None req-73f4029e-d3f8-4a4c-9058-31fdb7ebb120 tempest-ServerRescueTestJSONUnderV235-933831856 tempest-ServerRescueTestJSONUnderV235-933831856-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.741s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 753.319869] env[61839]: INFO nova.compute.claims [None req-73f4029e-d3f8-4a4c-9058-31fdb7ebb120 tempest-ServerRescueTestJSONUnderV235-933831856 tempest-ServerRescueTestJSONUnderV235-933831856-project-member] [instance: 59ea60d5-7296-480c-ac03-ec0a7c021300] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 753.322447] env[61839]: DEBUG nova.compute.manager [None req-1fbb0b67-167b-4268-b2f4-5c0b4d6a4a9c tempest-ServersNegativeTestMultiTenantJSON-1778640072 tempest-ServersNegativeTestMultiTenantJSON-1778640072-project-member] [instance: 4b7ef74e-4018-4c6e-b540-d65c986d1ff2] Build of instance 4b7ef74e-4018-4c6e-b540-d65c986d1ff2 was re-scheduled: Binding failed for port ddba8534-6cd9-4257-91ab-8c0543ed5713, please check neutron logs for more information. 
{{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 753.322890] env[61839]: DEBUG nova.compute.manager [None req-1fbb0b67-167b-4268-b2f4-5c0b4d6a4a9c tempest-ServersNegativeTestMultiTenantJSON-1778640072 tempest-ServersNegativeTestMultiTenantJSON-1778640072-project-member] [instance: 4b7ef74e-4018-4c6e-b540-d65c986d1ff2] Unplugging VIFs for instance {{(pid=61839) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 753.323127] env[61839]: DEBUG oslo_concurrency.lockutils [None req-1fbb0b67-167b-4268-b2f4-5c0b4d6a4a9c tempest-ServersNegativeTestMultiTenantJSON-1778640072 tempest-ServersNegativeTestMultiTenantJSON-1778640072-project-member] Acquiring lock "refresh_cache-4b7ef74e-4018-4c6e-b540-d65c986d1ff2" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 753.323309] env[61839]: DEBUG oslo_concurrency.lockutils [None req-1fbb0b67-167b-4268-b2f4-5c0b4d6a4a9c tempest-ServersNegativeTestMultiTenantJSON-1778640072 tempest-ServersNegativeTestMultiTenantJSON-1778640072-project-member] Acquired lock "refresh_cache-4b7ef74e-4018-4c6e-b540-d65c986d1ff2" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 753.323478] env[61839]: DEBUG nova.network.neutron [None req-1fbb0b67-167b-4268-b2f4-5c0b4d6a4a9c tempest-ServersNegativeTestMultiTenantJSON-1778640072 tempest-ServersNegativeTestMultiTenantJSON-1778640072-project-member] [instance: 4b7ef74e-4018-4c6e-b540-d65c986d1ff2] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 753.557372] env[61839]: INFO nova.compute.manager [None req-f0f367ff-b05e-4ad3-a843-4493b18ee6be tempest-ListImageFiltersTestJSON-1646419560 tempest-ListImageFiltersTestJSON-1646419560-project-member] [instance: fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1] Took 1.02 seconds to deallocate network for instance. [ 753.757553] env[61839]: DEBUG nova.compute.manager [None req-d7ee033d-2803-45eb-9965-f6766e5fb5bc tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] [instance: e81bf730-9cf6-4728-aae4-4962115f8b6f] Starting instance... {{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 753.847039] env[61839]: DEBUG nova.network.neutron [None req-1fbb0b67-167b-4268-b2f4-5c0b4d6a4a9c tempest-ServersNegativeTestMultiTenantJSON-1778640072 tempest-ServersNegativeTestMultiTenantJSON-1778640072-project-member] [instance: 4b7ef74e-4018-4c6e-b540-d65c986d1ff2] Instance cache missing network info. 
{{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 753.966132] env[61839]: DEBUG nova.network.neutron [None req-1fbb0b67-167b-4268-b2f4-5c0b4d6a4a9c tempest-ServersNegativeTestMultiTenantJSON-1778640072 tempest-ServersNegativeTestMultiTenantJSON-1778640072-project-member] [instance: 4b7ef74e-4018-4c6e-b540-d65c986d1ff2] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 754.278337] env[61839]: DEBUG oslo_concurrency.lockutils [None req-d7ee033d-2803-45eb-9965-f6766e5fb5bc tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 754.469162] env[61839]: DEBUG oslo_concurrency.lockutils [None req-1fbb0b67-167b-4268-b2f4-5c0b4d6a4a9c tempest-ServersNegativeTestMultiTenantJSON-1778640072 tempest-ServersNegativeTestMultiTenantJSON-1778640072-project-member] Releasing lock "refresh_cache-4b7ef74e-4018-4c6e-b540-d65c986d1ff2" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 754.469439] env[61839]: DEBUG nova.compute.manager [None req-1fbb0b67-167b-4268-b2f4-5c0b4d6a4a9c tempest-ServersNegativeTestMultiTenantJSON-1778640072 tempest-ServersNegativeTestMultiTenantJSON-1778640072-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61839) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 754.469617] env[61839]: DEBUG nova.compute.manager [None req-1fbb0b67-167b-4268-b2f4-5c0b4d6a4a9c tempest-ServersNegativeTestMultiTenantJSON-1778640072 tempest-ServersNegativeTestMultiTenantJSON-1778640072-project-member] [instance: 4b7ef74e-4018-4c6e-b540-d65c986d1ff2] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 754.469775] env[61839]: DEBUG nova.network.neutron [None req-1fbb0b67-167b-4268-b2f4-5c0b4d6a4a9c tempest-ServersNegativeTestMultiTenantJSON-1778640072 tempest-ServersNegativeTestMultiTenantJSON-1778640072-project-member] [instance: 4b7ef74e-4018-4c6e-b540-d65c986d1ff2] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 754.496168] env[61839]: DEBUG nova.network.neutron [None req-1fbb0b67-167b-4268-b2f4-5c0b4d6a4a9c tempest-ServersNegativeTestMultiTenantJSON-1778640072 tempest-ServersNegativeTestMultiTenantJSON-1778640072-project-member] [instance: 4b7ef74e-4018-4c6e-b540-d65c986d1ff2] Instance cache missing network info. 
{{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 754.596202] env[61839]: INFO nova.scheduler.client.report [None req-f0f367ff-b05e-4ad3-a843-4493b18ee6be tempest-ListImageFiltersTestJSON-1646419560 tempest-ListImageFiltersTestJSON-1646419560-project-member] Deleted allocations for instance fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1 [ 754.688376] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63f7a89a-155c-407a-9b24-b5575f69ec66 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.695668] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63a53f01-9a7f-4d7f-9514-b220de703d08 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.727288] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aeaedfb6-2da0-4079-b883-06b8043542b9 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.735044] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6df97075-1939-4f71-af42-1cde968618be {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.749198] env[61839]: DEBUG nova.compute.provider_tree [None req-73f4029e-d3f8-4a4c-9058-31fdb7ebb120 tempest-ServerRescueTestJSONUnderV235-933831856 tempest-ServerRescueTestJSONUnderV235-933831856-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 754.998316] env[61839]: DEBUG nova.network.neutron [None req-1fbb0b67-167b-4268-b2f4-5c0b4d6a4a9c tempest-ServersNegativeTestMultiTenantJSON-1778640072 tempest-ServersNegativeTestMultiTenantJSON-1778640072-project-member] [instance: 4b7ef74e-4018-4c6e-b540-d65c986d1ff2] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 755.108428] env[61839]: DEBUG oslo_concurrency.lockutils [None req-f0f367ff-b05e-4ad3-a843-4493b18ee6be tempest-ListImageFiltersTestJSON-1646419560 tempest-ListImageFiltersTestJSON-1646419560-project-member] Lock "fe275ee4-b6e5-42e1-b2bb-f87b6b3274c1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 163.121s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 755.252950] env[61839]: DEBUG nova.scheduler.client.report [None req-73f4029e-d3f8-4a4c-9058-31fdb7ebb120 tempest-ServerRescueTestJSONUnderV235-933831856 tempest-ServerRescueTestJSONUnderV235-933831856-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 755.501364] env[61839]: INFO 
nova.compute.manager [None req-1fbb0b67-167b-4268-b2f4-5c0b4d6a4a9c tempest-ServersNegativeTestMultiTenantJSON-1778640072 tempest-ServersNegativeTestMultiTenantJSON-1778640072-project-member] [instance: 4b7ef74e-4018-4c6e-b540-d65c986d1ff2] Took 1.03 seconds to deallocate network for instance. [ 755.611373] env[61839]: DEBUG nova.compute.manager [None req-d7ee033d-2803-45eb-9965-f6766e5fb5bc tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] [instance: 697222e0-07e5-4a3d-adbe-d5d815cf4756] Starting instance... {{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 755.762623] env[61839]: DEBUG oslo_concurrency.lockutils [None req-73f4029e-d3f8-4a4c-9058-31fdb7ebb120 tempest-ServerRescueTestJSONUnderV235-933831856 tempest-ServerRescueTestJSONUnderV235-933831856-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.444s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 755.763998] env[61839]: DEBUG nova.compute.manager [None req-73f4029e-d3f8-4a4c-9058-31fdb7ebb120 tempest-ServerRescueTestJSONUnderV235-933831856 tempest-ServerRescueTestJSONUnderV235-933831856-project-member] [instance: 59ea60d5-7296-480c-ac03-ec0a7c021300] Start building networks asynchronously for instance. {{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 755.767544] env[61839]: DEBUG oslo_concurrency.lockutils [None req-bfae6393-6992-4f8d-b92d-1fa0650840c0 tempest-ListImageFiltersTestJSON-1646419560 tempest-ListImageFiltersTestJSON-1646419560-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.180s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 756.132429] env[61839]: DEBUG oslo_concurrency.lockutils [None req-d7ee033d-2803-45eb-9965-f6766e5fb5bc tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 756.268770] env[61839]: DEBUG nova.compute.utils [None req-73f4029e-d3f8-4a4c-9058-31fdb7ebb120 tempest-ServerRescueTestJSONUnderV235-933831856 tempest-ServerRescueTestJSONUnderV235-933831856-project-member] Using /dev/sd instead of None {{(pid=61839) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 756.270175] env[61839]: DEBUG nova.compute.manager [None req-73f4029e-d3f8-4a4c-9058-31fdb7ebb120 tempest-ServerRescueTestJSONUnderV235-933831856 tempest-ServerRescueTestJSONUnderV235-933831856-project-member] [instance: 59ea60d5-7296-480c-ac03-ec0a7c021300] Allocating IP information in the background. 
{{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 756.270773] env[61839]: DEBUG nova.network.neutron [None req-73f4029e-d3f8-4a4c-9058-31fdb7ebb120 tempest-ServerRescueTestJSONUnderV235-933831856 tempest-ServerRescueTestJSONUnderV235-933831856-project-member] [instance: 59ea60d5-7296-480c-ac03-ec0a7c021300] allocate_for_instance() {{(pid=61839) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 756.332664] env[61839]: DEBUG nova.policy [None req-73f4029e-d3f8-4a4c-9058-31fdb7ebb120 tempest-ServerRescueTestJSONUnderV235-933831856 tempest-ServerRescueTestJSONUnderV235-933831856-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '44f2a3eced404b879531da791fe18382', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'df18527cde6b460780c0bb728606e912', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61839) authorize /opt/stack/nova/nova/policy.py:201}} [ 756.539477] env[61839]: INFO nova.scheduler.client.report [None req-1fbb0b67-167b-4268-b2f4-5c0b4d6a4a9c tempest-ServersNegativeTestMultiTenantJSON-1778640072 tempest-ServersNegativeTestMultiTenantJSON-1778640072-project-member] Deleted allocations for instance 4b7ef74e-4018-4c6e-b540-d65c986d1ff2 [ 756.587193] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a0a629c-6983-44f3-8483-829e6d9744ac {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.596400] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09084590-689c-42de-a34b-6cf9aeea9b98 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.629930] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2dab7a0-6519-40ba-bb6b-689231a80882 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.638217] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c29e9920-c040-4b8f-a679-836f47c1f206 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.652370] env[61839]: DEBUG nova.compute.provider_tree [None req-bfae6393-6992-4f8d-b92d-1fa0650840c0 tempest-ListImageFiltersTestJSON-1646419560 tempest-ListImageFiltersTestJSON-1646419560-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 756.716785] env[61839]: DEBUG nova.network.neutron [None req-73f4029e-d3f8-4a4c-9058-31fdb7ebb120 tempest-ServerRescueTestJSONUnderV235-933831856 tempest-ServerRescueTestJSONUnderV235-933831856-project-member] [instance: 59ea60d5-7296-480c-ac03-ec0a7c021300] Successfully created port: c786aa01-deb8-40f7-9a8a-7f953d974aff {{(pid=61839) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 756.773046] env[61839]: DEBUG nova.compute.manager [None req-73f4029e-d3f8-4a4c-9058-31fdb7ebb120 tempest-ServerRescueTestJSONUnderV235-933831856 
tempest-ServerRescueTestJSONUnderV235-933831856-project-member] [instance: 59ea60d5-7296-480c-ac03-ec0a7c021300] Start building block device mappings for instance. {{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 757.049538] env[61839]: DEBUG oslo_concurrency.lockutils [None req-1fbb0b67-167b-4268-b2f4-5c0b4d6a4a9c tempest-ServersNegativeTestMultiTenantJSON-1778640072 tempest-ServersNegativeTestMultiTenantJSON-1778640072-project-member] Lock "4b7ef74e-4018-4c6e-b540-d65c986d1ff2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 164.940s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 757.155902] env[61839]: DEBUG nova.scheduler.client.report [None req-bfae6393-6992-4f8d-b92d-1fa0650840c0 tempest-ListImageFiltersTestJSON-1646419560 tempest-ListImageFiltersTestJSON-1646419560-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 757.555514] env[61839]: DEBUG nova.compute.manager [None req-778e677c-c811-4fc9-adf1-a009f3699287 tempest-ServersAaction247Test-1570847378 tempest-ServersAaction247Test-1570847378-project-member] [instance: 0bc0eefd-8a56-4cd6-a0b5-818cc437d917] Starting instance... {{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 757.579069] env[61839]: DEBUG nova.compute.manager [req-4a6695c9-0b1f-402f-90c8-6089ef116dae req-255ed34c-87c3-440f-a012-f6664c103bd6 service nova] [instance: 59ea60d5-7296-480c-ac03-ec0a7c021300] Received event network-changed-c786aa01-deb8-40f7-9a8a-7f953d974aff {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 757.579267] env[61839]: DEBUG nova.compute.manager [req-4a6695c9-0b1f-402f-90c8-6089ef116dae req-255ed34c-87c3-440f-a012-f6664c103bd6 service nova] [instance: 59ea60d5-7296-480c-ac03-ec0a7c021300] Refreshing instance network info cache due to event network-changed-c786aa01-deb8-40f7-9a8a-7f953d974aff. 
{{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 757.579484] env[61839]: DEBUG oslo_concurrency.lockutils [req-4a6695c9-0b1f-402f-90c8-6089ef116dae req-255ed34c-87c3-440f-a012-f6664c103bd6 service nova] Acquiring lock "refresh_cache-59ea60d5-7296-480c-ac03-ec0a7c021300" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 757.579627] env[61839]: DEBUG oslo_concurrency.lockutils [req-4a6695c9-0b1f-402f-90c8-6089ef116dae req-255ed34c-87c3-440f-a012-f6664c103bd6 service nova] Acquired lock "refresh_cache-59ea60d5-7296-480c-ac03-ec0a7c021300" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 757.579785] env[61839]: DEBUG nova.network.neutron [req-4a6695c9-0b1f-402f-90c8-6089ef116dae req-255ed34c-87c3-440f-a012-f6664c103bd6 service nova] [instance: 59ea60d5-7296-480c-ac03-ec0a7c021300] Refreshing network info cache for port c786aa01-deb8-40f7-9a8a-7f953d974aff {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 757.664063] env[61839]: DEBUG oslo_concurrency.lockutils [None req-bfae6393-6992-4f8d-b92d-1fa0650840c0 tempest-ListImageFiltersTestJSON-1646419560 tempest-ListImageFiltersTestJSON-1646419560-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.897s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 757.665061] env[61839]: ERROR nova.compute.manager [None req-bfae6393-6992-4f8d-b92d-1fa0650840c0 tempest-ListImageFiltersTestJSON-1646419560 tempest-ListImageFiltersTestJSON-1646419560-project-member] [instance: 56993a6d-de55-4648-9fd9-31d06a57f300] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port e04f298e-523d-4c71-859e-994f85a6983d, please check neutron logs for more information. 
[ 757.665061] env[61839]: ERROR nova.compute.manager [instance: 56993a6d-de55-4648-9fd9-31d06a57f300] Traceback (most recent call last): [ 757.665061] env[61839]: ERROR nova.compute.manager [instance: 56993a6d-de55-4648-9fd9-31d06a57f300] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 757.665061] env[61839]: ERROR nova.compute.manager [instance: 56993a6d-de55-4648-9fd9-31d06a57f300] self.driver.spawn(context, instance, image_meta, [ 757.665061] env[61839]: ERROR nova.compute.manager [instance: 56993a6d-de55-4648-9fd9-31d06a57f300] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 757.665061] env[61839]: ERROR nova.compute.manager [instance: 56993a6d-de55-4648-9fd9-31d06a57f300] self._vmops.spawn(context, instance, image_meta, injected_files, [ 757.665061] env[61839]: ERROR nova.compute.manager [instance: 56993a6d-de55-4648-9fd9-31d06a57f300] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 757.665061] env[61839]: ERROR nova.compute.manager [instance: 56993a6d-de55-4648-9fd9-31d06a57f300] vm_ref = self.build_virtual_machine(instance, [ 757.665061] env[61839]: ERROR nova.compute.manager [instance: 56993a6d-de55-4648-9fd9-31d06a57f300] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 757.665061] env[61839]: ERROR nova.compute.manager [instance: 56993a6d-de55-4648-9fd9-31d06a57f300] vif_infos = vmwarevif.get_vif_info(self._session, [ 757.665061] env[61839]: ERROR nova.compute.manager [instance: 56993a6d-de55-4648-9fd9-31d06a57f300] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 757.665482] env[61839]: ERROR nova.compute.manager [instance: 56993a6d-de55-4648-9fd9-31d06a57f300] for vif in network_info: [ 757.665482] env[61839]: ERROR nova.compute.manager [instance: 56993a6d-de55-4648-9fd9-31d06a57f300] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 757.665482] env[61839]: ERROR nova.compute.manager [instance: 56993a6d-de55-4648-9fd9-31d06a57f300] return self._sync_wrapper(fn, *args, **kwargs) [ 757.665482] env[61839]: ERROR nova.compute.manager [instance: 56993a6d-de55-4648-9fd9-31d06a57f300] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 757.665482] env[61839]: ERROR nova.compute.manager [instance: 56993a6d-de55-4648-9fd9-31d06a57f300] self.wait() [ 757.665482] env[61839]: ERROR nova.compute.manager [instance: 56993a6d-de55-4648-9fd9-31d06a57f300] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 757.665482] env[61839]: ERROR nova.compute.manager [instance: 56993a6d-de55-4648-9fd9-31d06a57f300] self[:] = self._gt.wait() [ 757.665482] env[61839]: ERROR nova.compute.manager [instance: 56993a6d-de55-4648-9fd9-31d06a57f300] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 757.665482] env[61839]: ERROR nova.compute.manager [instance: 56993a6d-de55-4648-9fd9-31d06a57f300] return self._exit_event.wait() [ 757.665482] env[61839]: ERROR nova.compute.manager [instance: 56993a6d-de55-4648-9fd9-31d06a57f300] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 757.665482] env[61839]: ERROR nova.compute.manager [instance: 56993a6d-de55-4648-9fd9-31d06a57f300] result = hub.switch() [ 757.665482] env[61839]: ERROR nova.compute.manager [instance: 56993a6d-de55-4648-9fd9-31d06a57f300] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
757.665482] env[61839]: ERROR nova.compute.manager [instance: 56993a6d-de55-4648-9fd9-31d06a57f300] return self.greenlet.switch() [ 757.665922] env[61839]: ERROR nova.compute.manager [instance: 56993a6d-de55-4648-9fd9-31d06a57f300] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 757.665922] env[61839]: ERROR nova.compute.manager [instance: 56993a6d-de55-4648-9fd9-31d06a57f300] result = function(*args, **kwargs) [ 757.665922] env[61839]: ERROR nova.compute.manager [instance: 56993a6d-de55-4648-9fd9-31d06a57f300] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 757.665922] env[61839]: ERROR nova.compute.manager [instance: 56993a6d-de55-4648-9fd9-31d06a57f300] return func(*args, **kwargs) [ 757.665922] env[61839]: ERROR nova.compute.manager [instance: 56993a6d-de55-4648-9fd9-31d06a57f300] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 757.665922] env[61839]: ERROR nova.compute.manager [instance: 56993a6d-de55-4648-9fd9-31d06a57f300] raise e [ 757.665922] env[61839]: ERROR nova.compute.manager [instance: 56993a6d-de55-4648-9fd9-31d06a57f300] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 757.665922] env[61839]: ERROR nova.compute.manager [instance: 56993a6d-de55-4648-9fd9-31d06a57f300] nwinfo = self.network_api.allocate_for_instance( [ 757.665922] env[61839]: ERROR nova.compute.manager [instance: 56993a6d-de55-4648-9fd9-31d06a57f300] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 757.665922] env[61839]: ERROR nova.compute.manager [instance: 56993a6d-de55-4648-9fd9-31d06a57f300] created_port_ids = self._update_ports_for_instance( [ 757.665922] env[61839]: ERROR nova.compute.manager [instance: 56993a6d-de55-4648-9fd9-31d06a57f300] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 757.665922] env[61839]: ERROR nova.compute.manager [instance: 56993a6d-de55-4648-9fd9-31d06a57f300] with excutils.save_and_reraise_exception(): [ 757.665922] env[61839]: ERROR nova.compute.manager [instance: 56993a6d-de55-4648-9fd9-31d06a57f300] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 757.666366] env[61839]: ERROR nova.compute.manager [instance: 56993a6d-de55-4648-9fd9-31d06a57f300] self.force_reraise() [ 757.666366] env[61839]: ERROR nova.compute.manager [instance: 56993a6d-de55-4648-9fd9-31d06a57f300] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 757.666366] env[61839]: ERROR nova.compute.manager [instance: 56993a6d-de55-4648-9fd9-31d06a57f300] raise self.value [ 757.666366] env[61839]: ERROR nova.compute.manager [instance: 56993a6d-de55-4648-9fd9-31d06a57f300] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 757.666366] env[61839]: ERROR nova.compute.manager [instance: 56993a6d-de55-4648-9fd9-31d06a57f300] updated_port = self._update_port( [ 757.666366] env[61839]: ERROR nova.compute.manager [instance: 56993a6d-de55-4648-9fd9-31d06a57f300] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 757.666366] env[61839]: ERROR nova.compute.manager [instance: 56993a6d-de55-4648-9fd9-31d06a57f300] _ensure_no_port_binding_failure(port) [ 757.666366] env[61839]: ERROR nova.compute.manager [instance: 56993a6d-de55-4648-9fd9-31d06a57f300] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 757.666366] env[61839]: ERROR nova.compute.manager [instance: 56993a6d-de55-4648-9fd9-31d06a57f300] raise exception.PortBindingFailed(port_id=port['id']) [ 757.666366] env[61839]: ERROR nova.compute.manager [instance: 56993a6d-de55-4648-9fd9-31d06a57f300] nova.exception.PortBindingFailed: Binding failed for port e04f298e-523d-4c71-859e-994f85a6983d, please check neutron logs for more information. [ 757.666366] env[61839]: ERROR nova.compute.manager [instance: 56993a6d-de55-4648-9fd9-31d06a57f300] [ 757.666697] env[61839]: DEBUG nova.compute.utils [None req-bfae6393-6992-4f8d-b92d-1fa0650840c0 tempest-ListImageFiltersTestJSON-1646419560 tempest-ListImageFiltersTestJSON-1646419560-project-member] [instance: 56993a6d-de55-4648-9fd9-31d06a57f300] Binding failed for port e04f298e-523d-4c71-859e-994f85a6983d, please check neutron logs for more information. {{(pid=61839) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 757.667779] env[61839]: DEBUG oslo_concurrency.lockutils [None req-21cfb57d-6c01-48f6-81e9-529dd1bafa3c tempest-ServersAdminTestJSON-399292093 tempest-ServersAdminTestJSON-399292093-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 13.940s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 757.674135] env[61839]: DEBUG nova.compute.manager [None req-bfae6393-6992-4f8d-b92d-1fa0650840c0 tempest-ListImageFiltersTestJSON-1646419560 tempest-ListImageFiltersTestJSON-1646419560-project-member] [instance: 56993a6d-de55-4648-9fd9-31d06a57f300] Build of instance 56993a6d-de55-4648-9fd9-31d06a57f300 was re-scheduled: Binding failed for port e04f298e-523d-4c71-859e-994f85a6983d, please check neutron logs for more information. 
{{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 757.674954] env[61839]: DEBUG nova.compute.manager [None req-bfae6393-6992-4f8d-b92d-1fa0650840c0 tempest-ListImageFiltersTestJSON-1646419560 tempest-ListImageFiltersTestJSON-1646419560-project-member] [instance: 56993a6d-de55-4648-9fd9-31d06a57f300] Unplugging VIFs for instance {{(pid=61839) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 757.675649] env[61839]: DEBUG oslo_concurrency.lockutils [None req-bfae6393-6992-4f8d-b92d-1fa0650840c0 tempest-ListImageFiltersTestJSON-1646419560 tempest-ListImageFiltersTestJSON-1646419560-project-member] Acquiring lock "refresh_cache-56993a6d-de55-4648-9fd9-31d06a57f300" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 757.675776] env[61839]: DEBUG oslo_concurrency.lockutils [None req-bfae6393-6992-4f8d-b92d-1fa0650840c0 tempest-ListImageFiltersTestJSON-1646419560 tempest-ListImageFiltersTestJSON-1646419560-project-member] Acquired lock "refresh_cache-56993a6d-de55-4648-9fd9-31d06a57f300" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 757.675942] env[61839]: DEBUG nova.network.neutron [None req-bfae6393-6992-4f8d-b92d-1fa0650840c0 tempest-ListImageFiltersTestJSON-1646419560 tempest-ListImageFiltersTestJSON-1646419560-project-member] [instance: 56993a6d-de55-4648-9fd9-31d06a57f300] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 757.780781] env[61839]: ERROR nova.compute.manager [None req-73f4029e-d3f8-4a4c-9058-31fdb7ebb120 tempest-ServerRescueTestJSONUnderV235-933831856 tempest-ServerRescueTestJSONUnderV235-933831856-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port c786aa01-deb8-40f7-9a8a-7f953d974aff, please check neutron logs for more information. 
[ 757.780781] env[61839]: ERROR nova.compute.manager Traceback (most recent call last): [ 757.780781] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 757.780781] env[61839]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 757.780781] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 757.780781] env[61839]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 757.780781] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 757.780781] env[61839]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 757.780781] env[61839]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 757.780781] env[61839]: ERROR nova.compute.manager self.force_reraise() [ 757.780781] env[61839]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 757.780781] env[61839]: ERROR nova.compute.manager raise self.value [ 757.780781] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 757.780781] env[61839]: ERROR nova.compute.manager updated_port = self._update_port( [ 757.780781] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 757.780781] env[61839]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 757.781575] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 757.781575] env[61839]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 757.781575] env[61839]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port c786aa01-deb8-40f7-9a8a-7f953d974aff, please check neutron logs for more information. 
[ 757.781575] env[61839]: ERROR nova.compute.manager [ 757.781575] env[61839]: Traceback (most recent call last): [ 757.781575] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 757.781575] env[61839]: listener.cb(fileno) [ 757.781575] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 757.781575] env[61839]: result = function(*args, **kwargs) [ 757.781575] env[61839]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 757.781575] env[61839]: return func(*args, **kwargs) [ 757.781575] env[61839]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 757.781575] env[61839]: raise e [ 757.781575] env[61839]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 757.781575] env[61839]: nwinfo = self.network_api.allocate_for_instance( [ 757.781575] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 757.781575] env[61839]: created_port_ids = self._update_ports_for_instance( [ 757.781575] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 757.781575] env[61839]: with excutils.save_and_reraise_exception(): [ 757.781575] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 757.781575] env[61839]: self.force_reraise() [ 757.781575] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 757.781575] env[61839]: raise self.value [ 757.781575] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 757.781575] env[61839]: updated_port = self._update_port( [ 757.781575] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 757.781575] env[61839]: _ensure_no_port_binding_failure(port) [ 757.781575] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 757.781575] env[61839]: raise exception.PortBindingFailed(port_id=port['id']) [ 757.782685] env[61839]: nova.exception.PortBindingFailed: Binding failed for port c786aa01-deb8-40f7-9a8a-7f953d974aff, please check neutron logs for more information. [ 757.782685] env[61839]: Removing descriptor: 21 [ 757.783070] env[61839]: DEBUG nova.compute.manager [None req-73f4029e-d3f8-4a4c-9058-31fdb7ebb120 tempest-ServerRescueTestJSONUnderV235-933831856 tempest-ServerRescueTestJSONUnderV235-933831856-project-member] [instance: 59ea60d5-7296-480c-ac03-ec0a7c021300] Start spawning the instance on the hypervisor. 
{{(pid=61839) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 757.810120] env[61839]: DEBUG nova.virt.hardware [None req-73f4029e-d3f8-4a4c-9058-31fdb7ebb120 tempest-ServerRescueTestJSONUnderV235-933831856 tempest-ServerRescueTestJSONUnderV235-933831856-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-18T16:51:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-18T16:50:57Z,direct_url=,disk_format='vmdk',id=e497cc62-282a-4a70-9770-22d80d8a1013,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a98e034276e44534a9feace637762da7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-18T16:50:58Z,virtual_size=,visibility=), allow threads: False {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 757.810401] env[61839]: DEBUG nova.virt.hardware [None req-73f4029e-d3f8-4a4c-9058-31fdb7ebb120 tempest-ServerRescueTestJSONUnderV235-933831856 tempest-ServerRescueTestJSONUnderV235-933831856-project-member] Flavor limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 757.810557] env[61839]: DEBUG nova.virt.hardware [None req-73f4029e-d3f8-4a4c-9058-31fdb7ebb120 tempest-ServerRescueTestJSONUnderV235-933831856 tempest-ServerRescueTestJSONUnderV235-933831856-project-member] Image limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 757.810735] env[61839]: DEBUG nova.virt.hardware [None req-73f4029e-d3f8-4a4c-9058-31fdb7ebb120 tempest-ServerRescueTestJSONUnderV235-933831856 tempest-ServerRescueTestJSONUnderV235-933831856-project-member] Flavor pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 757.810877] env[61839]: DEBUG nova.virt.hardware [None req-73f4029e-d3f8-4a4c-9058-31fdb7ebb120 tempest-ServerRescueTestJSONUnderV235-933831856 tempest-ServerRescueTestJSONUnderV235-933831856-project-member] Image pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 757.811031] env[61839]: DEBUG nova.virt.hardware [None req-73f4029e-d3f8-4a4c-9058-31fdb7ebb120 tempest-ServerRescueTestJSONUnderV235-933831856 tempest-ServerRescueTestJSONUnderV235-933831856-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 757.811270] env[61839]: DEBUG nova.virt.hardware [None req-73f4029e-d3f8-4a4c-9058-31fdb7ebb120 tempest-ServerRescueTestJSONUnderV235-933831856 tempest-ServerRescueTestJSONUnderV235-933831856-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 757.811436] env[61839]: DEBUG nova.virt.hardware [None req-73f4029e-d3f8-4a4c-9058-31fdb7ebb120 tempest-ServerRescueTestJSONUnderV235-933831856 tempest-ServerRescueTestJSONUnderV235-933831856-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61839) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 757.811603] env[61839]: DEBUG nova.virt.hardware [None req-73f4029e-d3f8-4a4c-9058-31fdb7ebb120 tempest-ServerRescueTestJSONUnderV235-933831856 tempest-ServerRescueTestJSONUnderV235-933831856-project-member] Got 1 possible topologies {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 757.811762] env[61839]: DEBUG nova.virt.hardware [None req-73f4029e-d3f8-4a4c-9058-31fdb7ebb120 tempest-ServerRescueTestJSONUnderV235-933831856 tempest-ServerRescueTestJSONUnderV235-933831856-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 757.811933] env[61839]: DEBUG nova.virt.hardware [None req-73f4029e-d3f8-4a4c-9058-31fdb7ebb120 tempest-ServerRescueTestJSONUnderV235-933831856 tempest-ServerRescueTestJSONUnderV235-933831856-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 757.812800] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fee96051-0be8-4b7b-bb8a-7ae2eb58c640 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.820822] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d58021db-3a84-41e6-b7fe-d9d8e7280886 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.836557] env[61839]: ERROR nova.compute.manager [None req-73f4029e-d3f8-4a4c-9058-31fdb7ebb120 tempest-ServerRescueTestJSONUnderV235-933831856 tempest-ServerRescueTestJSONUnderV235-933831856-project-member] [instance: 59ea60d5-7296-480c-ac03-ec0a7c021300] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port c786aa01-deb8-40f7-9a8a-7f953d974aff, please check neutron logs for more information. 
[ 757.836557] env[61839]: ERROR nova.compute.manager [instance: 59ea60d5-7296-480c-ac03-ec0a7c021300] Traceback (most recent call last): [ 757.836557] env[61839]: ERROR nova.compute.manager [instance: 59ea60d5-7296-480c-ac03-ec0a7c021300] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 757.836557] env[61839]: ERROR nova.compute.manager [instance: 59ea60d5-7296-480c-ac03-ec0a7c021300] yield resources [ 757.836557] env[61839]: ERROR nova.compute.manager [instance: 59ea60d5-7296-480c-ac03-ec0a7c021300] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 757.836557] env[61839]: ERROR nova.compute.manager [instance: 59ea60d5-7296-480c-ac03-ec0a7c021300] self.driver.spawn(context, instance, image_meta, [ 757.836557] env[61839]: ERROR nova.compute.manager [instance: 59ea60d5-7296-480c-ac03-ec0a7c021300] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 757.836557] env[61839]: ERROR nova.compute.manager [instance: 59ea60d5-7296-480c-ac03-ec0a7c021300] self._vmops.spawn(context, instance, image_meta, injected_files, [ 757.836557] env[61839]: ERROR nova.compute.manager [instance: 59ea60d5-7296-480c-ac03-ec0a7c021300] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 757.836557] env[61839]: ERROR nova.compute.manager [instance: 59ea60d5-7296-480c-ac03-ec0a7c021300] vm_ref = self.build_virtual_machine(instance, [ 757.836557] env[61839]: ERROR nova.compute.manager [instance: 59ea60d5-7296-480c-ac03-ec0a7c021300] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 757.836999] env[61839]: ERROR nova.compute.manager [instance: 59ea60d5-7296-480c-ac03-ec0a7c021300] vif_infos = vmwarevif.get_vif_info(self._session, [ 757.836999] env[61839]: ERROR nova.compute.manager [instance: 59ea60d5-7296-480c-ac03-ec0a7c021300] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 757.836999] env[61839]: ERROR nova.compute.manager [instance: 59ea60d5-7296-480c-ac03-ec0a7c021300] for vif in network_info: [ 757.836999] env[61839]: ERROR nova.compute.manager [instance: 59ea60d5-7296-480c-ac03-ec0a7c021300] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 757.836999] env[61839]: ERROR nova.compute.manager [instance: 59ea60d5-7296-480c-ac03-ec0a7c021300] return self._sync_wrapper(fn, *args, **kwargs) [ 757.836999] env[61839]: ERROR nova.compute.manager [instance: 59ea60d5-7296-480c-ac03-ec0a7c021300] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 757.836999] env[61839]: ERROR nova.compute.manager [instance: 59ea60d5-7296-480c-ac03-ec0a7c021300] self.wait() [ 757.836999] env[61839]: ERROR nova.compute.manager [instance: 59ea60d5-7296-480c-ac03-ec0a7c021300] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 757.836999] env[61839]: ERROR nova.compute.manager [instance: 59ea60d5-7296-480c-ac03-ec0a7c021300] self[:] = self._gt.wait() [ 757.836999] env[61839]: ERROR nova.compute.manager [instance: 59ea60d5-7296-480c-ac03-ec0a7c021300] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 757.836999] env[61839]: ERROR nova.compute.manager [instance: 59ea60d5-7296-480c-ac03-ec0a7c021300] return self._exit_event.wait() [ 757.836999] env[61839]: ERROR nova.compute.manager [instance: 59ea60d5-7296-480c-ac03-ec0a7c021300] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 757.836999] env[61839]: ERROR 
nova.compute.manager [instance: 59ea60d5-7296-480c-ac03-ec0a7c021300] current.throw(*self._exc) [ 757.837443] env[61839]: ERROR nova.compute.manager [instance: 59ea60d5-7296-480c-ac03-ec0a7c021300] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 757.837443] env[61839]: ERROR nova.compute.manager [instance: 59ea60d5-7296-480c-ac03-ec0a7c021300] result = function(*args, **kwargs) [ 757.837443] env[61839]: ERROR nova.compute.manager [instance: 59ea60d5-7296-480c-ac03-ec0a7c021300] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 757.837443] env[61839]: ERROR nova.compute.manager [instance: 59ea60d5-7296-480c-ac03-ec0a7c021300] return func(*args, **kwargs) [ 757.837443] env[61839]: ERROR nova.compute.manager [instance: 59ea60d5-7296-480c-ac03-ec0a7c021300] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 757.837443] env[61839]: ERROR nova.compute.manager [instance: 59ea60d5-7296-480c-ac03-ec0a7c021300] raise e [ 757.837443] env[61839]: ERROR nova.compute.manager [instance: 59ea60d5-7296-480c-ac03-ec0a7c021300] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 757.837443] env[61839]: ERROR nova.compute.manager [instance: 59ea60d5-7296-480c-ac03-ec0a7c021300] nwinfo = self.network_api.allocate_for_instance( [ 757.837443] env[61839]: ERROR nova.compute.manager [instance: 59ea60d5-7296-480c-ac03-ec0a7c021300] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 757.837443] env[61839]: ERROR nova.compute.manager [instance: 59ea60d5-7296-480c-ac03-ec0a7c021300] created_port_ids = self._update_ports_for_instance( [ 757.837443] env[61839]: ERROR nova.compute.manager [instance: 59ea60d5-7296-480c-ac03-ec0a7c021300] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 757.837443] env[61839]: ERROR nova.compute.manager [instance: 59ea60d5-7296-480c-ac03-ec0a7c021300] with excutils.save_and_reraise_exception(): [ 757.837443] env[61839]: ERROR nova.compute.manager [instance: 59ea60d5-7296-480c-ac03-ec0a7c021300] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 757.837969] env[61839]: ERROR nova.compute.manager [instance: 59ea60d5-7296-480c-ac03-ec0a7c021300] self.force_reraise() [ 757.837969] env[61839]: ERROR nova.compute.manager [instance: 59ea60d5-7296-480c-ac03-ec0a7c021300] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 757.837969] env[61839]: ERROR nova.compute.manager [instance: 59ea60d5-7296-480c-ac03-ec0a7c021300] raise self.value [ 757.837969] env[61839]: ERROR nova.compute.manager [instance: 59ea60d5-7296-480c-ac03-ec0a7c021300] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 757.837969] env[61839]: ERROR nova.compute.manager [instance: 59ea60d5-7296-480c-ac03-ec0a7c021300] updated_port = self._update_port( [ 757.837969] env[61839]: ERROR nova.compute.manager [instance: 59ea60d5-7296-480c-ac03-ec0a7c021300] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 757.837969] env[61839]: ERROR nova.compute.manager [instance: 59ea60d5-7296-480c-ac03-ec0a7c021300] _ensure_no_port_binding_failure(port) [ 757.837969] env[61839]: ERROR nova.compute.manager [instance: 59ea60d5-7296-480c-ac03-ec0a7c021300] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
757.837969] env[61839]: ERROR nova.compute.manager [instance: 59ea60d5-7296-480c-ac03-ec0a7c021300] raise exception.PortBindingFailed(port_id=port['id']) [ 757.837969] env[61839]: ERROR nova.compute.manager [instance: 59ea60d5-7296-480c-ac03-ec0a7c021300] nova.exception.PortBindingFailed: Binding failed for port c786aa01-deb8-40f7-9a8a-7f953d974aff, please check neutron logs for more information. [ 757.837969] env[61839]: ERROR nova.compute.manager [instance: 59ea60d5-7296-480c-ac03-ec0a7c021300] [ 757.837969] env[61839]: INFO nova.compute.manager [None req-73f4029e-d3f8-4a4c-9058-31fdb7ebb120 tempest-ServerRescueTestJSONUnderV235-933831856 tempest-ServerRescueTestJSONUnderV235-933831856-project-member] [instance: 59ea60d5-7296-480c-ac03-ec0a7c021300] Terminating instance [ 757.838717] env[61839]: DEBUG oslo_concurrency.lockutils [None req-73f4029e-d3f8-4a4c-9058-31fdb7ebb120 tempest-ServerRescueTestJSONUnderV235-933831856 tempest-ServerRescueTestJSONUnderV235-933831856-project-member] Acquiring lock "refresh_cache-59ea60d5-7296-480c-ac03-ec0a7c021300" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 758.076161] env[61839]: DEBUG oslo_concurrency.lockutils [None req-778e677c-c811-4fc9-adf1-a009f3699287 tempest-ServersAaction247Test-1570847378 tempest-ServersAaction247Test-1570847378-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 758.097013] env[61839]: DEBUG nova.network.neutron [req-4a6695c9-0b1f-402f-90c8-6089ef116dae req-255ed34c-87c3-440f-a012-f6664c103bd6 service nova] [instance: 59ea60d5-7296-480c-ac03-ec0a7c021300] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 758.198930] env[61839]: DEBUG nova.network.neutron [None req-bfae6393-6992-4f8d-b92d-1fa0650840c0 tempest-ListImageFiltersTestJSON-1646419560 tempest-ListImageFiltersTestJSON-1646419560-project-member] [instance: 56993a6d-de55-4648-9fd9-31d06a57f300] Instance cache missing network info. 
{{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 758.252035] env[61839]: DEBUG nova.network.neutron [req-4a6695c9-0b1f-402f-90c8-6089ef116dae req-255ed34c-87c3-440f-a012-f6664c103bd6 service nova] [instance: 59ea60d5-7296-480c-ac03-ec0a7c021300] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 758.290042] env[61839]: DEBUG nova.network.neutron [None req-bfae6393-6992-4f8d-b92d-1fa0650840c0 tempest-ListImageFiltersTestJSON-1646419560 tempest-ListImageFiltersTestJSON-1646419560-project-member] [instance: 56993a6d-de55-4648-9fd9-31d06a57f300] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 758.494615] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08d8d24a-09f7-4ffc-b2c9-e7b734ca62ef {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.502188] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e91a5f65-5a84-4622-b304-4b89abe6374d {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.532203] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-779af0a1-1118-44d7-ad96-143419cd57fc {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.539419] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-988ebb6f-2400-4297-818d-15770e3f2394 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.552174] env[61839]: DEBUG nova.compute.provider_tree [None req-21cfb57d-6c01-48f6-81e9-529dd1bafa3c tempest-ServersAdminTestJSON-399292093 tempest-ServersAdminTestJSON-399292093-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 758.755363] env[61839]: DEBUG oslo_concurrency.lockutils [req-4a6695c9-0b1f-402f-90c8-6089ef116dae req-255ed34c-87c3-440f-a012-f6664c103bd6 service nova] Releasing lock "refresh_cache-59ea60d5-7296-480c-ac03-ec0a7c021300" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 758.756723] env[61839]: DEBUG oslo_concurrency.lockutils [None req-73f4029e-d3f8-4a4c-9058-31fdb7ebb120 tempest-ServerRescueTestJSONUnderV235-933831856 tempest-ServerRescueTestJSONUnderV235-933831856-project-member] Acquired lock "refresh_cache-59ea60d5-7296-480c-ac03-ec0a7c021300" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 758.756723] env[61839]: DEBUG nova.network.neutron [None req-73f4029e-d3f8-4a4c-9058-31fdb7ebb120 tempest-ServerRescueTestJSONUnderV235-933831856 tempest-ServerRescueTestJSONUnderV235-933831856-project-member] [instance: 59ea60d5-7296-480c-ac03-ec0a7c021300] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 758.785934] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task 
ComputeManager._check_instance_build_time {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 758.786201] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 758.792405] env[61839]: DEBUG oslo_concurrency.lockutils [None req-bfae6393-6992-4f8d-b92d-1fa0650840c0 tempest-ListImageFiltersTestJSON-1646419560 tempest-ListImageFiltersTestJSON-1646419560-project-member] Releasing lock "refresh_cache-56993a6d-de55-4648-9fd9-31d06a57f300" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 758.792642] env[61839]: DEBUG nova.compute.manager [None req-bfae6393-6992-4f8d-b92d-1fa0650840c0 tempest-ListImageFiltersTestJSON-1646419560 tempest-ListImageFiltersTestJSON-1646419560-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61839) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 758.792781] env[61839]: DEBUG nova.compute.manager [None req-bfae6393-6992-4f8d-b92d-1fa0650840c0 tempest-ListImageFiltersTestJSON-1646419560 tempest-ListImageFiltersTestJSON-1646419560-project-member] [instance: 56993a6d-de55-4648-9fd9-31d06a57f300] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 758.792947] env[61839]: DEBUG nova.network.neutron [None req-bfae6393-6992-4f8d-b92d-1fa0650840c0 tempest-ListImageFiltersTestJSON-1646419560 tempest-ListImageFiltersTestJSON-1646419560-project-member] [instance: 56993a6d-de55-4648-9fd9-31d06a57f300] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 758.823608] env[61839]: DEBUG nova.network.neutron [None req-bfae6393-6992-4f8d-b92d-1fa0650840c0 tempest-ListImageFiltersTestJSON-1646419560 tempest-ListImageFiltersTestJSON-1646419560-project-member] [instance: 56993a6d-de55-4648-9fd9-31d06a57f300] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 759.054948] env[61839]: DEBUG nova.scheduler.client.report [None req-21cfb57d-6c01-48f6-81e9-529dd1bafa3c tempest-ServersAdminTestJSON-399292093 tempest-ServersAdminTestJSON-399292093-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 759.273240] env[61839]: DEBUG nova.network.neutron [None req-73f4029e-d3f8-4a4c-9058-31fdb7ebb120 tempest-ServerRescueTestJSONUnderV235-933831856 tempest-ServerRescueTestJSONUnderV235-933831856-project-member] [instance: 59ea60d5-7296-480c-ac03-ec0a7c021300] Instance cache missing network info. 
{{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 759.293435] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 759.293625] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Starting heal instance info cache {{(pid=61839) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 759.293742] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Rebuilding the list of instances to heal {{(pid=61839) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 759.326710] env[61839]: DEBUG nova.network.neutron [None req-bfae6393-6992-4f8d-b92d-1fa0650840c0 tempest-ListImageFiltersTestJSON-1646419560 tempest-ListImageFiltersTestJSON-1646419560-project-member] [instance: 56993a6d-de55-4648-9fd9-31d06a57f300] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 759.388825] env[61839]: DEBUG nova.network.neutron [None req-73f4029e-d3f8-4a4c-9058-31fdb7ebb120 tempest-ServerRescueTestJSONUnderV235-933831856 tempest-ServerRescueTestJSONUnderV235-933831856-project-member] [instance: 59ea60d5-7296-480c-ac03-ec0a7c021300] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 759.561071] env[61839]: DEBUG oslo_concurrency.lockutils [None req-21cfb57d-6c01-48f6-81e9-529dd1bafa3c tempest-ServersAdminTestJSON-399292093 tempest-ServersAdminTestJSON-399292093-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.893s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 759.561738] env[61839]: ERROR nova.compute.manager [None req-21cfb57d-6c01-48f6-81e9-529dd1bafa3c tempest-ServersAdminTestJSON-399292093 tempest-ServersAdminTestJSON-399292093-project-member] [instance: 50140f35-6282-41dc-a66c-f041f33769d7] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port b1d03096-6b67-405d-bb2c-0df90e9beef4, please check neutron logs for more information. 
[ 759.561738] env[61839]: ERROR nova.compute.manager [instance: 50140f35-6282-41dc-a66c-f041f33769d7] Traceback (most recent call last): [ 759.561738] env[61839]: ERROR nova.compute.manager [instance: 50140f35-6282-41dc-a66c-f041f33769d7] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 759.561738] env[61839]: ERROR nova.compute.manager [instance: 50140f35-6282-41dc-a66c-f041f33769d7] self.driver.spawn(context, instance, image_meta, [ 759.561738] env[61839]: ERROR nova.compute.manager [instance: 50140f35-6282-41dc-a66c-f041f33769d7] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 759.561738] env[61839]: ERROR nova.compute.manager [instance: 50140f35-6282-41dc-a66c-f041f33769d7] self._vmops.spawn(context, instance, image_meta, injected_files, [ 759.561738] env[61839]: ERROR nova.compute.manager [instance: 50140f35-6282-41dc-a66c-f041f33769d7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 759.561738] env[61839]: ERROR nova.compute.manager [instance: 50140f35-6282-41dc-a66c-f041f33769d7] vm_ref = self.build_virtual_machine(instance, [ 759.561738] env[61839]: ERROR nova.compute.manager [instance: 50140f35-6282-41dc-a66c-f041f33769d7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 759.561738] env[61839]: ERROR nova.compute.manager [instance: 50140f35-6282-41dc-a66c-f041f33769d7] vif_infos = vmwarevif.get_vif_info(self._session, [ 759.561738] env[61839]: ERROR nova.compute.manager [instance: 50140f35-6282-41dc-a66c-f041f33769d7] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 759.562286] env[61839]: ERROR nova.compute.manager [instance: 50140f35-6282-41dc-a66c-f041f33769d7] for vif in network_info: [ 759.562286] env[61839]: ERROR nova.compute.manager [instance: 50140f35-6282-41dc-a66c-f041f33769d7] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 759.562286] env[61839]: ERROR nova.compute.manager [instance: 50140f35-6282-41dc-a66c-f041f33769d7] return self._sync_wrapper(fn, *args, **kwargs) [ 759.562286] env[61839]: ERROR nova.compute.manager [instance: 50140f35-6282-41dc-a66c-f041f33769d7] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 759.562286] env[61839]: ERROR nova.compute.manager [instance: 50140f35-6282-41dc-a66c-f041f33769d7] self.wait() [ 759.562286] env[61839]: ERROR nova.compute.manager [instance: 50140f35-6282-41dc-a66c-f041f33769d7] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 759.562286] env[61839]: ERROR nova.compute.manager [instance: 50140f35-6282-41dc-a66c-f041f33769d7] self[:] = self._gt.wait() [ 759.562286] env[61839]: ERROR nova.compute.manager [instance: 50140f35-6282-41dc-a66c-f041f33769d7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 759.562286] env[61839]: ERROR nova.compute.manager [instance: 50140f35-6282-41dc-a66c-f041f33769d7] return self._exit_event.wait() [ 759.562286] env[61839]: ERROR nova.compute.manager [instance: 50140f35-6282-41dc-a66c-f041f33769d7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 759.562286] env[61839]: ERROR nova.compute.manager [instance: 50140f35-6282-41dc-a66c-f041f33769d7] result = hub.switch() [ 759.562286] env[61839]: ERROR nova.compute.manager [instance: 50140f35-6282-41dc-a66c-f041f33769d7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
759.562286] env[61839]: ERROR nova.compute.manager [instance: 50140f35-6282-41dc-a66c-f041f33769d7] return self.greenlet.switch() [ 759.562681] env[61839]: ERROR nova.compute.manager [instance: 50140f35-6282-41dc-a66c-f041f33769d7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 759.562681] env[61839]: ERROR nova.compute.manager [instance: 50140f35-6282-41dc-a66c-f041f33769d7] result = function(*args, **kwargs) [ 759.562681] env[61839]: ERROR nova.compute.manager [instance: 50140f35-6282-41dc-a66c-f041f33769d7] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 759.562681] env[61839]: ERROR nova.compute.manager [instance: 50140f35-6282-41dc-a66c-f041f33769d7] return func(*args, **kwargs) [ 759.562681] env[61839]: ERROR nova.compute.manager [instance: 50140f35-6282-41dc-a66c-f041f33769d7] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 759.562681] env[61839]: ERROR nova.compute.manager [instance: 50140f35-6282-41dc-a66c-f041f33769d7] raise e [ 759.562681] env[61839]: ERROR nova.compute.manager [instance: 50140f35-6282-41dc-a66c-f041f33769d7] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 759.562681] env[61839]: ERROR nova.compute.manager [instance: 50140f35-6282-41dc-a66c-f041f33769d7] nwinfo = self.network_api.allocate_for_instance( [ 759.562681] env[61839]: ERROR nova.compute.manager [instance: 50140f35-6282-41dc-a66c-f041f33769d7] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 759.562681] env[61839]: ERROR nova.compute.manager [instance: 50140f35-6282-41dc-a66c-f041f33769d7] created_port_ids = self._update_ports_for_instance( [ 759.562681] env[61839]: ERROR nova.compute.manager [instance: 50140f35-6282-41dc-a66c-f041f33769d7] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 759.562681] env[61839]: ERROR nova.compute.manager [instance: 50140f35-6282-41dc-a66c-f041f33769d7] with excutils.save_and_reraise_exception(): [ 759.562681] env[61839]: ERROR nova.compute.manager [instance: 50140f35-6282-41dc-a66c-f041f33769d7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 759.563144] env[61839]: ERROR nova.compute.manager [instance: 50140f35-6282-41dc-a66c-f041f33769d7] self.force_reraise() [ 759.563144] env[61839]: ERROR nova.compute.manager [instance: 50140f35-6282-41dc-a66c-f041f33769d7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 759.563144] env[61839]: ERROR nova.compute.manager [instance: 50140f35-6282-41dc-a66c-f041f33769d7] raise self.value [ 759.563144] env[61839]: ERROR nova.compute.manager [instance: 50140f35-6282-41dc-a66c-f041f33769d7] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 759.563144] env[61839]: ERROR nova.compute.manager [instance: 50140f35-6282-41dc-a66c-f041f33769d7] updated_port = self._update_port( [ 759.563144] env[61839]: ERROR nova.compute.manager [instance: 50140f35-6282-41dc-a66c-f041f33769d7] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 759.563144] env[61839]: ERROR nova.compute.manager [instance: 50140f35-6282-41dc-a66c-f041f33769d7] _ensure_no_port_binding_failure(port) [ 759.563144] env[61839]: ERROR nova.compute.manager [instance: 50140f35-6282-41dc-a66c-f041f33769d7] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 759.563144] env[61839]: ERROR nova.compute.manager [instance: 50140f35-6282-41dc-a66c-f041f33769d7] raise exception.PortBindingFailed(port_id=port['id']) [ 759.563144] env[61839]: ERROR nova.compute.manager [instance: 50140f35-6282-41dc-a66c-f041f33769d7] nova.exception.PortBindingFailed: Binding failed for port b1d03096-6b67-405d-bb2c-0df90e9beef4, please check neutron logs for more information. [ 759.563144] env[61839]: ERROR nova.compute.manager [instance: 50140f35-6282-41dc-a66c-f041f33769d7] [ 759.563524] env[61839]: DEBUG nova.compute.utils [None req-21cfb57d-6c01-48f6-81e9-529dd1bafa3c tempest-ServersAdminTestJSON-399292093 tempest-ServersAdminTestJSON-399292093-project-member] [instance: 50140f35-6282-41dc-a66c-f041f33769d7] Binding failed for port b1d03096-6b67-405d-bb2c-0df90e9beef4, please check neutron logs for more information. {{(pid=61839) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 759.565760] env[61839]: DEBUG oslo_concurrency.lockutils [None req-ba95565c-feba-4942-8392-448356c664a8 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.016s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 759.565760] env[61839]: INFO nova.compute.claims [None req-ba95565c-feba-4942-8392-448356c664a8 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] [instance: 619ec15b-463a-4daa-bffe-7d7a6022b962] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 759.569542] env[61839]: DEBUG nova.compute.manager [None req-21cfb57d-6c01-48f6-81e9-529dd1bafa3c tempest-ServersAdminTestJSON-399292093 tempest-ServersAdminTestJSON-399292093-project-member] [instance: 50140f35-6282-41dc-a66c-f041f33769d7] Build of instance 50140f35-6282-41dc-a66c-f041f33769d7 was re-scheduled: Binding failed for port b1d03096-6b67-405d-bb2c-0df90e9beef4, please check neutron logs for more information. 
{{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 759.569974] env[61839]: DEBUG nova.compute.manager [None req-21cfb57d-6c01-48f6-81e9-529dd1bafa3c tempest-ServersAdminTestJSON-399292093 tempest-ServersAdminTestJSON-399292093-project-member] [instance: 50140f35-6282-41dc-a66c-f041f33769d7] Unplugging VIFs for instance {{(pid=61839) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 759.570212] env[61839]: DEBUG oslo_concurrency.lockutils [None req-21cfb57d-6c01-48f6-81e9-529dd1bafa3c tempest-ServersAdminTestJSON-399292093 tempest-ServersAdminTestJSON-399292093-project-member] Acquiring lock "refresh_cache-50140f35-6282-41dc-a66c-f041f33769d7" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 759.570360] env[61839]: DEBUG oslo_concurrency.lockutils [None req-21cfb57d-6c01-48f6-81e9-529dd1bafa3c tempest-ServersAdminTestJSON-399292093 tempest-ServersAdminTestJSON-399292093-project-member] Acquired lock "refresh_cache-50140f35-6282-41dc-a66c-f041f33769d7" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 759.570521] env[61839]: DEBUG nova.network.neutron [None req-21cfb57d-6c01-48f6-81e9-529dd1bafa3c tempest-ServersAdminTestJSON-399292093 tempest-ServersAdminTestJSON-399292093-project-member] [instance: 50140f35-6282-41dc-a66c-f041f33769d7] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 759.609910] env[61839]: DEBUG nova.compute.manager [req-c4e34013-1738-4269-9b70-3fb6efde03e1 req-fad379ee-8c29-4dfa-b8ee-8ae9967f3874 service nova] [instance: 59ea60d5-7296-480c-ac03-ec0a7c021300] Received event network-vif-deleted-c786aa01-deb8-40f7-9a8a-7f953d974aff {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 759.798699] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] [instance: a89e30e6-b727-440f-a1e8-9c86d19c796d] Skipping network cache update for instance because it is Building. {{(pid=61839) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 759.798699] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] [instance: 337b31e7-a6c9-4f35-9936-62cff06fe2a1] Skipping network cache update for instance because it is Building. {{(pid=61839) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 759.798699] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] [instance: 59ea60d5-7296-480c-ac03-ec0a7c021300] Skipping network cache update for instance because it is Building. {{(pid=61839) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 759.798699] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Didn't find any instances for network info cache update. 
{{(pid=61839) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10014}} [ 759.798956] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 759.799141] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 759.799305] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 759.799512] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 759.799601] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 759.799754] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 759.799885] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61839) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 759.800414] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 759.829205] env[61839]: INFO nova.compute.manager [None req-bfae6393-6992-4f8d-b92d-1fa0650840c0 tempest-ListImageFiltersTestJSON-1646419560 tempest-ListImageFiltersTestJSON-1646419560-project-member] [instance: 56993a6d-de55-4648-9fd9-31d06a57f300] Took 1.04 seconds to deallocate network for instance. [ 759.891613] env[61839]: DEBUG oslo_concurrency.lockutils [None req-73f4029e-d3f8-4a4c-9058-31fdb7ebb120 tempest-ServerRescueTestJSONUnderV235-933831856 tempest-ServerRescueTestJSONUnderV235-933831856-project-member] Releasing lock "refresh_cache-59ea60d5-7296-480c-ac03-ec0a7c021300" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 759.892092] env[61839]: DEBUG nova.compute.manager [None req-73f4029e-d3f8-4a4c-9058-31fdb7ebb120 tempest-ServerRescueTestJSONUnderV235-933831856 tempest-ServerRescueTestJSONUnderV235-933831856-project-member] [instance: 59ea60d5-7296-480c-ac03-ec0a7c021300] Start destroying the instance on the hypervisor. 
{{(pid=61839) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 759.892303] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-73f4029e-d3f8-4a4c-9058-31fdb7ebb120 tempest-ServerRescueTestJSONUnderV235-933831856 tempest-ServerRescueTestJSONUnderV235-933831856-project-member] [instance: 59ea60d5-7296-480c-ac03-ec0a7c021300] Destroying instance {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 759.892595] env[61839]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-43191bcb-89ac-4de2-a292-e68b02223fd1 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.903164] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bb87fbb-3aad-4e01-88a5-a79b745b7bbc {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.929143] env[61839]: WARNING nova.virt.vmwareapi.vmops [None req-73f4029e-d3f8-4a4c-9058-31fdb7ebb120 tempest-ServerRescueTestJSONUnderV235-933831856 tempest-ServerRescueTestJSONUnderV235-933831856-project-member] [instance: 59ea60d5-7296-480c-ac03-ec0a7c021300] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 59ea60d5-7296-480c-ac03-ec0a7c021300 could not be found. [ 759.929143] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-73f4029e-d3f8-4a4c-9058-31fdb7ebb120 tempest-ServerRescueTestJSONUnderV235-933831856 tempest-ServerRescueTestJSONUnderV235-933831856-project-member] [instance: 59ea60d5-7296-480c-ac03-ec0a7c021300] Instance destroyed {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 759.929143] env[61839]: INFO nova.compute.manager [None req-73f4029e-d3f8-4a4c-9058-31fdb7ebb120 tempest-ServerRescueTestJSONUnderV235-933831856 tempest-ServerRescueTestJSONUnderV235-933831856-project-member] [instance: 59ea60d5-7296-480c-ac03-ec0a7c021300] Took 0.04 seconds to destroy the instance on the hypervisor. [ 759.929143] env[61839]: DEBUG oslo.service.loopingcall [None req-73f4029e-d3f8-4a4c-9058-31fdb7ebb120 tempest-ServerRescueTestJSONUnderV235-933831856 tempest-ServerRescueTestJSONUnderV235-933831856-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 759.929143] env[61839]: DEBUG nova.compute.manager [-] [instance: 59ea60d5-7296-480c-ac03-ec0a7c021300] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 759.929143] env[61839]: DEBUG nova.network.neutron [-] [instance: 59ea60d5-7296-480c-ac03-ec0a7c021300] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 759.943508] env[61839]: DEBUG nova.network.neutron [-] [instance: 59ea60d5-7296-480c-ac03-ec0a7c021300] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 760.094478] env[61839]: DEBUG nova.network.neutron [None req-21cfb57d-6c01-48f6-81e9-529dd1bafa3c tempest-ServersAdminTestJSON-399292093 tempest-ServersAdminTestJSON-399292093-project-member] [instance: 50140f35-6282-41dc-a66c-f041f33769d7] Instance cache missing network info. 
{{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 760.303667] env[61839]: DEBUG oslo_concurrency.lockutils [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 760.356161] env[61839]: DEBUG nova.network.neutron [None req-21cfb57d-6c01-48f6-81e9-529dd1bafa3c tempest-ServersAdminTestJSON-399292093 tempest-ServersAdminTestJSON-399292093-project-member] [instance: 50140f35-6282-41dc-a66c-f041f33769d7] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 760.445527] env[61839]: DEBUG nova.network.neutron [-] [instance: 59ea60d5-7296-480c-ac03-ec0a7c021300] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 760.859026] env[61839]: DEBUG oslo_concurrency.lockutils [None req-21cfb57d-6c01-48f6-81e9-529dd1bafa3c tempest-ServersAdminTestJSON-399292093 tempest-ServersAdminTestJSON-399292093-project-member] Releasing lock "refresh_cache-50140f35-6282-41dc-a66c-f041f33769d7" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 760.859205] env[61839]: DEBUG nova.compute.manager [None req-21cfb57d-6c01-48f6-81e9-529dd1bafa3c tempest-ServersAdminTestJSON-399292093 tempest-ServersAdminTestJSON-399292093-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61839) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 760.859389] env[61839]: DEBUG nova.compute.manager [None req-21cfb57d-6c01-48f6-81e9-529dd1bafa3c tempest-ServersAdminTestJSON-399292093 tempest-ServersAdminTestJSON-399292093-project-member] [instance: 50140f35-6282-41dc-a66c-f041f33769d7] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 760.859561] env[61839]: DEBUG nova.network.neutron [None req-21cfb57d-6c01-48f6-81e9-529dd1bafa3c tempest-ServersAdminTestJSON-399292093 tempest-ServersAdminTestJSON-399292093-project-member] [instance: 50140f35-6282-41dc-a66c-f041f33769d7] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 760.861675] env[61839]: INFO nova.scheduler.client.report [None req-bfae6393-6992-4f8d-b92d-1fa0650840c0 tempest-ListImageFiltersTestJSON-1646419560 tempest-ListImageFiltersTestJSON-1646419560-project-member] Deleted allocations for instance 56993a6d-de55-4648-9fd9-31d06a57f300 [ 760.878463] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19f86adf-0b90-4284-80a5-3b14bfcdf5d8 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.888409] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd577494-2320-4bae-9ff9-446424f82560 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.893598] env[61839]: DEBUG nova.network.neutron [None req-21cfb57d-6c01-48f6-81e9-529dd1bafa3c tempest-ServersAdminTestJSON-399292093 
tempest-ServersAdminTestJSON-399292093-project-member] [instance: 50140f35-6282-41dc-a66c-f041f33769d7] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 760.919851] env[61839]: DEBUG nova.network.neutron [None req-21cfb57d-6c01-48f6-81e9-529dd1bafa3c tempest-ServersAdminTestJSON-399292093 tempest-ServersAdminTestJSON-399292093-project-member] [instance: 50140f35-6282-41dc-a66c-f041f33769d7] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 760.921475] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84f042c5-6148-4259-80cc-11a005985650 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.930097] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b720d838-b60e-4155-95cc-9b1a8b6eacd4 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.944052] env[61839]: DEBUG nova.compute.provider_tree [None req-ba95565c-feba-4942-8392-448356c664a8 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 760.947622] env[61839]: INFO nova.compute.manager [-] [instance: 59ea60d5-7296-480c-ac03-ec0a7c021300] Took 1.02 seconds to deallocate network for instance. [ 760.949899] env[61839]: DEBUG nova.compute.claims [None req-73f4029e-d3f8-4a4c-9058-31fdb7ebb120 tempest-ServerRescueTestJSONUnderV235-933831856 tempest-ServerRescueTestJSONUnderV235-933831856-project-member] [instance: 59ea60d5-7296-480c-ac03-ec0a7c021300] Aborting claim: {{(pid=61839) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 760.950122] env[61839]: DEBUG oslo_concurrency.lockutils [None req-73f4029e-d3f8-4a4c-9058-31fdb7ebb120 tempest-ServerRescueTestJSONUnderV235-933831856 tempest-ServerRescueTestJSONUnderV235-933831856-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 761.371906] env[61839]: DEBUG oslo_concurrency.lockutils [None req-bfae6393-6992-4f8d-b92d-1fa0650840c0 tempest-ListImageFiltersTestJSON-1646419560 tempest-ListImageFiltersTestJSON-1646419560-project-member] Lock "56993a6d-de55-4648-9fd9-31d06a57f300" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 168.480s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 761.425675] env[61839]: INFO nova.compute.manager [None req-21cfb57d-6c01-48f6-81e9-529dd1bafa3c tempest-ServersAdminTestJSON-399292093 tempest-ServersAdminTestJSON-399292093-project-member] [instance: 50140f35-6282-41dc-a66c-f041f33769d7] Took 0.57 seconds to deallocate network for instance. 
[ 761.450831] env[61839]: DEBUG nova.scheduler.client.report [None req-ba95565c-feba-4942-8392-448356c664a8 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 761.874734] env[61839]: DEBUG nova.compute.manager [None req-130eacd9-cc23-48e6-bf9f-acda8817b91f tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] [instance: 687e6bf8-cf2a-4a9f-ad87-b42c4d0ced0a] Starting instance... {{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 761.958827] env[61839]: DEBUG oslo_concurrency.lockutils [None req-ba95565c-feba-4942-8392-448356c664a8 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.394s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 761.958946] env[61839]: DEBUG nova.compute.manager [None req-ba95565c-feba-4942-8392-448356c664a8 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] [instance: 619ec15b-463a-4daa-bffe-7d7a6022b962] Start building networks asynchronously for instance. 
{{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 761.962370] env[61839]: DEBUG oslo_concurrency.lockutils [None req-871442e2-b578-4d92-b7ce-9775c2859117 tempest-ServersAdminTestJSON-399292093 tempest-ServersAdminTestJSON-399292093-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 13.276s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 762.398814] env[61839]: DEBUG oslo_concurrency.lockutils [None req-130eacd9-cc23-48e6-bf9f-acda8817b91f tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 762.454567] env[61839]: INFO nova.scheduler.client.report [None req-21cfb57d-6c01-48f6-81e9-529dd1bafa3c tempest-ServersAdminTestJSON-399292093 tempest-ServersAdminTestJSON-399292093-project-member] Deleted allocations for instance 50140f35-6282-41dc-a66c-f041f33769d7 [ 762.467527] env[61839]: DEBUG nova.compute.utils [None req-ba95565c-feba-4942-8392-448356c664a8 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] Using /dev/sd instead of None {{(pid=61839) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 762.471900] env[61839]: DEBUG nova.compute.manager [None req-ba95565c-feba-4942-8392-448356c664a8 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] [instance: 619ec15b-463a-4daa-bffe-7d7a6022b962] Allocating IP information in the background. 
{{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 762.472098] env[61839]: DEBUG nova.network.neutron [None req-ba95565c-feba-4942-8392-448356c664a8 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] [instance: 619ec15b-463a-4daa-bffe-7d7a6022b962] allocate_for_instance() {{(pid=61839) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 762.549644] env[61839]: DEBUG nova.policy [None req-ba95565c-feba-4942-8392-448356c664a8 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2cfaae5373da4d589df67d599d40366f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b3d6228b03c4412695ef822ed618a27a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61839) authorize /opt/stack/nova/nova/policy.py:201}} [ 762.785473] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52c667a3-08a9-4d90-a791-d6be2dcd8cb8 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.794586] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d040b466-bd3d-433b-9bda-2f3ff0a1ff2c {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.824327] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0eb36fa2-b7a3-4f58-89ec-399ceefa8a98 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.832389] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-328932f0-a118-402f-a24a-88880a932bb1 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.848084] env[61839]: DEBUG nova.compute.provider_tree [None req-871442e2-b578-4d92-b7ce-9775c2859117 tempest-ServersAdminTestJSON-399292093 tempest-ServersAdminTestJSON-399292093-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 762.877511] env[61839]: DEBUG nova.network.neutron [None req-ba95565c-feba-4942-8392-448356c664a8 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] [instance: 619ec15b-463a-4daa-bffe-7d7a6022b962] Successfully created port: 032feb82-164b-4265-a01b-c038ccb8a163 {{(pid=61839) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 762.965063] env[61839]: DEBUG oslo_concurrency.lockutils [None req-21cfb57d-6c01-48f6-81e9-529dd1bafa3c tempest-ServersAdminTestJSON-399292093 tempest-ServersAdminTestJSON-399292093-project-member] Lock "50140f35-6282-41dc-a66c-f041f33769d7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 166.167s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 762.972776] env[61839]: DEBUG nova.compute.manager [None 
req-ba95565c-feba-4942-8392-448356c664a8 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] [instance: 619ec15b-463a-4daa-bffe-7d7a6022b962] Start building block device mappings for instance. {{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 763.237118] env[61839]: DEBUG nova.network.neutron [None req-ba95565c-feba-4942-8392-448356c664a8 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] [instance: 619ec15b-463a-4daa-bffe-7d7a6022b962] Successfully created port: b7c185a3-f38f-4131-b2d0-931a3eaa0a9b {{(pid=61839) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 763.351217] env[61839]: DEBUG nova.scheduler.client.report [None req-871442e2-b578-4d92-b7ce-9775c2859117 tempest-ServersAdminTestJSON-399292093 tempest-ServersAdminTestJSON-399292093-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 763.469183] env[61839]: DEBUG nova.compute.manager [None req-ebcaba21-dc43-4473-b939-c569f2c1af99 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: e65da0fd-e877-4b25-a319-e4d65397056a] Starting instance... {{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 763.632144] env[61839]: DEBUG nova.network.neutron [None req-ba95565c-feba-4942-8392-448356c664a8 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] [instance: 619ec15b-463a-4daa-bffe-7d7a6022b962] Successfully created port: ce775b96-08e4-4a53-b139-95f849846e04 {{(pid=61839) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 763.860075] env[61839]: DEBUG oslo_concurrency.lockutils [None req-871442e2-b578-4d92-b7ce-9775c2859117 tempest-ServersAdminTestJSON-399292093 tempest-ServersAdminTestJSON-399292093-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.898s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 763.861012] env[61839]: ERROR nova.compute.manager [None req-871442e2-b578-4d92-b7ce-9775c2859117 tempest-ServersAdminTestJSON-399292093 tempest-ServersAdminTestJSON-399292093-project-member] [instance: a89e30e6-b727-440f-a1e8-9c86d19c796d] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port d741ef00-1358-49b5-ab14-a50a00d5024f, please check neutron logs for more information. 
[ 763.861012] env[61839]: ERROR nova.compute.manager [instance: a89e30e6-b727-440f-a1e8-9c86d19c796d] Traceback (most recent call last): [ 763.861012] env[61839]: ERROR nova.compute.manager [instance: a89e30e6-b727-440f-a1e8-9c86d19c796d] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 763.861012] env[61839]: ERROR nova.compute.manager [instance: a89e30e6-b727-440f-a1e8-9c86d19c796d] self.driver.spawn(context, instance, image_meta, [ 763.861012] env[61839]: ERROR nova.compute.manager [instance: a89e30e6-b727-440f-a1e8-9c86d19c796d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 763.861012] env[61839]: ERROR nova.compute.manager [instance: a89e30e6-b727-440f-a1e8-9c86d19c796d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 763.861012] env[61839]: ERROR nova.compute.manager [instance: a89e30e6-b727-440f-a1e8-9c86d19c796d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 763.861012] env[61839]: ERROR nova.compute.manager [instance: a89e30e6-b727-440f-a1e8-9c86d19c796d] vm_ref = self.build_virtual_machine(instance, [ 763.861012] env[61839]: ERROR nova.compute.manager [instance: a89e30e6-b727-440f-a1e8-9c86d19c796d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 763.861012] env[61839]: ERROR nova.compute.manager [instance: a89e30e6-b727-440f-a1e8-9c86d19c796d] vif_infos = vmwarevif.get_vif_info(self._session, [ 763.861012] env[61839]: ERROR nova.compute.manager [instance: a89e30e6-b727-440f-a1e8-9c86d19c796d] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 763.861487] env[61839]: ERROR nova.compute.manager [instance: a89e30e6-b727-440f-a1e8-9c86d19c796d] for vif in network_info: [ 763.861487] env[61839]: ERROR nova.compute.manager [instance: a89e30e6-b727-440f-a1e8-9c86d19c796d] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 763.861487] env[61839]: ERROR nova.compute.manager [instance: a89e30e6-b727-440f-a1e8-9c86d19c796d] return self._sync_wrapper(fn, *args, **kwargs) [ 763.861487] env[61839]: ERROR nova.compute.manager [instance: a89e30e6-b727-440f-a1e8-9c86d19c796d] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 763.861487] env[61839]: ERROR nova.compute.manager [instance: a89e30e6-b727-440f-a1e8-9c86d19c796d] self.wait() [ 763.861487] env[61839]: ERROR nova.compute.manager [instance: a89e30e6-b727-440f-a1e8-9c86d19c796d] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 763.861487] env[61839]: ERROR nova.compute.manager [instance: a89e30e6-b727-440f-a1e8-9c86d19c796d] self[:] = self._gt.wait() [ 763.861487] env[61839]: ERROR nova.compute.manager [instance: a89e30e6-b727-440f-a1e8-9c86d19c796d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 763.861487] env[61839]: ERROR nova.compute.manager [instance: a89e30e6-b727-440f-a1e8-9c86d19c796d] return self._exit_event.wait() [ 763.861487] env[61839]: ERROR nova.compute.manager [instance: a89e30e6-b727-440f-a1e8-9c86d19c796d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 763.861487] env[61839]: ERROR nova.compute.manager [instance: a89e30e6-b727-440f-a1e8-9c86d19c796d] result = hub.switch() [ 763.861487] env[61839]: ERROR nova.compute.manager [instance: a89e30e6-b727-440f-a1e8-9c86d19c796d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
763.861487] env[61839]: ERROR nova.compute.manager [instance: a89e30e6-b727-440f-a1e8-9c86d19c796d] return self.greenlet.switch() [ 763.861929] env[61839]: ERROR nova.compute.manager [instance: a89e30e6-b727-440f-a1e8-9c86d19c796d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 763.861929] env[61839]: ERROR nova.compute.manager [instance: a89e30e6-b727-440f-a1e8-9c86d19c796d] result = function(*args, **kwargs) [ 763.861929] env[61839]: ERROR nova.compute.manager [instance: a89e30e6-b727-440f-a1e8-9c86d19c796d] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 763.861929] env[61839]: ERROR nova.compute.manager [instance: a89e30e6-b727-440f-a1e8-9c86d19c796d] return func(*args, **kwargs) [ 763.861929] env[61839]: ERROR nova.compute.manager [instance: a89e30e6-b727-440f-a1e8-9c86d19c796d] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 763.861929] env[61839]: ERROR nova.compute.manager [instance: a89e30e6-b727-440f-a1e8-9c86d19c796d] raise e [ 763.861929] env[61839]: ERROR nova.compute.manager [instance: a89e30e6-b727-440f-a1e8-9c86d19c796d] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 763.861929] env[61839]: ERROR nova.compute.manager [instance: a89e30e6-b727-440f-a1e8-9c86d19c796d] nwinfo = self.network_api.allocate_for_instance( [ 763.861929] env[61839]: ERROR nova.compute.manager [instance: a89e30e6-b727-440f-a1e8-9c86d19c796d] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 763.861929] env[61839]: ERROR nova.compute.manager [instance: a89e30e6-b727-440f-a1e8-9c86d19c796d] created_port_ids = self._update_ports_for_instance( [ 763.861929] env[61839]: ERROR nova.compute.manager [instance: a89e30e6-b727-440f-a1e8-9c86d19c796d] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 763.861929] env[61839]: ERROR nova.compute.manager [instance: a89e30e6-b727-440f-a1e8-9c86d19c796d] with excutils.save_and_reraise_exception(): [ 763.861929] env[61839]: ERROR nova.compute.manager [instance: a89e30e6-b727-440f-a1e8-9c86d19c796d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 763.862452] env[61839]: ERROR nova.compute.manager [instance: a89e30e6-b727-440f-a1e8-9c86d19c796d] self.force_reraise() [ 763.862452] env[61839]: ERROR nova.compute.manager [instance: a89e30e6-b727-440f-a1e8-9c86d19c796d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 763.862452] env[61839]: ERROR nova.compute.manager [instance: a89e30e6-b727-440f-a1e8-9c86d19c796d] raise self.value [ 763.862452] env[61839]: ERROR nova.compute.manager [instance: a89e30e6-b727-440f-a1e8-9c86d19c796d] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 763.862452] env[61839]: ERROR nova.compute.manager [instance: a89e30e6-b727-440f-a1e8-9c86d19c796d] updated_port = self._update_port( [ 763.862452] env[61839]: ERROR nova.compute.manager [instance: a89e30e6-b727-440f-a1e8-9c86d19c796d] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 763.862452] env[61839]: ERROR nova.compute.manager [instance: a89e30e6-b727-440f-a1e8-9c86d19c796d] _ensure_no_port_binding_failure(port) [ 763.862452] env[61839]: ERROR nova.compute.manager [instance: a89e30e6-b727-440f-a1e8-9c86d19c796d] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 763.862452] env[61839]: ERROR nova.compute.manager [instance: a89e30e6-b727-440f-a1e8-9c86d19c796d] raise exception.PortBindingFailed(port_id=port['id']) [ 763.862452] env[61839]: ERROR nova.compute.manager [instance: a89e30e6-b727-440f-a1e8-9c86d19c796d] nova.exception.PortBindingFailed: Binding failed for port d741ef00-1358-49b5-ab14-a50a00d5024f, please check neutron logs for more information. [ 763.862452] env[61839]: ERROR nova.compute.manager [instance: a89e30e6-b727-440f-a1e8-9c86d19c796d] [ 763.862844] env[61839]: DEBUG nova.compute.utils [None req-871442e2-b578-4d92-b7ce-9775c2859117 tempest-ServersAdminTestJSON-399292093 tempest-ServersAdminTestJSON-399292093-project-member] [instance: a89e30e6-b727-440f-a1e8-9c86d19c796d] Binding failed for port d741ef00-1358-49b5-ab14-a50a00d5024f, please check neutron logs for more information. {{(pid=61839) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 763.863981] env[61839]: DEBUG oslo_concurrency.lockutils [None req-b5bdc0dc-c0c3-4680-812b-c7e0bf9d39a6 tempest-InstanceActionsNegativeTestJSON-917154709 tempest-InstanceActionsNegativeTestJSON-917154709-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.851s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 763.865952] env[61839]: INFO nova.compute.claims [None req-b5bdc0dc-c0c3-4680-812b-c7e0bf9d39a6 tempest-InstanceActionsNegativeTestJSON-917154709 tempest-InstanceActionsNegativeTestJSON-917154709-project-member] [instance: 821b784d-dc69-4c54-bccf-76693c34e19d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 763.868936] env[61839]: DEBUG nova.compute.manager [None req-871442e2-b578-4d92-b7ce-9775c2859117 tempest-ServersAdminTestJSON-399292093 tempest-ServersAdminTestJSON-399292093-project-member] [instance: a89e30e6-b727-440f-a1e8-9c86d19c796d] Build of instance a89e30e6-b727-440f-a1e8-9c86d19c796d was re-scheduled: Binding failed for port d741ef00-1358-49b5-ab14-a50a00d5024f, please check neutron logs for more information. 
{{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 763.870418] env[61839]: DEBUG nova.compute.manager [None req-871442e2-b578-4d92-b7ce-9775c2859117 tempest-ServersAdminTestJSON-399292093 tempest-ServersAdminTestJSON-399292093-project-member] [instance: a89e30e6-b727-440f-a1e8-9c86d19c796d] Unplugging VIFs for instance {{(pid=61839) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 763.870418] env[61839]: DEBUG oslo_concurrency.lockutils [None req-871442e2-b578-4d92-b7ce-9775c2859117 tempest-ServersAdminTestJSON-399292093 tempest-ServersAdminTestJSON-399292093-project-member] Acquiring lock "refresh_cache-a89e30e6-b727-440f-a1e8-9c86d19c796d" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 763.870418] env[61839]: DEBUG oslo_concurrency.lockutils [None req-871442e2-b578-4d92-b7ce-9775c2859117 tempest-ServersAdminTestJSON-399292093 tempest-ServersAdminTestJSON-399292093-project-member] Acquired lock "refresh_cache-a89e30e6-b727-440f-a1e8-9c86d19c796d" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 763.870418] env[61839]: DEBUG nova.network.neutron [None req-871442e2-b578-4d92-b7ce-9775c2859117 tempest-ServersAdminTestJSON-399292093 tempest-ServersAdminTestJSON-399292093-project-member] [instance: a89e30e6-b727-440f-a1e8-9c86d19c796d] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 763.982199] env[61839]: DEBUG nova.compute.manager [None req-ba95565c-feba-4942-8392-448356c664a8 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] [instance: 619ec15b-463a-4daa-bffe-7d7a6022b962] Start spawning the instance on the hypervisor. 
{{(pid=61839) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 763.994391] env[61839]: DEBUG oslo_concurrency.lockutils [None req-ebcaba21-dc43-4473-b939-c569f2c1af99 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 764.013390] env[61839]: DEBUG nova.virt.hardware [None req-ba95565c-feba-4942-8392-448356c664a8 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-18T16:51:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-18T16:50:57Z,direct_url=<?>,disk_format='vmdk',id=e497cc62-282a-4a70-9770-22d80d8a1013,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a98e034276e44534a9feace637762da7',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-10-18T16:50:58Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 764.013662] env[61839]: DEBUG nova.virt.hardware [None req-ba95565c-feba-4942-8392-448356c664a8 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] Flavor limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 764.013819] env[61839]: DEBUG nova.virt.hardware [None req-ba95565c-feba-4942-8392-448356c664a8 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] Image limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 764.013996] env[61839]: DEBUG nova.virt.hardware [None req-ba95565c-feba-4942-8392-448356c664a8 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] Flavor pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 764.014152] env[61839]: DEBUG nova.virt.hardware [None req-ba95565c-feba-4942-8392-448356c664a8 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] Image pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 764.014298] env[61839]: DEBUG nova.virt.hardware [None req-ba95565c-feba-4942-8392-448356c664a8 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 764.014645] env[61839]: DEBUG nova.virt.hardware [None req-ba95565c-feba-4942-8392-448356c664a8 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 764.014919] env[61839]: DEBUG 
nova.virt.hardware [None req-ba95565c-feba-4942-8392-448356c664a8 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 764.015290] env[61839]: DEBUG nova.virt.hardware [None req-ba95565c-feba-4942-8392-448356c664a8 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] Got 1 possible topologies {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 764.015585] env[61839]: DEBUG nova.virt.hardware [None req-ba95565c-feba-4942-8392-448356c664a8 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 764.015884] env[61839]: DEBUG nova.virt.hardware [None req-ba95565c-feba-4942-8392-448356c664a8 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 764.016864] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ac78e6e-0703-4f86-8402-eb4bab881036 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.026566] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96059c90-4bbb-494c-b6dc-6c3f6d3c1a08 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.414854] env[61839]: DEBUG nova.network.neutron [None req-871442e2-b578-4d92-b7ce-9775c2859117 tempest-ServersAdminTestJSON-399292093 tempest-ServersAdminTestJSON-399292093-project-member] [instance: a89e30e6-b727-440f-a1e8-9c86d19c796d] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 764.681421] env[61839]: DEBUG nova.network.neutron [None req-871442e2-b578-4d92-b7ce-9775c2859117 tempest-ServersAdminTestJSON-399292093 tempest-ServersAdminTestJSON-399292093-project-member] [instance: a89e30e6-b727-440f-a1e8-9c86d19c796d] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 765.185023] env[61839]: DEBUG oslo_concurrency.lockutils [None req-871442e2-b578-4d92-b7ce-9775c2859117 tempest-ServersAdminTestJSON-399292093 tempest-ServersAdminTestJSON-399292093-project-member] Releasing lock "refresh_cache-a89e30e6-b727-440f-a1e8-9c86d19c796d" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 765.185280] env[61839]: DEBUG nova.compute.manager [None req-871442e2-b578-4d92-b7ce-9775c2859117 tempest-ServersAdminTestJSON-399292093 tempest-ServersAdminTestJSON-399292093-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61839) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 765.185460] env[61839]: DEBUG nova.compute.manager [None req-871442e2-b578-4d92-b7ce-9775c2859117 tempest-ServersAdminTestJSON-399292093 tempest-ServersAdminTestJSON-399292093-project-member] [instance: a89e30e6-b727-440f-a1e8-9c86d19c796d] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 765.185632] env[61839]: DEBUG nova.network.neutron [None req-871442e2-b578-4d92-b7ce-9775c2859117 tempest-ServersAdminTestJSON-399292093 tempest-ServersAdminTestJSON-399292093-project-member] [instance: a89e30e6-b727-440f-a1e8-9c86d19c796d] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 765.215802] env[61839]: DEBUG nova.network.neutron [None req-871442e2-b578-4d92-b7ce-9775c2859117 tempest-ServersAdminTestJSON-399292093 tempest-ServersAdminTestJSON-399292093-project-member] [instance: a89e30e6-b727-440f-a1e8-9c86d19c796d] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 765.238691] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ceda92a3-a997-49ba-8a87-3e7c87bf0c4b {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.246444] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de00e091-4101-4247-904c-9508299223b2 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.284492] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3834c3d0-f2d4-43c1-9ec9-ff4ca29e4604 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.291837] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71086066-892b-44f6-aa23-3bf12dcee7c7 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.309312] env[61839]: DEBUG nova.compute.provider_tree [None req-b5bdc0dc-c0c3-4680-812b-c7e0bf9d39a6 tempest-InstanceActionsNegativeTestJSON-917154709 tempest-InstanceActionsNegativeTestJSON-917154709-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 765.352832] env[61839]: DEBUG nova.compute.manager [req-6073b928-2f50-436f-94ac-41fd9dccff40 req-e5f89d4f-fc81-45f5-a9c2-72d0c0ae4bec service nova] [instance: 619ec15b-463a-4daa-bffe-7d7a6022b962] Received event network-changed-032feb82-164b-4265-a01b-c038ccb8a163 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 765.353303] env[61839]: DEBUG nova.compute.manager [req-6073b928-2f50-436f-94ac-41fd9dccff40 req-e5f89d4f-fc81-45f5-a9c2-72d0c0ae4bec service nova] [instance: 619ec15b-463a-4daa-bffe-7d7a6022b962] Refreshing instance network info cache due to event network-changed-032feb82-164b-4265-a01b-c038ccb8a163. 
{{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 765.355962] env[61839]: DEBUG oslo_concurrency.lockutils [req-6073b928-2f50-436f-94ac-41fd9dccff40 req-e5f89d4f-fc81-45f5-a9c2-72d0c0ae4bec service nova] Acquiring lock "refresh_cache-619ec15b-463a-4daa-bffe-7d7a6022b962" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 765.355962] env[61839]: DEBUG oslo_concurrency.lockutils [req-6073b928-2f50-436f-94ac-41fd9dccff40 req-e5f89d4f-fc81-45f5-a9c2-72d0c0ae4bec service nova] Acquired lock "refresh_cache-619ec15b-463a-4daa-bffe-7d7a6022b962" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 765.355962] env[61839]: DEBUG nova.network.neutron [req-6073b928-2f50-436f-94ac-41fd9dccff40 req-e5f89d4f-fc81-45f5-a9c2-72d0c0ae4bec service nova] [instance: 619ec15b-463a-4daa-bffe-7d7a6022b962] Refreshing network info cache for port 032feb82-164b-4265-a01b-c038ccb8a163 {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 765.722343] env[61839]: DEBUG nova.network.neutron [None req-871442e2-b578-4d92-b7ce-9775c2859117 tempest-ServersAdminTestJSON-399292093 tempest-ServersAdminTestJSON-399292093-project-member] [instance: a89e30e6-b727-440f-a1e8-9c86d19c796d] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 765.723402] env[61839]: ERROR nova.compute.manager [None req-ba95565c-feba-4942-8392-448356c664a8 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 032feb82-164b-4265-a01b-c038ccb8a163, please check neutron logs for more information. 
[ 765.723402] env[61839]: ERROR nova.compute.manager Traceback (most recent call last): [ 765.723402] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 765.723402] env[61839]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 765.723402] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 765.723402] env[61839]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 765.723402] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 765.723402] env[61839]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 765.723402] env[61839]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 765.723402] env[61839]: ERROR nova.compute.manager self.force_reraise() [ 765.723402] env[61839]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 765.723402] env[61839]: ERROR nova.compute.manager raise self.value [ 765.723402] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 765.723402] env[61839]: ERROR nova.compute.manager updated_port = self._update_port( [ 765.723402] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 765.723402] env[61839]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 765.724241] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 765.724241] env[61839]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 765.724241] env[61839]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 032feb82-164b-4265-a01b-c038ccb8a163, please check neutron logs for more information. 
[ 765.724241] env[61839]: ERROR nova.compute.manager [ 765.724241] env[61839]: Traceback (most recent call last): [ 765.724241] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 765.724241] env[61839]: listener.cb(fileno) [ 765.724241] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 765.724241] env[61839]: result = function(*args, **kwargs) [ 765.724241] env[61839]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 765.724241] env[61839]: return func(*args, **kwargs) [ 765.724241] env[61839]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 765.724241] env[61839]: raise e [ 765.724241] env[61839]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 765.724241] env[61839]: nwinfo = self.network_api.allocate_for_instance( [ 765.724241] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 765.724241] env[61839]: created_port_ids = self._update_ports_for_instance( [ 765.724241] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 765.724241] env[61839]: with excutils.save_and_reraise_exception(): [ 765.724241] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 765.724241] env[61839]: self.force_reraise() [ 765.724241] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 765.724241] env[61839]: raise self.value [ 765.724241] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 765.724241] env[61839]: updated_port = self._update_port( [ 765.724241] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 765.724241] env[61839]: _ensure_no_port_binding_failure(port) [ 765.724241] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 765.724241] env[61839]: raise exception.PortBindingFailed(port_id=port['id']) [ 765.725292] env[61839]: nova.exception.PortBindingFailed: Binding failed for port 032feb82-164b-4265-a01b-c038ccb8a163, please check neutron logs for more information. [ 765.725292] env[61839]: Removing descriptor: 17 [ 765.725292] env[61839]: ERROR nova.compute.manager [None req-ba95565c-feba-4942-8392-448356c664a8 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] [instance: 619ec15b-463a-4daa-bffe-7d7a6022b962] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 032feb82-164b-4265-a01b-c038ccb8a163, please check neutron logs for more information. 
[ 765.725292] env[61839]: ERROR nova.compute.manager [instance: 619ec15b-463a-4daa-bffe-7d7a6022b962] Traceback (most recent call last): [ 765.725292] env[61839]: ERROR nova.compute.manager [instance: 619ec15b-463a-4daa-bffe-7d7a6022b962] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 765.725292] env[61839]: ERROR nova.compute.manager [instance: 619ec15b-463a-4daa-bffe-7d7a6022b962] yield resources [ 765.725292] env[61839]: ERROR nova.compute.manager [instance: 619ec15b-463a-4daa-bffe-7d7a6022b962] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 765.725292] env[61839]: ERROR nova.compute.manager [instance: 619ec15b-463a-4daa-bffe-7d7a6022b962] self.driver.spawn(context, instance, image_meta, [ 765.725292] env[61839]: ERROR nova.compute.manager [instance: 619ec15b-463a-4daa-bffe-7d7a6022b962] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 765.725292] env[61839]: ERROR nova.compute.manager [instance: 619ec15b-463a-4daa-bffe-7d7a6022b962] self._vmops.spawn(context, instance, image_meta, injected_files, [ 765.725292] env[61839]: ERROR nova.compute.manager [instance: 619ec15b-463a-4daa-bffe-7d7a6022b962] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 765.725292] env[61839]: ERROR nova.compute.manager [instance: 619ec15b-463a-4daa-bffe-7d7a6022b962] vm_ref = self.build_virtual_machine(instance, [ 765.726340] env[61839]: ERROR nova.compute.manager [instance: 619ec15b-463a-4daa-bffe-7d7a6022b962] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 765.726340] env[61839]: ERROR nova.compute.manager [instance: 619ec15b-463a-4daa-bffe-7d7a6022b962] vif_infos = vmwarevif.get_vif_info(self._session, [ 765.726340] env[61839]: ERROR nova.compute.manager [instance: 619ec15b-463a-4daa-bffe-7d7a6022b962] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 765.726340] env[61839]: ERROR nova.compute.manager [instance: 619ec15b-463a-4daa-bffe-7d7a6022b962] for vif in network_info: [ 765.726340] env[61839]: ERROR nova.compute.manager [instance: 619ec15b-463a-4daa-bffe-7d7a6022b962] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 765.726340] env[61839]: ERROR nova.compute.manager [instance: 619ec15b-463a-4daa-bffe-7d7a6022b962] return self._sync_wrapper(fn, *args, **kwargs) [ 765.726340] env[61839]: ERROR nova.compute.manager [instance: 619ec15b-463a-4daa-bffe-7d7a6022b962] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 765.726340] env[61839]: ERROR nova.compute.manager [instance: 619ec15b-463a-4daa-bffe-7d7a6022b962] self.wait() [ 765.726340] env[61839]: ERROR nova.compute.manager [instance: 619ec15b-463a-4daa-bffe-7d7a6022b962] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 765.726340] env[61839]: ERROR nova.compute.manager [instance: 619ec15b-463a-4daa-bffe-7d7a6022b962] self[:] = self._gt.wait() [ 765.726340] env[61839]: ERROR nova.compute.manager [instance: 619ec15b-463a-4daa-bffe-7d7a6022b962] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 765.726340] env[61839]: ERROR nova.compute.manager [instance: 619ec15b-463a-4daa-bffe-7d7a6022b962] return self._exit_event.wait() [ 765.726340] env[61839]: ERROR nova.compute.manager [instance: 619ec15b-463a-4daa-bffe-7d7a6022b962] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 765.726776] env[61839]: ERROR 
nova.compute.manager [instance: 619ec15b-463a-4daa-bffe-7d7a6022b962] result = hub.switch() [ 765.726776] env[61839]: ERROR nova.compute.manager [instance: 619ec15b-463a-4daa-bffe-7d7a6022b962] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 765.726776] env[61839]: ERROR nova.compute.manager [instance: 619ec15b-463a-4daa-bffe-7d7a6022b962] return self.greenlet.switch() [ 765.726776] env[61839]: ERROR nova.compute.manager [instance: 619ec15b-463a-4daa-bffe-7d7a6022b962] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 765.726776] env[61839]: ERROR nova.compute.manager [instance: 619ec15b-463a-4daa-bffe-7d7a6022b962] result = function(*args, **kwargs) [ 765.726776] env[61839]: ERROR nova.compute.manager [instance: 619ec15b-463a-4daa-bffe-7d7a6022b962] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 765.726776] env[61839]: ERROR nova.compute.manager [instance: 619ec15b-463a-4daa-bffe-7d7a6022b962] return func(*args, **kwargs) [ 765.726776] env[61839]: ERROR nova.compute.manager [instance: 619ec15b-463a-4daa-bffe-7d7a6022b962] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 765.726776] env[61839]: ERROR nova.compute.manager [instance: 619ec15b-463a-4daa-bffe-7d7a6022b962] raise e [ 765.726776] env[61839]: ERROR nova.compute.manager [instance: 619ec15b-463a-4daa-bffe-7d7a6022b962] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 765.726776] env[61839]: ERROR nova.compute.manager [instance: 619ec15b-463a-4daa-bffe-7d7a6022b962] nwinfo = self.network_api.allocate_for_instance( [ 765.726776] env[61839]: ERROR nova.compute.manager [instance: 619ec15b-463a-4daa-bffe-7d7a6022b962] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 765.726776] env[61839]: ERROR nova.compute.manager [instance: 619ec15b-463a-4daa-bffe-7d7a6022b962] created_port_ids = self._update_ports_for_instance( [ 765.727249] env[61839]: ERROR nova.compute.manager [instance: 619ec15b-463a-4daa-bffe-7d7a6022b962] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 765.727249] env[61839]: ERROR nova.compute.manager [instance: 619ec15b-463a-4daa-bffe-7d7a6022b962] with excutils.save_and_reraise_exception(): [ 765.727249] env[61839]: ERROR nova.compute.manager [instance: 619ec15b-463a-4daa-bffe-7d7a6022b962] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 765.727249] env[61839]: ERROR nova.compute.manager [instance: 619ec15b-463a-4daa-bffe-7d7a6022b962] self.force_reraise() [ 765.727249] env[61839]: ERROR nova.compute.manager [instance: 619ec15b-463a-4daa-bffe-7d7a6022b962] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 765.727249] env[61839]: ERROR nova.compute.manager [instance: 619ec15b-463a-4daa-bffe-7d7a6022b962] raise self.value [ 765.727249] env[61839]: ERROR nova.compute.manager [instance: 619ec15b-463a-4daa-bffe-7d7a6022b962] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 765.727249] env[61839]: ERROR nova.compute.manager [instance: 619ec15b-463a-4daa-bffe-7d7a6022b962] updated_port = self._update_port( [ 765.727249] env[61839]: ERROR nova.compute.manager [instance: 619ec15b-463a-4daa-bffe-7d7a6022b962] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 765.727249] 
env[61839]: ERROR nova.compute.manager [instance: 619ec15b-463a-4daa-bffe-7d7a6022b962] _ensure_no_port_binding_failure(port) [ 765.727249] env[61839]: ERROR nova.compute.manager [instance: 619ec15b-463a-4daa-bffe-7d7a6022b962] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 765.727249] env[61839]: ERROR nova.compute.manager [instance: 619ec15b-463a-4daa-bffe-7d7a6022b962] raise exception.PortBindingFailed(port_id=port['id']) [ 765.727613] env[61839]: ERROR nova.compute.manager [instance: 619ec15b-463a-4daa-bffe-7d7a6022b962] nova.exception.PortBindingFailed: Binding failed for port 032feb82-164b-4265-a01b-c038ccb8a163, please check neutron logs for more information. [ 765.727613] env[61839]: ERROR nova.compute.manager [instance: 619ec15b-463a-4daa-bffe-7d7a6022b962] [ 765.727613] env[61839]: INFO nova.compute.manager [None req-ba95565c-feba-4942-8392-448356c664a8 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] [instance: 619ec15b-463a-4daa-bffe-7d7a6022b962] Terminating instance [ 765.737135] env[61839]: DEBUG oslo_concurrency.lockutils [None req-ba95565c-feba-4942-8392-448356c664a8 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] Acquiring lock "refresh_cache-619ec15b-463a-4daa-bffe-7d7a6022b962" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 765.813645] env[61839]: DEBUG nova.scheduler.client.report [None req-b5bdc0dc-c0c3-4680-812b-c7e0bf9d39a6 tempest-InstanceActionsNegativeTestJSON-917154709 tempest-InstanceActionsNegativeTestJSON-917154709-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 765.942250] env[61839]: DEBUG nova.network.neutron [req-6073b928-2f50-436f-94ac-41fd9dccff40 req-e5f89d4f-fc81-45f5-a9c2-72d0c0ae4bec service nova] [instance: 619ec15b-463a-4daa-bffe-7d7a6022b962] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 766.109679] env[61839]: DEBUG nova.network.neutron [req-6073b928-2f50-436f-94ac-41fd9dccff40 req-e5f89d4f-fc81-45f5-a9c2-72d0c0ae4bec service nova] [instance: 619ec15b-463a-4daa-bffe-7d7a6022b962] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 766.238028] env[61839]: INFO nova.compute.manager [None req-871442e2-b578-4d92-b7ce-9775c2859117 tempest-ServersAdminTestJSON-399292093 tempest-ServersAdminTestJSON-399292093-project-member] [instance: a89e30e6-b727-440f-a1e8-9c86d19c796d] Took 1.05 seconds to deallocate network for instance. 
[ 766.322762] env[61839]: DEBUG oslo_concurrency.lockutils [None req-b5bdc0dc-c0c3-4680-812b-c7e0bf9d39a6 tempest-InstanceActionsNegativeTestJSON-917154709 tempest-InstanceActionsNegativeTestJSON-917154709-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.459s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 766.323588] env[61839]: DEBUG nova.compute.manager [None req-b5bdc0dc-c0c3-4680-812b-c7e0bf9d39a6 tempest-InstanceActionsNegativeTestJSON-917154709 tempest-InstanceActionsNegativeTestJSON-917154709-project-member] [instance: 821b784d-dc69-4c54-bccf-76693c34e19d] Start building networks asynchronously for instance. {{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 766.327127] env[61839]: DEBUG oslo_concurrency.lockutils [None req-1e1fbba5-4b37-438c-a0c7-7d09c658ba03 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 13.330s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 766.612668] env[61839]: DEBUG oslo_concurrency.lockutils [req-6073b928-2f50-436f-94ac-41fd9dccff40 req-e5f89d4f-fc81-45f5-a9c2-72d0c0ae4bec service nova] Releasing lock "refresh_cache-619ec15b-463a-4daa-bffe-7d7a6022b962" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 766.613154] env[61839]: DEBUG oslo_concurrency.lockutils [None req-ba95565c-feba-4942-8392-448356c664a8 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] Acquired lock "refresh_cache-619ec15b-463a-4daa-bffe-7d7a6022b962" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 766.613399] env[61839]: DEBUG nova.network.neutron [None req-ba95565c-feba-4942-8392-448356c664a8 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] [instance: 619ec15b-463a-4daa-bffe-7d7a6022b962] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 766.831029] env[61839]: DEBUG nova.compute.utils [None req-b5bdc0dc-c0c3-4680-812b-c7e0bf9d39a6 tempest-InstanceActionsNegativeTestJSON-917154709 tempest-InstanceActionsNegativeTestJSON-917154709-project-member] Using /dev/sd instead of None {{(pid=61839) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 766.831029] env[61839]: DEBUG nova.compute.manager [None req-b5bdc0dc-c0c3-4680-812b-c7e0bf9d39a6 tempest-InstanceActionsNegativeTestJSON-917154709 tempest-InstanceActionsNegativeTestJSON-917154709-project-member] [instance: 821b784d-dc69-4c54-bccf-76693c34e19d] Allocating IP information in the background. 
{{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 766.831029] env[61839]: DEBUG nova.network.neutron [None req-b5bdc0dc-c0c3-4680-812b-c7e0bf9d39a6 tempest-InstanceActionsNegativeTestJSON-917154709 tempest-InstanceActionsNegativeTestJSON-917154709-project-member] [instance: 821b784d-dc69-4c54-bccf-76693c34e19d] allocate_for_instance() {{(pid=61839) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 766.905504] env[61839]: DEBUG nova.policy [None req-b5bdc0dc-c0c3-4680-812b-c7e0bf9d39a6 tempest-InstanceActionsNegativeTestJSON-917154709 tempest-InstanceActionsNegativeTestJSON-917154709-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '34bcf22f1909452ea0f7fb5bb261f9cf', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bf3071391e0246f491ff47c9bcf5d397', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61839) authorize /opt/stack/nova/nova/policy.py:201}} [ 767.101960] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-312059a7-d585-4dc7-bf7d-775acb0bbfc8 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.113051] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca5c5a20-0858-47c4-8a98-0d6967a98b96 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.146152] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b559d6cc-9892-4165-a908-5675619755e9 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.153410] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82ff7cdb-26d3-4224-8623-73d5850c966a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.166403] env[61839]: DEBUG nova.compute.provider_tree [None req-1e1fbba5-4b37-438c-a0c7-7d09c658ba03 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 767.214028] env[61839]: DEBUG nova.network.neutron [None req-ba95565c-feba-4942-8392-448356c664a8 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] [instance: 619ec15b-463a-4daa-bffe-7d7a6022b962] Instance cache missing network info. 
{{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 767.272713] env[61839]: INFO nova.scheduler.client.report [None req-871442e2-b578-4d92-b7ce-9775c2859117 tempest-ServersAdminTestJSON-399292093 tempest-ServersAdminTestJSON-399292093-project-member] Deleted allocations for instance a89e30e6-b727-440f-a1e8-9c86d19c796d [ 767.334985] env[61839]: DEBUG nova.compute.manager [None req-b5bdc0dc-c0c3-4680-812b-c7e0bf9d39a6 tempest-InstanceActionsNegativeTestJSON-917154709 tempest-InstanceActionsNegativeTestJSON-917154709-project-member] [instance: 821b784d-dc69-4c54-bccf-76693c34e19d] Start building block device mappings for instance. {{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 767.368419] env[61839]: DEBUG nova.network.neutron [None req-ba95565c-feba-4942-8392-448356c664a8 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] [instance: 619ec15b-463a-4daa-bffe-7d7a6022b962] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 767.410492] env[61839]: DEBUG oslo_concurrency.lockutils [None req-cabdb6a6-9cdb-4077-8178-485aa41e5aff tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Acquiring lock "40c54d84-8e50-483a-b4e0-5f1cc72b0880" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 767.410730] env[61839]: DEBUG oslo_concurrency.lockutils [None req-cabdb6a6-9cdb-4077-8178-485aa41e5aff tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Lock "40c54d84-8e50-483a-b4e0-5f1cc72b0880" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 767.431530] env[61839]: DEBUG nova.network.neutron [None req-b5bdc0dc-c0c3-4680-812b-c7e0bf9d39a6 tempest-InstanceActionsNegativeTestJSON-917154709 tempest-InstanceActionsNegativeTestJSON-917154709-project-member] [instance: 821b784d-dc69-4c54-bccf-76693c34e19d] Successfully created port: ceaf8820-e4ad-40ab-b463-0c7c1c6fa156 {{(pid=61839) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 767.446484] env[61839]: DEBUG nova.compute.manager [req-62d22470-0460-43b8-ba83-a75640c58cb1 req-93029916-c3e3-44c4-b1b9-b4bcee34b062 service nova] [instance: 619ec15b-463a-4daa-bffe-7d7a6022b962] Received event network-vif-deleted-032feb82-164b-4265-a01b-c038ccb8a163 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 767.670631] env[61839]: DEBUG nova.scheduler.client.report [None req-1e1fbba5-4b37-438c-a0c7-7d09c658ba03 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) 
set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 767.783441] env[61839]: DEBUG oslo_concurrency.lockutils [None req-871442e2-b578-4d92-b7ce-9775c2859117 tempest-ServersAdminTestJSON-399292093 tempest-ServersAdminTestJSON-399292093-project-member] Lock "a89e30e6-b727-440f-a1e8-9c86d19c796d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 169.755s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 767.871885] env[61839]: DEBUG oslo_concurrency.lockutils [None req-ba95565c-feba-4942-8392-448356c664a8 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] Releasing lock "refresh_cache-619ec15b-463a-4daa-bffe-7d7a6022b962" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 767.871885] env[61839]: DEBUG nova.compute.manager [None req-ba95565c-feba-4942-8392-448356c664a8 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] [instance: 619ec15b-463a-4daa-bffe-7d7a6022b962] Start destroying the instance on the hypervisor. {{(pid=61839) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 767.871885] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-ba95565c-feba-4942-8392-448356c664a8 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] [instance: 619ec15b-463a-4daa-bffe-7d7a6022b962] Destroying instance {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 767.872263] env[61839]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ec20ab79-4a61-4bcc-9520-cc439915d5d1 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.884565] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26daea92-f455-4abe-b247-728b3b956729 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.907434] env[61839]: WARNING nova.virt.vmwareapi.vmops [None req-ba95565c-feba-4942-8392-448356c664a8 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] [instance: 619ec15b-463a-4daa-bffe-7d7a6022b962] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 619ec15b-463a-4daa-bffe-7d7a6022b962 could not be found. [ 767.907665] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-ba95565c-feba-4942-8392-448356c664a8 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] [instance: 619ec15b-463a-4daa-bffe-7d7a6022b962] Instance destroyed {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 767.907849] env[61839]: INFO nova.compute.manager [None req-ba95565c-feba-4942-8392-448356c664a8 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] [instance: 619ec15b-463a-4daa-bffe-7d7a6022b962] Took 0.04 seconds to destroy the instance on the hypervisor. [ 767.908122] env[61839]: DEBUG oslo.service.loopingcall [None req-ba95565c-feba-4942-8392-448356c664a8 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 767.908942] env[61839]: DEBUG nova.compute.manager [-] [instance: 619ec15b-463a-4daa-bffe-7d7a6022b962] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 767.908942] env[61839]: DEBUG nova.network.neutron [-] [instance: 619ec15b-463a-4daa-bffe-7d7a6022b962] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 767.971786] env[61839]: DEBUG nova.network.neutron [-] [instance: 619ec15b-463a-4daa-bffe-7d7a6022b962] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 768.176260] env[61839]: DEBUG oslo_concurrency.lockutils [None req-1e1fbba5-4b37-438c-a0c7-7d09c658ba03 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.849s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 768.177049] env[61839]: ERROR nova.compute.manager [None req-1e1fbba5-4b37-438c-a0c7-7d09c658ba03 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] [instance: 337b31e7-a6c9-4f35-9936-62cff06fe2a1] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 499ebbdf-24ab-4dda-b4db-a9c532f944e3, please check neutron logs for more information. [ 768.177049] env[61839]: ERROR nova.compute.manager [instance: 337b31e7-a6c9-4f35-9936-62cff06fe2a1] Traceback (most recent call last): [ 768.177049] env[61839]: ERROR nova.compute.manager [instance: 337b31e7-a6c9-4f35-9936-62cff06fe2a1] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 768.177049] env[61839]: ERROR nova.compute.manager [instance: 337b31e7-a6c9-4f35-9936-62cff06fe2a1] self.driver.spawn(context, instance, image_meta, [ 768.177049] env[61839]: ERROR nova.compute.manager [instance: 337b31e7-a6c9-4f35-9936-62cff06fe2a1] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 768.177049] env[61839]: ERROR nova.compute.manager [instance: 337b31e7-a6c9-4f35-9936-62cff06fe2a1] self._vmops.spawn(context, instance, image_meta, injected_files, [ 768.177049] env[61839]: ERROR nova.compute.manager [instance: 337b31e7-a6c9-4f35-9936-62cff06fe2a1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 768.177049] env[61839]: ERROR nova.compute.manager [instance: 337b31e7-a6c9-4f35-9936-62cff06fe2a1] vm_ref = self.build_virtual_machine(instance, [ 768.177049] env[61839]: ERROR nova.compute.manager [instance: 337b31e7-a6c9-4f35-9936-62cff06fe2a1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 768.177049] env[61839]: ERROR nova.compute.manager [instance: 337b31e7-a6c9-4f35-9936-62cff06fe2a1] vif_infos = vmwarevif.get_vif_info(self._session, [ 768.177049] env[61839]: ERROR nova.compute.manager [instance: 337b31e7-a6c9-4f35-9936-62cff06fe2a1] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 768.177435] env[61839]: ERROR nova.compute.manager [instance: 337b31e7-a6c9-4f35-9936-62cff06fe2a1] for vif in network_info: [ 768.177435] env[61839]: ERROR nova.compute.manager [instance: 337b31e7-a6c9-4f35-9936-62cff06fe2a1] File 
"/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 768.177435] env[61839]: ERROR nova.compute.manager [instance: 337b31e7-a6c9-4f35-9936-62cff06fe2a1] return self._sync_wrapper(fn, *args, **kwargs) [ 768.177435] env[61839]: ERROR nova.compute.manager [instance: 337b31e7-a6c9-4f35-9936-62cff06fe2a1] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 768.177435] env[61839]: ERROR nova.compute.manager [instance: 337b31e7-a6c9-4f35-9936-62cff06fe2a1] self.wait() [ 768.177435] env[61839]: ERROR nova.compute.manager [instance: 337b31e7-a6c9-4f35-9936-62cff06fe2a1] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 768.177435] env[61839]: ERROR nova.compute.manager [instance: 337b31e7-a6c9-4f35-9936-62cff06fe2a1] self[:] = self._gt.wait() [ 768.177435] env[61839]: ERROR nova.compute.manager [instance: 337b31e7-a6c9-4f35-9936-62cff06fe2a1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 768.177435] env[61839]: ERROR nova.compute.manager [instance: 337b31e7-a6c9-4f35-9936-62cff06fe2a1] return self._exit_event.wait() [ 768.177435] env[61839]: ERROR nova.compute.manager [instance: 337b31e7-a6c9-4f35-9936-62cff06fe2a1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 768.177435] env[61839]: ERROR nova.compute.manager [instance: 337b31e7-a6c9-4f35-9936-62cff06fe2a1] result = hub.switch() [ 768.177435] env[61839]: ERROR nova.compute.manager [instance: 337b31e7-a6c9-4f35-9936-62cff06fe2a1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 768.177435] env[61839]: ERROR nova.compute.manager [instance: 337b31e7-a6c9-4f35-9936-62cff06fe2a1] return self.greenlet.switch() [ 768.177856] env[61839]: ERROR nova.compute.manager [instance: 337b31e7-a6c9-4f35-9936-62cff06fe2a1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 768.177856] env[61839]: ERROR nova.compute.manager [instance: 337b31e7-a6c9-4f35-9936-62cff06fe2a1] result = function(*args, **kwargs) [ 768.177856] env[61839]: ERROR nova.compute.manager [instance: 337b31e7-a6c9-4f35-9936-62cff06fe2a1] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 768.177856] env[61839]: ERROR nova.compute.manager [instance: 337b31e7-a6c9-4f35-9936-62cff06fe2a1] return func(*args, **kwargs) [ 768.177856] env[61839]: ERROR nova.compute.manager [instance: 337b31e7-a6c9-4f35-9936-62cff06fe2a1] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 768.177856] env[61839]: ERROR nova.compute.manager [instance: 337b31e7-a6c9-4f35-9936-62cff06fe2a1] raise e [ 768.177856] env[61839]: ERROR nova.compute.manager [instance: 337b31e7-a6c9-4f35-9936-62cff06fe2a1] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 768.177856] env[61839]: ERROR nova.compute.manager [instance: 337b31e7-a6c9-4f35-9936-62cff06fe2a1] nwinfo = self.network_api.allocate_for_instance( [ 768.177856] env[61839]: ERROR nova.compute.manager [instance: 337b31e7-a6c9-4f35-9936-62cff06fe2a1] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 768.177856] env[61839]: ERROR nova.compute.manager [instance: 337b31e7-a6c9-4f35-9936-62cff06fe2a1] created_port_ids = self._update_ports_for_instance( [ 768.177856] env[61839]: ERROR nova.compute.manager [instance: 337b31e7-a6c9-4f35-9936-62cff06fe2a1] File "/opt/stack/nova/nova/network/neutron.py", line 
1414, in _update_ports_for_instance [ 768.177856] env[61839]: ERROR nova.compute.manager [instance: 337b31e7-a6c9-4f35-9936-62cff06fe2a1] with excutils.save_and_reraise_exception(): [ 768.177856] env[61839]: ERROR nova.compute.manager [instance: 337b31e7-a6c9-4f35-9936-62cff06fe2a1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 768.178268] env[61839]: ERROR nova.compute.manager [instance: 337b31e7-a6c9-4f35-9936-62cff06fe2a1] self.force_reraise() [ 768.178268] env[61839]: ERROR nova.compute.manager [instance: 337b31e7-a6c9-4f35-9936-62cff06fe2a1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 768.178268] env[61839]: ERROR nova.compute.manager [instance: 337b31e7-a6c9-4f35-9936-62cff06fe2a1] raise self.value [ 768.178268] env[61839]: ERROR nova.compute.manager [instance: 337b31e7-a6c9-4f35-9936-62cff06fe2a1] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 768.178268] env[61839]: ERROR nova.compute.manager [instance: 337b31e7-a6c9-4f35-9936-62cff06fe2a1] updated_port = self._update_port( [ 768.178268] env[61839]: ERROR nova.compute.manager [instance: 337b31e7-a6c9-4f35-9936-62cff06fe2a1] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 768.178268] env[61839]: ERROR nova.compute.manager [instance: 337b31e7-a6c9-4f35-9936-62cff06fe2a1] _ensure_no_port_binding_failure(port) [ 768.178268] env[61839]: ERROR nova.compute.manager [instance: 337b31e7-a6c9-4f35-9936-62cff06fe2a1] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 768.178268] env[61839]: ERROR nova.compute.manager [instance: 337b31e7-a6c9-4f35-9936-62cff06fe2a1] raise exception.PortBindingFailed(port_id=port['id']) [ 768.178268] env[61839]: ERROR nova.compute.manager [instance: 337b31e7-a6c9-4f35-9936-62cff06fe2a1] nova.exception.PortBindingFailed: Binding failed for port 499ebbdf-24ab-4dda-b4db-a9c532f944e3, please check neutron logs for more information. [ 768.178268] env[61839]: ERROR nova.compute.manager [instance: 337b31e7-a6c9-4f35-9936-62cff06fe2a1] [ 768.178676] env[61839]: DEBUG nova.compute.utils [None req-1e1fbba5-4b37-438c-a0c7-7d09c658ba03 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] [instance: 337b31e7-a6c9-4f35-9936-62cff06fe2a1] Binding failed for port 499ebbdf-24ab-4dda-b4db-a9c532f944e3, please check neutron logs for more information. 
{{(pid=61839) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 768.179615] env[61839]: DEBUG oslo_concurrency.lockutils [None req-d7ee033d-2803-45eb-9965-f6766e5fb5bc tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.901s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 768.182088] env[61839]: INFO nova.compute.claims [None req-d7ee033d-2803-45eb-9965-f6766e5fb5bc tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] [instance: e81bf730-9cf6-4728-aae4-4962115f8b6f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 768.185290] env[61839]: DEBUG nova.compute.manager [None req-1e1fbba5-4b37-438c-a0c7-7d09c658ba03 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] [instance: 337b31e7-a6c9-4f35-9936-62cff06fe2a1] Build of instance 337b31e7-a6c9-4f35-9936-62cff06fe2a1 was re-scheduled: Binding failed for port 499ebbdf-24ab-4dda-b4db-a9c532f944e3, please check neutron logs for more information. {{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 768.186516] env[61839]: DEBUG nova.compute.manager [None req-1e1fbba5-4b37-438c-a0c7-7d09c658ba03 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] [instance: 337b31e7-a6c9-4f35-9936-62cff06fe2a1] Unplugging VIFs for instance {{(pid=61839) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 768.186821] env[61839]: DEBUG oslo_concurrency.lockutils [None req-1e1fbba5-4b37-438c-a0c7-7d09c658ba03 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Acquiring lock "refresh_cache-337b31e7-a6c9-4f35-9936-62cff06fe2a1" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 768.187037] env[61839]: DEBUG oslo_concurrency.lockutils [None req-1e1fbba5-4b37-438c-a0c7-7d09c658ba03 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Acquired lock "refresh_cache-337b31e7-a6c9-4f35-9936-62cff06fe2a1" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 768.187260] env[61839]: DEBUG nova.network.neutron [None req-1e1fbba5-4b37-438c-a0c7-7d09c658ba03 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] [instance: 337b31e7-a6c9-4f35-9936-62cff06fe2a1] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 768.285933] env[61839]: DEBUG nova.compute.manager [None req-85100d62-2957-4746-911c-e5a5695cb3f6 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 5c29c188-a34b-4751-9f8b-166af7b15088] Starting instance... {{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 768.343865] env[61839]: DEBUG nova.compute.manager [None req-b5bdc0dc-c0c3-4680-812b-c7e0bf9d39a6 tempest-InstanceActionsNegativeTestJSON-917154709 tempest-InstanceActionsNegativeTestJSON-917154709-project-member] [instance: 821b784d-dc69-4c54-bccf-76693c34e19d] Start spawning the instance on the hypervisor. 
{{(pid=61839) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 768.378295] env[61839]: DEBUG nova.virt.hardware [None req-b5bdc0dc-c0c3-4680-812b-c7e0bf9d39a6 tempest-InstanceActionsNegativeTestJSON-917154709 tempest-InstanceActionsNegativeTestJSON-917154709-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-18T16:51:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-18T16:50:57Z,direct_url=,disk_format='vmdk',id=e497cc62-282a-4a70-9770-22d80d8a1013,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a98e034276e44534a9feace637762da7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-18T16:50:58Z,virtual_size=,visibility=), allow threads: False {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 768.378295] env[61839]: DEBUG nova.virt.hardware [None req-b5bdc0dc-c0c3-4680-812b-c7e0bf9d39a6 tempest-InstanceActionsNegativeTestJSON-917154709 tempest-InstanceActionsNegativeTestJSON-917154709-project-member] Flavor limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 768.378295] env[61839]: DEBUG nova.virt.hardware [None req-b5bdc0dc-c0c3-4680-812b-c7e0bf9d39a6 tempest-InstanceActionsNegativeTestJSON-917154709 tempest-InstanceActionsNegativeTestJSON-917154709-project-member] Image limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 768.378459] env[61839]: DEBUG nova.virt.hardware [None req-b5bdc0dc-c0c3-4680-812b-c7e0bf9d39a6 tempest-InstanceActionsNegativeTestJSON-917154709 tempest-InstanceActionsNegativeTestJSON-917154709-project-member] Flavor pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 768.378459] env[61839]: DEBUG nova.virt.hardware [None req-b5bdc0dc-c0c3-4680-812b-c7e0bf9d39a6 tempest-InstanceActionsNegativeTestJSON-917154709 tempest-InstanceActionsNegativeTestJSON-917154709-project-member] Image pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 768.378459] env[61839]: DEBUG nova.virt.hardware [None req-b5bdc0dc-c0c3-4680-812b-c7e0bf9d39a6 tempest-InstanceActionsNegativeTestJSON-917154709 tempest-InstanceActionsNegativeTestJSON-917154709-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 768.378459] env[61839]: DEBUG nova.virt.hardware [None req-b5bdc0dc-c0c3-4680-812b-c7e0bf9d39a6 tempest-InstanceActionsNegativeTestJSON-917154709 tempest-InstanceActionsNegativeTestJSON-917154709-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 768.378459] env[61839]: DEBUG nova.virt.hardware [None req-b5bdc0dc-c0c3-4680-812b-c7e0bf9d39a6 tempest-InstanceActionsNegativeTestJSON-917154709 tempest-InstanceActionsNegativeTestJSON-917154709-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61839) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 768.378637] env[61839]: DEBUG nova.virt.hardware [None req-b5bdc0dc-c0c3-4680-812b-c7e0bf9d39a6 tempest-InstanceActionsNegativeTestJSON-917154709 tempest-InstanceActionsNegativeTestJSON-917154709-project-member] Got 1 possible topologies {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 768.378637] env[61839]: DEBUG nova.virt.hardware [None req-b5bdc0dc-c0c3-4680-812b-c7e0bf9d39a6 tempest-InstanceActionsNegativeTestJSON-917154709 tempest-InstanceActionsNegativeTestJSON-917154709-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 768.378881] env[61839]: DEBUG nova.virt.hardware [None req-b5bdc0dc-c0c3-4680-812b-c7e0bf9d39a6 tempest-InstanceActionsNegativeTestJSON-917154709 tempest-InstanceActionsNegativeTestJSON-917154709-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 768.381312] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b7900d8-1cbe-4b69-bbfc-c9c5fd2dc27d {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.389936] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3696af8d-b86c-4603-b060-006aa37ce8d3 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.715904] env[61839]: DEBUG nova.network.neutron [None req-1e1fbba5-4b37-438c-a0c7-7d09c658ba03 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] [instance: 337b31e7-a6c9-4f35-9936-62cff06fe2a1] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 768.816594] env[61839]: DEBUG oslo_concurrency.lockutils [None req-85100d62-2957-4746-911c-e5a5695cb3f6 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 768.928686] env[61839]: DEBUG nova.network.neutron [None req-1e1fbba5-4b37-438c-a0c7-7d09c658ba03 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] [instance: 337b31e7-a6c9-4f35-9936-62cff06fe2a1] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 769.019939] env[61839]: ERROR nova.compute.manager [None req-b5bdc0dc-c0c3-4680-812b-c7e0bf9d39a6 tempest-InstanceActionsNegativeTestJSON-917154709 tempest-InstanceActionsNegativeTestJSON-917154709-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port ceaf8820-e4ad-40ab-b463-0c7c1c6fa156, please check neutron logs for more information. 
[ 769.019939] env[61839]: ERROR nova.compute.manager Traceback (most recent call last): [ 769.019939] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 769.019939] env[61839]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 769.019939] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 769.019939] env[61839]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 769.019939] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 769.019939] env[61839]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 769.019939] env[61839]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 769.019939] env[61839]: ERROR nova.compute.manager self.force_reraise() [ 769.019939] env[61839]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 769.019939] env[61839]: ERROR nova.compute.manager raise self.value [ 769.019939] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 769.019939] env[61839]: ERROR nova.compute.manager updated_port = self._update_port( [ 769.019939] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 769.019939] env[61839]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 769.020567] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 769.020567] env[61839]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 769.020567] env[61839]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port ceaf8820-e4ad-40ab-b463-0c7c1c6fa156, please check neutron logs for more information. 
[ 769.020567] env[61839]: ERROR nova.compute.manager [ 769.020789] env[61839]: Traceback (most recent call last): [ 769.022181] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 769.022181] env[61839]: listener.cb(fileno) [ 769.022181] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 769.022181] env[61839]: result = function(*args, **kwargs) [ 769.022181] env[61839]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 769.022181] env[61839]: return func(*args, **kwargs) [ 769.022181] env[61839]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 769.022181] env[61839]: raise e [ 769.022181] env[61839]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 769.022181] env[61839]: nwinfo = self.network_api.allocate_for_instance( [ 769.022181] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 769.022181] env[61839]: created_port_ids = self._update_ports_for_instance( [ 769.022181] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 769.022181] env[61839]: with excutils.save_and_reraise_exception(): [ 769.022181] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 769.022181] env[61839]: self.force_reraise() [ 769.022181] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 769.022181] env[61839]: raise self.value [ 769.022181] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 769.022181] env[61839]: updated_port = self._update_port( [ 769.022181] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 769.022181] env[61839]: _ensure_no_port_binding_failure(port) [ 769.022181] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 769.022181] env[61839]: raise exception.PortBindingFailed(port_id=port['id']) [ 769.022181] env[61839]: nova.exception.PortBindingFailed: Binding failed for port ceaf8820-e4ad-40ab-b463-0c7c1c6fa156, please check neutron logs for more information. [ 769.022181] env[61839]: Removing descriptor: 17 [ 769.023431] env[61839]: ERROR nova.compute.manager [None req-b5bdc0dc-c0c3-4680-812b-c7e0bf9d39a6 tempest-InstanceActionsNegativeTestJSON-917154709 tempest-InstanceActionsNegativeTestJSON-917154709-project-member] [instance: 821b784d-dc69-4c54-bccf-76693c34e19d] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port ceaf8820-e4ad-40ab-b463-0c7c1c6fa156, please check neutron logs for more information. 
[ 769.023431] env[61839]: ERROR nova.compute.manager [instance: 821b784d-dc69-4c54-bccf-76693c34e19d] Traceback (most recent call last): [ 769.023431] env[61839]: ERROR nova.compute.manager [instance: 821b784d-dc69-4c54-bccf-76693c34e19d] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 769.023431] env[61839]: ERROR nova.compute.manager [instance: 821b784d-dc69-4c54-bccf-76693c34e19d] yield resources [ 769.023431] env[61839]: ERROR nova.compute.manager [instance: 821b784d-dc69-4c54-bccf-76693c34e19d] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 769.023431] env[61839]: ERROR nova.compute.manager [instance: 821b784d-dc69-4c54-bccf-76693c34e19d] self.driver.spawn(context, instance, image_meta, [ 769.023431] env[61839]: ERROR nova.compute.manager [instance: 821b784d-dc69-4c54-bccf-76693c34e19d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 769.023431] env[61839]: ERROR nova.compute.manager [instance: 821b784d-dc69-4c54-bccf-76693c34e19d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 769.023431] env[61839]: ERROR nova.compute.manager [instance: 821b784d-dc69-4c54-bccf-76693c34e19d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 769.023431] env[61839]: ERROR nova.compute.manager [instance: 821b784d-dc69-4c54-bccf-76693c34e19d] vm_ref = self.build_virtual_machine(instance, [ 769.023431] env[61839]: ERROR nova.compute.manager [instance: 821b784d-dc69-4c54-bccf-76693c34e19d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 769.023859] env[61839]: ERROR nova.compute.manager [instance: 821b784d-dc69-4c54-bccf-76693c34e19d] vif_infos = vmwarevif.get_vif_info(self._session, [ 769.023859] env[61839]: ERROR nova.compute.manager [instance: 821b784d-dc69-4c54-bccf-76693c34e19d] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 769.023859] env[61839]: ERROR nova.compute.manager [instance: 821b784d-dc69-4c54-bccf-76693c34e19d] for vif in network_info: [ 769.023859] env[61839]: ERROR nova.compute.manager [instance: 821b784d-dc69-4c54-bccf-76693c34e19d] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 769.023859] env[61839]: ERROR nova.compute.manager [instance: 821b784d-dc69-4c54-bccf-76693c34e19d] return self._sync_wrapper(fn, *args, **kwargs) [ 769.023859] env[61839]: ERROR nova.compute.manager [instance: 821b784d-dc69-4c54-bccf-76693c34e19d] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 769.023859] env[61839]: ERROR nova.compute.manager [instance: 821b784d-dc69-4c54-bccf-76693c34e19d] self.wait() [ 769.023859] env[61839]: ERROR nova.compute.manager [instance: 821b784d-dc69-4c54-bccf-76693c34e19d] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 769.023859] env[61839]: ERROR nova.compute.manager [instance: 821b784d-dc69-4c54-bccf-76693c34e19d] self[:] = self._gt.wait() [ 769.023859] env[61839]: ERROR nova.compute.manager [instance: 821b784d-dc69-4c54-bccf-76693c34e19d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 769.023859] env[61839]: ERROR nova.compute.manager [instance: 821b784d-dc69-4c54-bccf-76693c34e19d] return self._exit_event.wait() [ 769.023859] env[61839]: ERROR nova.compute.manager [instance: 821b784d-dc69-4c54-bccf-76693c34e19d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 769.023859] env[61839]: ERROR 
nova.compute.manager [instance: 821b784d-dc69-4c54-bccf-76693c34e19d] result = hub.switch() [ 769.025275] env[61839]: ERROR nova.compute.manager [instance: 821b784d-dc69-4c54-bccf-76693c34e19d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 769.025275] env[61839]: ERROR nova.compute.manager [instance: 821b784d-dc69-4c54-bccf-76693c34e19d] return self.greenlet.switch() [ 769.025275] env[61839]: ERROR nova.compute.manager [instance: 821b784d-dc69-4c54-bccf-76693c34e19d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 769.025275] env[61839]: ERROR nova.compute.manager [instance: 821b784d-dc69-4c54-bccf-76693c34e19d] result = function(*args, **kwargs) [ 769.025275] env[61839]: ERROR nova.compute.manager [instance: 821b784d-dc69-4c54-bccf-76693c34e19d] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 769.025275] env[61839]: ERROR nova.compute.manager [instance: 821b784d-dc69-4c54-bccf-76693c34e19d] return func(*args, **kwargs) [ 769.025275] env[61839]: ERROR nova.compute.manager [instance: 821b784d-dc69-4c54-bccf-76693c34e19d] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 769.025275] env[61839]: ERROR nova.compute.manager [instance: 821b784d-dc69-4c54-bccf-76693c34e19d] raise e [ 769.025275] env[61839]: ERROR nova.compute.manager [instance: 821b784d-dc69-4c54-bccf-76693c34e19d] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 769.025275] env[61839]: ERROR nova.compute.manager [instance: 821b784d-dc69-4c54-bccf-76693c34e19d] nwinfo = self.network_api.allocate_for_instance( [ 769.025275] env[61839]: ERROR nova.compute.manager [instance: 821b784d-dc69-4c54-bccf-76693c34e19d] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 769.025275] env[61839]: ERROR nova.compute.manager [instance: 821b784d-dc69-4c54-bccf-76693c34e19d] created_port_ids = self._update_ports_for_instance( [ 769.025275] env[61839]: ERROR nova.compute.manager [instance: 821b784d-dc69-4c54-bccf-76693c34e19d] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 769.025780] env[61839]: ERROR nova.compute.manager [instance: 821b784d-dc69-4c54-bccf-76693c34e19d] with excutils.save_and_reraise_exception(): [ 769.025780] env[61839]: ERROR nova.compute.manager [instance: 821b784d-dc69-4c54-bccf-76693c34e19d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 769.025780] env[61839]: ERROR nova.compute.manager [instance: 821b784d-dc69-4c54-bccf-76693c34e19d] self.force_reraise() [ 769.025780] env[61839]: ERROR nova.compute.manager [instance: 821b784d-dc69-4c54-bccf-76693c34e19d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 769.025780] env[61839]: ERROR nova.compute.manager [instance: 821b784d-dc69-4c54-bccf-76693c34e19d] raise self.value [ 769.025780] env[61839]: ERROR nova.compute.manager [instance: 821b784d-dc69-4c54-bccf-76693c34e19d] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 769.025780] env[61839]: ERROR nova.compute.manager [instance: 821b784d-dc69-4c54-bccf-76693c34e19d] updated_port = self._update_port( [ 769.025780] env[61839]: ERROR nova.compute.manager [instance: 821b784d-dc69-4c54-bccf-76693c34e19d] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 769.025780] 
env[61839]: ERROR nova.compute.manager [instance: 821b784d-dc69-4c54-bccf-76693c34e19d] _ensure_no_port_binding_failure(port) [ 769.025780] env[61839]: ERROR nova.compute.manager [instance: 821b784d-dc69-4c54-bccf-76693c34e19d] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 769.025780] env[61839]: ERROR nova.compute.manager [instance: 821b784d-dc69-4c54-bccf-76693c34e19d] raise exception.PortBindingFailed(port_id=port['id']) [ 769.025780] env[61839]: ERROR nova.compute.manager [instance: 821b784d-dc69-4c54-bccf-76693c34e19d] nova.exception.PortBindingFailed: Binding failed for port ceaf8820-e4ad-40ab-b463-0c7c1c6fa156, please check neutron logs for more information. [ 769.025780] env[61839]: ERROR nova.compute.manager [instance: 821b784d-dc69-4c54-bccf-76693c34e19d] [ 769.026244] env[61839]: INFO nova.compute.manager [None req-b5bdc0dc-c0c3-4680-812b-c7e0bf9d39a6 tempest-InstanceActionsNegativeTestJSON-917154709 tempest-InstanceActionsNegativeTestJSON-917154709-project-member] [instance: 821b784d-dc69-4c54-bccf-76693c34e19d] Terminating instance [ 769.026733] env[61839]: DEBUG oslo_concurrency.lockutils [None req-b5bdc0dc-c0c3-4680-812b-c7e0bf9d39a6 tempest-InstanceActionsNegativeTestJSON-917154709 tempest-InstanceActionsNegativeTestJSON-917154709-project-member] Acquiring lock "refresh_cache-821b784d-dc69-4c54-bccf-76693c34e19d" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 769.026931] env[61839]: DEBUG oslo_concurrency.lockutils [None req-b5bdc0dc-c0c3-4680-812b-c7e0bf9d39a6 tempest-InstanceActionsNegativeTestJSON-917154709 tempest-InstanceActionsNegativeTestJSON-917154709-project-member] Acquired lock "refresh_cache-821b784d-dc69-4c54-bccf-76693c34e19d" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 769.027176] env[61839]: DEBUG nova.network.neutron [None req-b5bdc0dc-c0c3-4680-812b-c7e0bf9d39a6 tempest-InstanceActionsNegativeTestJSON-917154709 tempest-InstanceActionsNegativeTestJSON-917154709-project-member] [instance: 821b784d-dc69-4c54-bccf-76693c34e19d] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 769.420831] env[61839]: DEBUG nova.network.neutron [-] [instance: 619ec15b-463a-4daa-bffe-7d7a6022b962] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 769.433224] env[61839]: DEBUG oslo_concurrency.lockutils [None req-1e1fbba5-4b37-438c-a0c7-7d09c658ba03 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Releasing lock "refresh_cache-337b31e7-a6c9-4f35-9936-62cff06fe2a1" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 769.433552] env[61839]: DEBUG nova.compute.manager [None req-1e1fbba5-4b37-438c-a0c7-7d09c658ba03 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged. 
{{(pid=61839) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 769.433794] env[61839]: DEBUG nova.compute.manager [None req-1e1fbba5-4b37-438c-a0c7-7d09c658ba03 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] [instance: 337b31e7-a6c9-4f35-9936-62cff06fe2a1] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 769.433989] env[61839]: DEBUG nova.network.neutron [None req-1e1fbba5-4b37-438c-a0c7-7d09c658ba03 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] [instance: 337b31e7-a6c9-4f35-9936-62cff06fe2a1] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 769.463460] env[61839]: DEBUG nova.network.neutron [None req-1e1fbba5-4b37-438c-a0c7-7d09c658ba03 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] [instance: 337b31e7-a6c9-4f35-9936-62cff06fe2a1] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 769.515257] env[61839]: DEBUG nova.compute.manager [req-43edaaa9-de10-4b38-8635-2d4239d2398c req-0f1e5da6-6b72-46ea-bcaa-b7ee341e4a6c service nova] [instance: 821b784d-dc69-4c54-bccf-76693c34e19d] Received event network-changed-ceaf8820-e4ad-40ab-b463-0c7c1c6fa156 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 769.515425] env[61839]: DEBUG nova.compute.manager [req-43edaaa9-de10-4b38-8635-2d4239d2398c req-0f1e5da6-6b72-46ea-bcaa-b7ee341e4a6c service nova] [instance: 821b784d-dc69-4c54-bccf-76693c34e19d] Refreshing instance network info cache due to event network-changed-ceaf8820-e4ad-40ab-b463-0c7c1c6fa156. 
{{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 769.515757] env[61839]: DEBUG oslo_concurrency.lockutils [req-43edaaa9-de10-4b38-8635-2d4239d2398c req-0f1e5da6-6b72-46ea-bcaa-b7ee341e4a6c service nova] Acquiring lock "refresh_cache-821b784d-dc69-4c54-bccf-76693c34e19d" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 769.556017] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31c566c1-a635-4c96-a97c-fcb9cccb0133 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.564109] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f839292d-a462-4ba6-b3fd-826f527835cc {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.595687] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2716b79d-41ae-4173-a5d8-53df581cd485 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.602946] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89596fb2-1f8d-487c-b48e-6b24beb631b3 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.616107] env[61839]: DEBUG nova.compute.provider_tree [None req-d7ee033d-2803-45eb-9965-f6766e5fb5bc tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 769.669878] env[61839]: DEBUG nova.network.neutron [None req-b5bdc0dc-c0c3-4680-812b-c7e0bf9d39a6 tempest-InstanceActionsNegativeTestJSON-917154709 tempest-InstanceActionsNegativeTestJSON-917154709-project-member] [instance: 821b784d-dc69-4c54-bccf-76693c34e19d] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 769.819635] env[61839]: DEBUG nova.network.neutron [None req-b5bdc0dc-c0c3-4680-812b-c7e0bf9d39a6 tempest-InstanceActionsNegativeTestJSON-917154709 tempest-InstanceActionsNegativeTestJSON-917154709-project-member] [instance: 821b784d-dc69-4c54-bccf-76693c34e19d] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 769.923539] env[61839]: INFO nova.compute.manager [-] [instance: 619ec15b-463a-4daa-bffe-7d7a6022b962] Took 2.01 seconds to deallocate network for instance. 
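The scheduler report client keeps logging the same inventory for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908. The capacity those claims are checked against follows the usual placement arithmetic, (total - reserved) * allocation_ratio per resource class; a small illustrative calculation (not Nova's code) using the values from the log:

    # Reproduces the schedulable capacity implied by the logged inventory.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    def capacity(inv):
        # Placement hands out (total - reserved) * allocation_ratio of each
        # resource class, so a 48-VCPU host with a 4.0 ratio backs 192 vCPUs.
        return {rc: (f['total'] - f['reserved']) * f['allocation_ratio']
                for rc, f in inv.items()}

    print(capacity(inventory))
    # {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}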
[ 769.925858] env[61839]: DEBUG nova.compute.claims [None req-ba95565c-feba-4942-8392-448356c664a8 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] [instance: 619ec15b-463a-4daa-bffe-7d7a6022b962] Aborting claim: {{(pid=61839) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 769.926049] env[61839]: DEBUG oslo_concurrency.lockutils [None req-ba95565c-feba-4942-8392-448356c664a8 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 769.968169] env[61839]: DEBUG nova.network.neutron [None req-1e1fbba5-4b37-438c-a0c7-7d09c658ba03 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] [instance: 337b31e7-a6c9-4f35-9936-62cff06fe2a1] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 770.121647] env[61839]: DEBUG nova.scheduler.client.report [None req-d7ee033d-2803-45eb-9965-f6766e5fb5bc tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 770.325023] env[61839]: DEBUG oslo_concurrency.lockutils [None req-b5bdc0dc-c0c3-4680-812b-c7e0bf9d39a6 tempest-InstanceActionsNegativeTestJSON-917154709 tempest-InstanceActionsNegativeTestJSON-917154709-project-member] Releasing lock "refresh_cache-821b784d-dc69-4c54-bccf-76693c34e19d" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 770.325023] env[61839]: DEBUG nova.compute.manager [None req-b5bdc0dc-c0c3-4680-812b-c7e0bf9d39a6 tempest-InstanceActionsNegativeTestJSON-917154709 tempest-InstanceActionsNegativeTestJSON-917154709-project-member] [instance: 821b784d-dc69-4c54-bccf-76693c34e19d] Start destroying the instance on the hypervisor. 
{{(pid=61839) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 770.325023] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-b5bdc0dc-c0c3-4680-812b-c7e0bf9d39a6 tempest-InstanceActionsNegativeTestJSON-917154709 tempest-InstanceActionsNegativeTestJSON-917154709-project-member] [instance: 821b784d-dc69-4c54-bccf-76693c34e19d] Destroying instance {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 770.325023] env[61839]: DEBUG oslo_concurrency.lockutils [req-43edaaa9-de10-4b38-8635-2d4239d2398c req-0f1e5da6-6b72-46ea-bcaa-b7ee341e4a6c service nova] Acquired lock "refresh_cache-821b784d-dc69-4c54-bccf-76693c34e19d" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 770.325023] env[61839]: DEBUG nova.network.neutron [req-43edaaa9-de10-4b38-8635-2d4239d2398c req-0f1e5da6-6b72-46ea-bcaa-b7ee341e4a6c service nova] [instance: 821b784d-dc69-4c54-bccf-76693c34e19d] Refreshing network info cache for port ceaf8820-e4ad-40ab-b463-0c7c1c6fa156 {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 770.325229] env[61839]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-44e7c3a0-6c25-420a-84d2-5619339c006b {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.335048] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b8cd96e-0485-4b14-86e6-e06b840dbc34 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.355817] env[61839]: WARNING nova.virt.vmwareapi.vmops [None req-b5bdc0dc-c0c3-4680-812b-c7e0bf9d39a6 tempest-InstanceActionsNegativeTestJSON-917154709 tempest-InstanceActionsNegativeTestJSON-917154709-project-member] [instance: 821b784d-dc69-4c54-bccf-76693c34e19d] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 821b784d-dc69-4c54-bccf-76693c34e19d could not be found. [ 770.356065] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-b5bdc0dc-c0c3-4680-812b-c7e0bf9d39a6 tempest-InstanceActionsNegativeTestJSON-917154709 tempest-InstanceActionsNegativeTestJSON-917154709-project-member] [instance: 821b784d-dc69-4c54-bccf-76693c34e19d] Instance destroyed {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 770.356237] env[61839]: INFO nova.compute.manager [None req-b5bdc0dc-c0c3-4680-812b-c7e0bf9d39a6 tempest-InstanceActionsNegativeTestJSON-917154709 tempest-InstanceActionsNegativeTestJSON-917154709-project-member] [instance: 821b784d-dc69-4c54-bccf-76693c34e19d] Took 0.03 seconds to destroy the instance on the hypervisor. [ 770.356486] env[61839]: DEBUG oslo.service.loopingcall [None req-b5bdc0dc-c0c3-4680-812b-c7e0bf9d39a6 tempest-InstanceActionsNegativeTestJSON-917154709 tempest-InstanceActionsNegativeTestJSON-917154709-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 770.356708] env[61839]: DEBUG nova.compute.manager [-] [instance: 821b784d-dc69-4c54-bccf-76693c34e19d] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 770.356833] env[61839]: DEBUG nova.network.neutron [-] [instance: 821b784d-dc69-4c54-bccf-76693c34e19d] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 770.381441] env[61839]: DEBUG nova.network.neutron [-] [instance: 821b784d-dc69-4c54-bccf-76693c34e19d] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 770.472084] env[61839]: INFO nova.compute.manager [None req-1e1fbba5-4b37-438c-a0c7-7d09c658ba03 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] [instance: 337b31e7-a6c9-4f35-9936-62cff06fe2a1] Took 1.04 seconds to deallocate network for instance. [ 770.627230] env[61839]: DEBUG oslo_concurrency.lockutils [None req-d7ee033d-2803-45eb-9965-f6766e5fb5bc tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.447s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 770.627760] env[61839]: DEBUG nova.compute.manager [None req-d7ee033d-2803-45eb-9965-f6766e5fb5bc tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] [instance: e81bf730-9cf6-4728-aae4-4962115f8b6f] Start building networks asynchronously for instance. {{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 770.631238] env[61839]: DEBUG oslo_concurrency.lockutils [None req-d7ee033d-2803-45eb-9965-f6766e5fb5bc tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.499s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 770.633776] env[61839]: INFO nova.compute.claims [None req-d7ee033d-2803-45eb-9965-f6766e5fb5bc tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] [instance: 697222e0-07e5-4a3d-adbe-d5d815cf4756] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 770.848245] env[61839]: DEBUG nova.network.neutron [req-43edaaa9-de10-4b38-8635-2d4239d2398c req-0f1e5da6-6b72-46ea-bcaa-b7ee341e4a6c service nova] [instance: 821b784d-dc69-4c54-bccf-76693c34e19d] Instance cache missing network info. 
{{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 770.884505] env[61839]: DEBUG nova.network.neutron [-] [instance: 821b784d-dc69-4c54-bccf-76693c34e19d] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 771.004693] env[61839]: DEBUG nova.network.neutron [req-43edaaa9-de10-4b38-8635-2d4239d2398c req-0f1e5da6-6b72-46ea-bcaa-b7ee341e4a6c service nova] [instance: 821b784d-dc69-4c54-bccf-76693c34e19d] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 771.135047] env[61839]: DEBUG nova.compute.utils [None req-d7ee033d-2803-45eb-9965-f6766e5fb5bc tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Using /dev/sd instead of None {{(pid=61839) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 771.135047] env[61839]: DEBUG nova.compute.manager [None req-d7ee033d-2803-45eb-9965-f6766e5fb5bc tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] [instance: e81bf730-9cf6-4728-aae4-4962115f8b6f] Allocating IP information in the background. {{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 771.135287] env[61839]: DEBUG nova.network.neutron [None req-d7ee033d-2803-45eb-9965-f6766e5fb5bc tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] [instance: e81bf730-9cf6-4728-aae4-4962115f8b6f] allocate_for_instance() {{(pid=61839) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 771.191790] env[61839]: DEBUG nova.policy [None req-d7ee033d-2803-45eb-9965-f6766e5fb5bc tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd9360f9fcba74a5aae33fdec3660b790', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'fefa976ea98445f1b4e719d3e3a1e8af', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61839) authorize /opt/stack/nova/nova/policy.py:201}} [ 771.387404] env[61839]: INFO nova.compute.manager [-] [instance: 821b784d-dc69-4c54-bccf-76693c34e19d] Took 1.03 seconds to deallocate network for instance. 
[ 771.390270] env[61839]: DEBUG nova.compute.claims [None req-b5bdc0dc-c0c3-4680-812b-c7e0bf9d39a6 tempest-InstanceActionsNegativeTestJSON-917154709 tempest-InstanceActionsNegativeTestJSON-917154709-project-member] [instance: 821b784d-dc69-4c54-bccf-76693c34e19d] Aborting claim: {{(pid=61839) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 771.390518] env[61839]: DEBUG oslo_concurrency.lockutils [None req-b5bdc0dc-c0c3-4680-812b-c7e0bf9d39a6 tempest-InstanceActionsNegativeTestJSON-917154709 tempest-InstanceActionsNegativeTestJSON-917154709-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 771.504029] env[61839]: INFO nova.scheduler.client.report [None req-1e1fbba5-4b37-438c-a0c7-7d09c658ba03 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Deleted allocations for instance 337b31e7-a6c9-4f35-9936-62cff06fe2a1 [ 771.513226] env[61839]: DEBUG oslo_concurrency.lockutils [req-43edaaa9-de10-4b38-8635-2d4239d2398c req-0f1e5da6-6b72-46ea-bcaa-b7ee341e4a6c service nova] Releasing lock "refresh_cache-821b784d-dc69-4c54-bccf-76693c34e19d" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 771.513521] env[61839]: DEBUG nova.compute.manager [req-43edaaa9-de10-4b38-8635-2d4239d2398c req-0f1e5da6-6b72-46ea-bcaa-b7ee341e4a6c service nova] [instance: 821b784d-dc69-4c54-bccf-76693c34e19d] Received event network-vif-deleted-ceaf8820-e4ad-40ab-b463-0c7c1c6fa156 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 771.543591] env[61839]: DEBUG nova.network.neutron [None req-d7ee033d-2803-45eb-9965-f6766e5fb5bc tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] [instance: e81bf730-9cf6-4728-aae4-4962115f8b6f] Successfully created port: c5255785-dcf2-4894-812d-0a92bec33b87 {{(pid=61839) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 771.640408] env[61839]: DEBUG nova.compute.manager [None req-d7ee033d-2803-45eb-9965-f6766e5fb5bc tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] [instance: e81bf730-9cf6-4728-aae4-4962115f8b6f] Start building block device mappings for instance. 
{{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 771.939404] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37295ff4-0327-49e8-bb62-4085aed83481 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.947337] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d385347-a4bd-42d0-90c7-cb579964c864 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.977690] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5059825c-2537-4652-a3ce-9727afbea220 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.985066] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72813c44-2d65-4397-9989-7645a006160e {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.998125] env[61839]: DEBUG nova.compute.provider_tree [None req-d7ee033d-2803-45eb-9965-f6766e5fb5bc tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 772.017128] env[61839]: DEBUG oslo_concurrency.lockutils [None req-1e1fbba5-4b37-438c-a0c7-7d09c658ba03 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Lock "337b31e7-a6c9-4f35-9936-62cff06fe2a1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 171.558s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 772.501436] env[61839]: DEBUG nova.scheduler.client.report [None req-d7ee033d-2803-45eb-9965-f6766e5fb5bc tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 772.522121] env[61839]: DEBUG nova.compute.manager [None req-fdadb98b-f7ca-40ff-9eb5-8f15cbae89cd tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 603191b6-a4b0-451b-b98b-f3dbfb684300] Starting instance...
{{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 772.626572] env[61839]: DEBUG nova.compute.manager [req-97111600-158b-4c5b-85cb-64aa79b0203f req-7373e93e-27c5-49e9-b2bd-ae47c5f71415 service nova] [instance: e81bf730-9cf6-4728-aae4-4962115f8b6f] Received event network-changed-c5255785-dcf2-4894-812d-0a92bec33b87 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 772.627257] env[61839]: DEBUG nova.compute.manager [req-97111600-158b-4c5b-85cb-64aa79b0203f req-7373e93e-27c5-49e9-b2bd-ae47c5f71415 service nova] [instance: e81bf730-9cf6-4728-aae4-4962115f8b6f] Refreshing instance network info cache due to event network-changed-c5255785-dcf2-4894-812d-0a92bec33b87. {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 772.627631] env[61839]: DEBUG oslo_concurrency.lockutils [req-97111600-158b-4c5b-85cb-64aa79b0203f req-7373e93e-27c5-49e9-b2bd-ae47c5f71415 service nova] Acquiring lock "refresh_cache-e81bf730-9cf6-4728-aae4-4962115f8b6f" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 772.628023] env[61839]: DEBUG oslo_concurrency.lockutils [req-97111600-158b-4c5b-85cb-64aa79b0203f req-7373e93e-27c5-49e9-b2bd-ae47c5f71415 service nova] Acquired lock "refresh_cache-e81bf730-9cf6-4728-aae4-4962115f8b6f" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 772.628114] env[61839]: DEBUG nova.network.neutron [req-97111600-158b-4c5b-85cb-64aa79b0203f req-7373e93e-27c5-49e9-b2bd-ae47c5f71415 service nova] [instance: e81bf730-9cf6-4728-aae4-4962115f8b6f] Refreshing network info cache for port c5255785-dcf2-4894-812d-0a92bec33b87 {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 772.653761] env[61839]: DEBUG nova.compute.manager [None req-d7ee033d-2803-45eb-9965-f6766e5fb5bc tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] [instance: e81bf730-9cf6-4728-aae4-4962115f8b6f] Start spawning the instance on the hypervisor. 
{{(pid=61839) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 772.677247] env[61839]: DEBUG nova.virt.hardware [None req-d7ee033d-2803-45eb-9965-f6766e5fb5bc tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-18T16:51:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-18T16:50:57Z,direct_url=<?>,disk_format='vmdk',id=e497cc62-282a-4a70-9770-22d80d8a1013,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a98e034276e44534a9feace637762da7',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-10-18T16:50:58Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 772.677496] env[61839]: DEBUG nova.virt.hardware [None req-d7ee033d-2803-45eb-9965-f6766e5fb5bc tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Flavor limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 772.677643] env[61839]: DEBUG nova.virt.hardware [None req-d7ee033d-2803-45eb-9965-f6766e5fb5bc tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Image limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 772.677817] env[61839]: DEBUG nova.virt.hardware [None req-d7ee033d-2803-45eb-9965-f6766e5fb5bc tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Flavor pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 772.677957] env[61839]: DEBUG nova.virt.hardware [None req-d7ee033d-2803-45eb-9965-f6766e5fb5bc tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Image pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 772.678113] env[61839]: DEBUG nova.virt.hardware [None req-d7ee033d-2803-45eb-9965-f6766e5fb5bc tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 772.678308] env[61839]: DEBUG nova.virt.hardware [None req-d7ee033d-2803-45eb-9965-f6766e5fb5bc tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 772.678467] env[61839]: DEBUG nova.virt.hardware [None req-d7ee033d-2803-45eb-9965-f6766e5fb5bc tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 772.678633] env[61839]: DEBUG nova.virt.hardware [None
req-d7ee033d-2803-45eb-9965-f6766e5fb5bc tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Got 1 possible topologies {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 772.680018] env[61839]: DEBUG nova.virt.hardware [None req-d7ee033d-2803-45eb-9965-f6766e5fb5bc tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 772.680018] env[61839]: DEBUG nova.virt.hardware [None req-d7ee033d-2803-45eb-9965-f6766e5fb5bc tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 772.680810] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed9440a7-9abf-4921-a821-88c0a89a9c41 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.689751] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-831bba49-d312-4c96-9c87-d799da4881a6 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.917160] env[61839]: ERROR nova.compute.manager [None req-d7ee033d-2803-45eb-9965-f6766e5fb5bc tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port c5255785-dcf2-4894-812d-0a92bec33b87, please check neutron logs for more information. 
[ 772.917160] env[61839]: ERROR nova.compute.manager Traceback (most recent call last): [ 772.917160] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 772.917160] env[61839]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 772.917160] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 772.917160] env[61839]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 772.917160] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 772.917160] env[61839]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 772.917160] env[61839]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 772.917160] env[61839]: ERROR nova.compute.manager self.force_reraise() [ 772.917160] env[61839]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 772.917160] env[61839]: ERROR nova.compute.manager raise self.value [ 772.917160] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 772.917160] env[61839]: ERROR nova.compute.manager updated_port = self._update_port( [ 772.917160] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 772.917160] env[61839]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 772.917526] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 772.917526] env[61839]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 772.917526] env[61839]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port c5255785-dcf2-4894-812d-0a92bec33b87, please check neutron logs for more information. 
[ 772.917526] env[61839]: ERROR nova.compute.manager [ 772.917526] env[61839]: Traceback (most recent call last): [ 772.917526] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 772.917526] env[61839]: listener.cb(fileno) [ 772.917526] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 772.917526] env[61839]: result = function(*args, **kwargs) [ 772.917526] env[61839]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 772.917526] env[61839]: return func(*args, **kwargs) [ 772.917526] env[61839]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 772.917526] env[61839]: raise e [ 772.917526] env[61839]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 772.917526] env[61839]: nwinfo = self.network_api.allocate_for_instance( [ 772.917526] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 772.917526] env[61839]: created_port_ids = self._update_ports_for_instance( [ 772.917526] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 772.917526] env[61839]: with excutils.save_and_reraise_exception(): [ 772.917526] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 772.917526] env[61839]: self.force_reraise() [ 772.917526] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 772.917526] env[61839]: raise self.value [ 772.917526] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 772.917526] env[61839]: updated_port = self._update_port( [ 772.917526] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 772.917526] env[61839]: _ensure_no_port_binding_failure(port) [ 772.917526] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 772.917526] env[61839]: raise exception.PortBindingFailed(port_id=port['id']) [ 772.918151] env[61839]: nova.exception.PortBindingFailed: Binding failed for port c5255785-dcf2-4894-812d-0a92bec33b87, please check neutron logs for more information. [ 772.918151] env[61839]: Removing descriptor: 17 [ 772.918151] env[61839]: ERROR nova.compute.manager [None req-d7ee033d-2803-45eb-9965-f6766e5fb5bc tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] [instance: e81bf730-9cf6-4728-aae4-4962115f8b6f] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port c5255785-dcf2-4894-812d-0a92bec33b87, please check neutron logs for more information. 
[ 772.918151] env[61839]: ERROR nova.compute.manager [instance: e81bf730-9cf6-4728-aae4-4962115f8b6f] Traceback (most recent call last): [ 772.918151] env[61839]: ERROR nova.compute.manager [instance: e81bf730-9cf6-4728-aae4-4962115f8b6f] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 772.918151] env[61839]: ERROR nova.compute.manager [instance: e81bf730-9cf6-4728-aae4-4962115f8b6f] yield resources [ 772.918151] env[61839]: ERROR nova.compute.manager [instance: e81bf730-9cf6-4728-aae4-4962115f8b6f] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 772.918151] env[61839]: ERROR nova.compute.manager [instance: e81bf730-9cf6-4728-aae4-4962115f8b6f] self.driver.spawn(context, instance, image_meta, [ 772.918151] env[61839]: ERROR nova.compute.manager [instance: e81bf730-9cf6-4728-aae4-4962115f8b6f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 772.918151] env[61839]: ERROR nova.compute.manager [instance: e81bf730-9cf6-4728-aae4-4962115f8b6f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 772.918151] env[61839]: ERROR nova.compute.manager [instance: e81bf730-9cf6-4728-aae4-4962115f8b6f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 772.918151] env[61839]: ERROR nova.compute.manager [instance: e81bf730-9cf6-4728-aae4-4962115f8b6f] vm_ref = self.build_virtual_machine(instance, [ 772.918424] env[61839]: ERROR nova.compute.manager [instance: e81bf730-9cf6-4728-aae4-4962115f8b6f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 772.918424] env[61839]: ERROR nova.compute.manager [instance: e81bf730-9cf6-4728-aae4-4962115f8b6f] vif_infos = vmwarevif.get_vif_info(self._session, [ 772.918424] env[61839]: ERROR nova.compute.manager [instance: e81bf730-9cf6-4728-aae4-4962115f8b6f] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 772.918424] env[61839]: ERROR nova.compute.manager [instance: e81bf730-9cf6-4728-aae4-4962115f8b6f] for vif in network_info: [ 772.918424] env[61839]: ERROR nova.compute.manager [instance: e81bf730-9cf6-4728-aae4-4962115f8b6f] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 772.918424] env[61839]: ERROR nova.compute.manager [instance: e81bf730-9cf6-4728-aae4-4962115f8b6f] return self._sync_wrapper(fn, *args, **kwargs) [ 772.918424] env[61839]: ERROR nova.compute.manager [instance: e81bf730-9cf6-4728-aae4-4962115f8b6f] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 772.918424] env[61839]: ERROR nova.compute.manager [instance: e81bf730-9cf6-4728-aae4-4962115f8b6f] self.wait() [ 772.918424] env[61839]: ERROR nova.compute.manager [instance: e81bf730-9cf6-4728-aae4-4962115f8b6f] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 772.918424] env[61839]: ERROR nova.compute.manager [instance: e81bf730-9cf6-4728-aae4-4962115f8b6f] self[:] = self._gt.wait() [ 772.918424] env[61839]: ERROR nova.compute.manager [instance: e81bf730-9cf6-4728-aae4-4962115f8b6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 772.918424] env[61839]: ERROR nova.compute.manager [instance: e81bf730-9cf6-4728-aae4-4962115f8b6f] return self._exit_event.wait() [ 772.918424] env[61839]: ERROR nova.compute.manager [instance: e81bf730-9cf6-4728-aae4-4962115f8b6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 772.918681] env[61839]: ERROR 
nova.compute.manager [instance: e81bf730-9cf6-4728-aae4-4962115f8b6f] result = hub.switch() [ 772.918681] env[61839]: ERROR nova.compute.manager [instance: e81bf730-9cf6-4728-aae4-4962115f8b6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 772.918681] env[61839]: ERROR nova.compute.manager [instance: e81bf730-9cf6-4728-aae4-4962115f8b6f] return self.greenlet.switch() [ 772.918681] env[61839]: ERROR nova.compute.manager [instance: e81bf730-9cf6-4728-aae4-4962115f8b6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 772.918681] env[61839]: ERROR nova.compute.manager [instance: e81bf730-9cf6-4728-aae4-4962115f8b6f] result = function(*args, **kwargs) [ 772.918681] env[61839]: ERROR nova.compute.manager [instance: e81bf730-9cf6-4728-aae4-4962115f8b6f] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 772.918681] env[61839]: ERROR nova.compute.manager [instance: e81bf730-9cf6-4728-aae4-4962115f8b6f] return func(*args, **kwargs) [ 772.918681] env[61839]: ERROR nova.compute.manager [instance: e81bf730-9cf6-4728-aae4-4962115f8b6f] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 772.918681] env[61839]: ERROR nova.compute.manager [instance: e81bf730-9cf6-4728-aae4-4962115f8b6f] raise e [ 772.918681] env[61839]: ERROR nova.compute.manager [instance: e81bf730-9cf6-4728-aae4-4962115f8b6f] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 772.918681] env[61839]: ERROR nova.compute.manager [instance: e81bf730-9cf6-4728-aae4-4962115f8b6f] nwinfo = self.network_api.allocate_for_instance( [ 772.918681] env[61839]: ERROR nova.compute.manager [instance: e81bf730-9cf6-4728-aae4-4962115f8b6f] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 772.918681] env[61839]: ERROR nova.compute.manager [instance: e81bf730-9cf6-4728-aae4-4962115f8b6f] created_port_ids = self._update_ports_for_instance( [ 772.918951] env[61839]: ERROR nova.compute.manager [instance: e81bf730-9cf6-4728-aae4-4962115f8b6f] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 772.918951] env[61839]: ERROR nova.compute.manager [instance: e81bf730-9cf6-4728-aae4-4962115f8b6f] with excutils.save_and_reraise_exception(): [ 772.918951] env[61839]: ERROR nova.compute.manager [instance: e81bf730-9cf6-4728-aae4-4962115f8b6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 772.918951] env[61839]: ERROR nova.compute.manager [instance: e81bf730-9cf6-4728-aae4-4962115f8b6f] self.force_reraise() [ 772.918951] env[61839]: ERROR nova.compute.manager [instance: e81bf730-9cf6-4728-aae4-4962115f8b6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 772.918951] env[61839]: ERROR nova.compute.manager [instance: e81bf730-9cf6-4728-aae4-4962115f8b6f] raise self.value [ 772.918951] env[61839]: ERROR nova.compute.manager [instance: e81bf730-9cf6-4728-aae4-4962115f8b6f] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 772.918951] env[61839]: ERROR nova.compute.manager [instance: e81bf730-9cf6-4728-aae4-4962115f8b6f] updated_port = self._update_port( [ 772.918951] env[61839]: ERROR nova.compute.manager [instance: e81bf730-9cf6-4728-aae4-4962115f8b6f] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 772.918951] 
env[61839]: ERROR nova.compute.manager [instance: e81bf730-9cf6-4728-aae4-4962115f8b6f] _ensure_no_port_binding_failure(port) [ 772.918951] env[61839]: ERROR nova.compute.manager [instance: e81bf730-9cf6-4728-aae4-4962115f8b6f] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 772.918951] env[61839]: ERROR nova.compute.manager [instance: e81bf730-9cf6-4728-aae4-4962115f8b6f] raise exception.PortBindingFailed(port_id=port['id']) [ 772.919216] env[61839]: ERROR nova.compute.manager [instance: e81bf730-9cf6-4728-aae4-4962115f8b6f] nova.exception.PortBindingFailed: Binding failed for port c5255785-dcf2-4894-812d-0a92bec33b87, please check neutron logs for more information. [ 772.919216] env[61839]: ERROR nova.compute.manager [instance: e81bf730-9cf6-4728-aae4-4962115f8b6f] [ 772.919216] env[61839]: INFO nova.compute.manager [None req-d7ee033d-2803-45eb-9965-f6766e5fb5bc tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] [instance: e81bf730-9cf6-4728-aae4-4962115f8b6f] Terminating instance [ 772.920415] env[61839]: DEBUG oslo_concurrency.lockutils [None req-d7ee033d-2803-45eb-9965-f6766e5fb5bc tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Acquiring lock "refresh_cache-e81bf730-9cf6-4728-aae4-4962115f8b6f" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 773.009604] env[61839]: DEBUG oslo_concurrency.lockutils [None req-d7ee033d-2803-45eb-9965-f6766e5fb5bc tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.378s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 773.010233] env[61839]: DEBUG nova.compute.manager [None req-d7ee033d-2803-45eb-9965-f6766e5fb5bc tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] [instance: 697222e0-07e5-4a3d-adbe-d5d815cf4756] Start building networks asynchronously for instance. 
{{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 773.017020] env[61839]: DEBUG oslo_concurrency.lockutils [None req-778e677c-c811-4fc9-adf1-a009f3699287 tempest-ServersAaction247Test-1570847378 tempest-ServersAaction247Test-1570847378-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.937s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 773.017020] env[61839]: INFO nova.compute.claims [None req-778e677c-c811-4fc9-adf1-a009f3699287 tempest-ServersAaction247Test-1570847378 tempest-ServersAaction247Test-1570847378-project-member] [instance: 0bc0eefd-8a56-4cd6-a0b5-818cc437d917] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 773.044358] env[61839]: DEBUG oslo_concurrency.lockutils [None req-fdadb98b-f7ca-40ff-9eb5-8f15cbae89cd tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 773.147340] env[61839]: DEBUG nova.network.neutron [req-97111600-158b-4c5b-85cb-64aa79b0203f req-7373e93e-27c5-49e9-b2bd-ae47c5f71415 service nova] [instance: e81bf730-9cf6-4728-aae4-4962115f8b6f] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 773.257070] env[61839]: DEBUG nova.network.neutron [req-97111600-158b-4c5b-85cb-64aa79b0203f req-7373e93e-27c5-49e9-b2bd-ae47c5f71415 service nova] [instance: e81bf730-9cf6-4728-aae4-4962115f8b6f] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 773.522137] env[61839]: DEBUG nova.compute.utils [None req-d7ee033d-2803-45eb-9965-f6766e5fb5bc tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Using /dev/sd instead of None {{(pid=61839) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 773.524683] env[61839]: DEBUG nova.compute.manager [None req-d7ee033d-2803-45eb-9965-f6766e5fb5bc tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] [instance: 697222e0-07e5-4a3d-adbe-d5d815cf4756] Allocating IP information in the background. 
{{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 773.524683] env[61839]: DEBUG nova.network.neutron [None req-d7ee033d-2803-45eb-9965-f6766e5fb5bc tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] [instance: 697222e0-07e5-4a3d-adbe-d5d815cf4756] allocate_for_instance() {{(pid=61839) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 773.579752] env[61839]: DEBUG nova.policy [None req-d7ee033d-2803-45eb-9965-f6766e5fb5bc tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd9360f9fcba74a5aae33fdec3660b790', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'fefa976ea98445f1b4e719d3e3a1e8af', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61839) authorize /opt/stack/nova/nova/policy.py:201}} [ 773.759828] env[61839]: DEBUG oslo_concurrency.lockutils [req-97111600-158b-4c5b-85cb-64aa79b0203f req-7373e93e-27c5-49e9-b2bd-ae47c5f71415 service nova] Releasing lock "refresh_cache-e81bf730-9cf6-4728-aae4-4962115f8b6f" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 773.760274] env[61839]: DEBUG oslo_concurrency.lockutils [None req-d7ee033d-2803-45eb-9965-f6766e5fb5bc tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Acquired lock "refresh_cache-e81bf730-9cf6-4728-aae4-4962115f8b6f" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 773.760466] env[61839]: DEBUG nova.network.neutron [None req-d7ee033d-2803-45eb-9965-f6766e5fb5bc tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] [instance: e81bf730-9cf6-4728-aae4-4962115f8b6f] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 773.860416] env[61839]: DEBUG nova.network.neutron [None req-d7ee033d-2803-45eb-9965-f6766e5fb5bc tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] [instance: 697222e0-07e5-4a3d-adbe-d5d815cf4756] Successfully created port: 962e76d2-2d5b-466c-9652-29ee2cfdd960 {{(pid=61839) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 774.027114] env[61839]: DEBUG nova.compute.manager [None req-d7ee033d-2803-45eb-9965-f6766e5fb5bc tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] [instance: 697222e0-07e5-4a3d-adbe-d5d815cf4756] Start building block device mappings for instance. {{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 774.287986] env[61839]: DEBUG nova.network.neutron [None req-d7ee033d-2803-45eb-9965-f6766e5fb5bc tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] [instance: e81bf730-9cf6-4728-aae4-4962115f8b6f] Instance cache missing network info. 
{{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 774.301285] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77a3af32-0d1b-43d8-8c6a-ccc8924f110a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.309937] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87b7406f-3e5d-48af-a07e-0fb897261c42 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.341339] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-904b5e94-845c-4e2c-a3b1-8157d98f6acb {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.348630] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a248b66-202a-443f-81f5-2254dea8b600 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.361542] env[61839]: DEBUG nova.compute.provider_tree [None req-778e677c-c811-4fc9-adf1-a009f3699287 tempest-ServersAaction247Test-1570847378 tempest-ServersAaction247Test-1570847378-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 774.475490] env[61839]: DEBUG nova.network.neutron [None req-d7ee033d-2803-45eb-9965-f6766e5fb5bc tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] [instance: e81bf730-9cf6-4728-aae4-4962115f8b6f] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 774.713675] env[61839]: DEBUG nova.compute.manager [req-e89bbc71-823a-4fab-91fb-9c69e4cc2ece req-6cbc3fde-df4f-4d17-abc0-f7c1b67b8fe4 service nova] [instance: e81bf730-9cf6-4728-aae4-4962115f8b6f] Received event network-vif-deleted-c5255785-dcf2-4894-812d-0a92bec33b87 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 774.865047] env[61839]: DEBUG nova.scheduler.client.report [None req-778e677c-c811-4fc9-adf1-a009f3699287 tempest-ServersAaction247Test-1570847378 tempest-ServersAaction247Test-1570847378-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 774.977589] env[61839]: DEBUG oslo_concurrency.lockutils [None req-d7ee033d-2803-45eb-9965-f6766e5fb5bc tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Releasing lock "refresh_cache-e81bf730-9cf6-4728-aae4-4962115f8b6f" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 774.978040] env[61839]: DEBUG nova.compute.manager [None req-d7ee033d-2803-45eb-9965-f6766e5fb5bc 
tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] [instance: e81bf730-9cf6-4728-aae4-4962115f8b6f] Start destroying the instance on the hypervisor. {{(pid=61839) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 774.978245] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-d7ee033d-2803-45eb-9965-f6766e5fb5bc tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] [instance: e81bf730-9cf6-4728-aae4-4962115f8b6f] Destroying instance {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 774.978533] env[61839]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6ab96fac-c1de-4cc5-a133-8a4e127a5776 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.990914] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b4bd395-295a-4502-8f5b-54396910968e {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.013347] env[61839]: WARNING nova.virt.vmwareapi.vmops [None req-d7ee033d-2803-45eb-9965-f6766e5fb5bc tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] [instance: e81bf730-9cf6-4728-aae4-4962115f8b6f] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance e81bf730-9cf6-4728-aae4-4962115f8b6f could not be found. [ 775.013637] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-d7ee033d-2803-45eb-9965-f6766e5fb5bc tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] [instance: e81bf730-9cf6-4728-aae4-4962115f8b6f] Instance destroyed {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 775.013841] env[61839]: INFO nova.compute.manager [None req-d7ee033d-2803-45eb-9965-f6766e5fb5bc tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] [instance: e81bf730-9cf6-4728-aae4-4962115f8b6f] Took 0.04 seconds to destroy the instance on the hypervisor. [ 775.014174] env[61839]: DEBUG oslo.service.loopingcall [None req-d7ee033d-2803-45eb-9965-f6766e5fb5bc tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 775.014438] env[61839]: DEBUG nova.compute.manager [-] [instance: e81bf730-9cf6-4728-aae4-4962115f8b6f] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 775.014563] env[61839]: DEBUG nova.network.neutron [-] [instance: e81bf730-9cf6-4728-aae4-4962115f8b6f] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 775.035806] env[61839]: DEBUG nova.compute.manager [None req-d7ee033d-2803-45eb-9965-f6766e5fb5bc tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] [instance: 697222e0-07e5-4a3d-adbe-d5d815cf4756] Start spawning the instance on the hypervisor.
{{(pid=61839) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 775.038551] env[61839]: DEBUG nova.network.neutron [-] [instance: e81bf730-9cf6-4728-aae4-4962115f8b6f] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 775.061605] env[61839]: DEBUG nova.virt.hardware [None req-d7ee033d-2803-45eb-9965-f6766e5fb5bc tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-18T16:51:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-18T16:50:57Z,direct_url=<?>,disk_format='vmdk',id=e497cc62-282a-4a70-9770-22d80d8a1013,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a98e034276e44534a9feace637762da7',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-10-18T16:50:58Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 775.061605] env[61839]: DEBUG nova.virt.hardware [None req-d7ee033d-2803-45eb-9965-f6766e5fb5bc tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Flavor limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 775.061605] env[61839]: DEBUG nova.virt.hardware [None req-d7ee033d-2803-45eb-9965-f6766e5fb5bc tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Image limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 775.061732] env[61839]: DEBUG nova.virt.hardware [None req-d7ee033d-2803-45eb-9965-f6766e5fb5bc tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Flavor pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 775.061732] env[61839]: DEBUG nova.virt.hardware [None req-d7ee033d-2803-45eb-9965-f6766e5fb5bc tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Image pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 775.061732] env[61839]: DEBUG nova.virt.hardware [None req-d7ee033d-2803-45eb-9965-f6766e5fb5bc tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 775.062687] env[61839]: DEBUG nova.virt.hardware [None req-d7ee033d-2803-45eb-9965-f6766e5fb5bc tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 775.062687] env[61839]: DEBUG nova.virt.hardware [None req-d7ee033d-2803-45eb-9965-f6766e5fb5bc tempest-MultipleCreateTestJSON-1549308977
tempest-MultipleCreateTestJSON-1549308977-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 775.062687] env[61839]: DEBUG nova.virt.hardware [None req-d7ee033d-2803-45eb-9965-f6766e5fb5bc tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Got 1 possible topologies {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 775.062687] env[61839]: DEBUG nova.virt.hardware [None req-d7ee033d-2803-45eb-9965-f6766e5fb5bc tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 775.062687] env[61839]: DEBUG nova.virt.hardware [None req-d7ee033d-2803-45eb-9965-f6766e5fb5bc tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 775.063745] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5217d31-5e79-40a3-af79-dbd35af6b99f {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.071943] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96e0d4ff-57cc-47b2-a859-3616881a3c9b {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.161177] env[61839]: DEBUG oslo_concurrency.lockutils [None req-4312ca92-2e7a-4f1e-9c69-7a382d955d89 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Acquiring lock "3f86a0d5-30fd-42cc-bd40-14bce9d0e56f" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 775.161177] env[61839]: DEBUG oslo_concurrency.lockutils [None req-4312ca92-2e7a-4f1e-9c69-7a382d955d89 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Lock "3f86a0d5-30fd-42cc-bd40-14bce9d0e56f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 775.164799] env[61839]: ERROR nova.compute.manager [None req-d7ee033d-2803-45eb-9965-f6766e5fb5bc tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 962e76d2-2d5b-466c-9652-29ee2cfdd960, please check neutron logs for more information.
[ 775.164799] env[61839]: ERROR nova.compute.manager Traceback (most recent call last): [ 775.164799] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 775.164799] env[61839]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 775.164799] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 775.164799] env[61839]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 775.164799] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 775.164799] env[61839]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 775.164799] env[61839]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 775.164799] env[61839]: ERROR nova.compute.manager self.force_reraise() [ 775.164799] env[61839]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 775.164799] env[61839]: ERROR nova.compute.manager raise self.value [ 775.164799] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 775.164799] env[61839]: ERROR nova.compute.manager updated_port = self._update_port( [ 775.164799] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 775.164799] env[61839]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 775.165158] env[61839]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 775.165158] env[61839]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 775.165158] env[61839]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 962e76d2-2d5b-466c-9652-29ee2cfdd960, please check neutron logs for more information. 
[ 775.165158] env[61839]: ERROR nova.compute.manager [ 775.165158] env[61839]: Traceback (most recent call last): [ 775.165158] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 775.165158] env[61839]: listener.cb(fileno) [ 775.165158] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 775.165158] env[61839]: result = function(*args, **kwargs) [ 775.165158] env[61839]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 775.165158] env[61839]: return func(*args, **kwargs) [ 775.165158] env[61839]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 775.165158] env[61839]: raise e [ 775.165158] env[61839]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 775.165158] env[61839]: nwinfo = self.network_api.allocate_for_instance( [ 775.165158] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 775.165158] env[61839]: created_port_ids = self._update_ports_for_instance( [ 775.165158] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 775.165158] env[61839]: with excutils.save_and_reraise_exception(): [ 775.165158] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 775.165158] env[61839]: self.force_reraise() [ 775.165158] env[61839]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 775.165158] env[61839]: raise self.value [ 775.165158] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 775.165158] env[61839]: updated_port = self._update_port( [ 775.165158] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 775.165158] env[61839]: _ensure_no_port_binding_failure(port) [ 775.165158] env[61839]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 775.165158] env[61839]: raise exception.PortBindingFailed(port_id=port['id']) [ 775.165935] env[61839]: nova.exception.PortBindingFailed: Binding failed for port 962e76d2-2d5b-466c-9652-29ee2cfdd960, please check neutron logs for more information. [ 775.165935] env[61839]: Removing descriptor: 17 [ 775.165935] env[61839]: ERROR nova.compute.manager [None req-d7ee033d-2803-45eb-9965-f6766e5fb5bc tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] [instance: 697222e0-07e5-4a3d-adbe-d5d815cf4756] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 962e76d2-2d5b-466c-9652-29ee2cfdd960, please check neutron logs for more information. 
[ 775.165935] env[61839]: ERROR nova.compute.manager [instance: 697222e0-07e5-4a3d-adbe-d5d815cf4756] Traceback (most recent call last): [ 775.165935] env[61839]: ERROR nova.compute.manager [instance: 697222e0-07e5-4a3d-adbe-d5d815cf4756] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 775.165935] env[61839]: ERROR nova.compute.manager [instance: 697222e0-07e5-4a3d-adbe-d5d815cf4756] yield resources [ 775.165935] env[61839]: ERROR nova.compute.manager [instance: 697222e0-07e5-4a3d-adbe-d5d815cf4756] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 775.165935] env[61839]: ERROR nova.compute.manager [instance: 697222e0-07e5-4a3d-adbe-d5d815cf4756] self.driver.spawn(context, instance, image_meta, [ 775.165935] env[61839]: ERROR nova.compute.manager [instance: 697222e0-07e5-4a3d-adbe-d5d815cf4756] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 775.165935] env[61839]: ERROR nova.compute.manager [instance: 697222e0-07e5-4a3d-adbe-d5d815cf4756] self._vmops.spawn(context, instance, image_meta, injected_files, [ 775.165935] env[61839]: ERROR nova.compute.manager [instance: 697222e0-07e5-4a3d-adbe-d5d815cf4756] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 775.165935] env[61839]: ERROR nova.compute.manager [instance: 697222e0-07e5-4a3d-adbe-d5d815cf4756] vm_ref = self.build_virtual_machine(instance, [ 775.166265] env[61839]: ERROR nova.compute.manager [instance: 697222e0-07e5-4a3d-adbe-d5d815cf4756] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 775.166265] env[61839]: ERROR nova.compute.manager [instance: 697222e0-07e5-4a3d-adbe-d5d815cf4756] vif_infos = vmwarevif.get_vif_info(self._session, [ 775.166265] env[61839]: ERROR nova.compute.manager [instance: 697222e0-07e5-4a3d-adbe-d5d815cf4756] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 775.166265] env[61839]: ERROR nova.compute.manager [instance: 697222e0-07e5-4a3d-adbe-d5d815cf4756] for vif in network_info: [ 775.166265] env[61839]: ERROR nova.compute.manager [instance: 697222e0-07e5-4a3d-adbe-d5d815cf4756] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 775.166265] env[61839]: ERROR nova.compute.manager [instance: 697222e0-07e5-4a3d-adbe-d5d815cf4756] return self._sync_wrapper(fn, *args, **kwargs) [ 775.166265] env[61839]: ERROR nova.compute.manager [instance: 697222e0-07e5-4a3d-adbe-d5d815cf4756] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 775.166265] env[61839]: ERROR nova.compute.manager [instance: 697222e0-07e5-4a3d-adbe-d5d815cf4756] self.wait() [ 775.166265] env[61839]: ERROR nova.compute.manager [instance: 697222e0-07e5-4a3d-adbe-d5d815cf4756] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 775.166265] env[61839]: ERROR nova.compute.manager [instance: 697222e0-07e5-4a3d-adbe-d5d815cf4756] self[:] = self._gt.wait() [ 775.166265] env[61839]: ERROR nova.compute.manager [instance: 697222e0-07e5-4a3d-adbe-d5d815cf4756] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 775.166265] env[61839]: ERROR nova.compute.manager [instance: 697222e0-07e5-4a3d-adbe-d5d815cf4756] return self._exit_event.wait() [ 775.166265] env[61839]: ERROR nova.compute.manager [instance: 697222e0-07e5-4a3d-adbe-d5d815cf4756] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 775.166599] env[61839]: ERROR 
nova.compute.manager [instance: 697222e0-07e5-4a3d-adbe-d5d815cf4756] result = hub.switch() [ 775.166599] env[61839]: ERROR nova.compute.manager [instance: 697222e0-07e5-4a3d-adbe-d5d815cf4756] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 775.166599] env[61839]: ERROR nova.compute.manager [instance: 697222e0-07e5-4a3d-adbe-d5d815cf4756] return self.greenlet.switch() [ 775.166599] env[61839]: ERROR nova.compute.manager [instance: 697222e0-07e5-4a3d-adbe-d5d815cf4756] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 775.166599] env[61839]: ERROR nova.compute.manager [instance: 697222e0-07e5-4a3d-adbe-d5d815cf4756] result = function(*args, **kwargs) [ 775.166599] env[61839]: ERROR nova.compute.manager [instance: 697222e0-07e5-4a3d-adbe-d5d815cf4756] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 775.166599] env[61839]: ERROR nova.compute.manager [instance: 697222e0-07e5-4a3d-adbe-d5d815cf4756] return func(*args, **kwargs) [ 775.166599] env[61839]: ERROR nova.compute.manager [instance: 697222e0-07e5-4a3d-adbe-d5d815cf4756] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 775.166599] env[61839]: ERROR nova.compute.manager [instance: 697222e0-07e5-4a3d-adbe-d5d815cf4756] raise e [ 775.166599] env[61839]: ERROR nova.compute.manager [instance: 697222e0-07e5-4a3d-adbe-d5d815cf4756] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 775.166599] env[61839]: ERROR nova.compute.manager [instance: 697222e0-07e5-4a3d-adbe-d5d815cf4756] nwinfo = self.network_api.allocate_for_instance( [ 775.166599] env[61839]: ERROR nova.compute.manager [instance: 697222e0-07e5-4a3d-adbe-d5d815cf4756] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 775.166599] env[61839]: ERROR nova.compute.manager [instance: 697222e0-07e5-4a3d-adbe-d5d815cf4756] created_port_ids = self._update_ports_for_instance( [ 775.166933] env[61839]: ERROR nova.compute.manager [instance: 697222e0-07e5-4a3d-adbe-d5d815cf4756] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 775.166933] env[61839]: ERROR nova.compute.manager [instance: 697222e0-07e5-4a3d-adbe-d5d815cf4756] with excutils.save_and_reraise_exception(): [ 775.166933] env[61839]: ERROR nova.compute.manager [instance: 697222e0-07e5-4a3d-adbe-d5d815cf4756] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 775.166933] env[61839]: ERROR nova.compute.manager [instance: 697222e0-07e5-4a3d-adbe-d5d815cf4756] self.force_reraise() [ 775.166933] env[61839]: ERROR nova.compute.manager [instance: 697222e0-07e5-4a3d-adbe-d5d815cf4756] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 775.166933] env[61839]: ERROR nova.compute.manager [instance: 697222e0-07e5-4a3d-adbe-d5d815cf4756] raise self.value [ 775.166933] env[61839]: ERROR nova.compute.manager [instance: 697222e0-07e5-4a3d-adbe-d5d815cf4756] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 775.166933] env[61839]: ERROR nova.compute.manager [instance: 697222e0-07e5-4a3d-adbe-d5d815cf4756] updated_port = self._update_port( [ 775.166933] env[61839]: ERROR nova.compute.manager [instance: 697222e0-07e5-4a3d-adbe-d5d815cf4756] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 775.166933] 
env[61839]: ERROR nova.compute.manager [instance: 697222e0-07e5-4a3d-adbe-d5d815cf4756] _ensure_no_port_binding_failure(port) [ 775.166933] env[61839]: ERROR nova.compute.manager [instance: 697222e0-07e5-4a3d-adbe-d5d815cf4756] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 775.166933] env[61839]: ERROR nova.compute.manager [instance: 697222e0-07e5-4a3d-adbe-d5d815cf4756] raise exception.PortBindingFailed(port_id=port['id']) [ 775.167240] env[61839]: ERROR nova.compute.manager [instance: 697222e0-07e5-4a3d-adbe-d5d815cf4756] nova.exception.PortBindingFailed: Binding failed for port 962e76d2-2d5b-466c-9652-29ee2cfdd960, please check neutron logs for more information. [ 775.167240] env[61839]: ERROR nova.compute.manager [instance: 697222e0-07e5-4a3d-adbe-d5d815cf4756] [ 775.167240] env[61839]: INFO nova.compute.manager [None req-d7ee033d-2803-45eb-9965-f6766e5fb5bc tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] [instance: 697222e0-07e5-4a3d-adbe-d5d815cf4756] Terminating instance [ 775.167662] env[61839]: DEBUG oslo_concurrency.lockutils [None req-d7ee033d-2803-45eb-9965-f6766e5fb5bc tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Acquiring lock "refresh_cache-697222e0-07e5-4a3d-adbe-d5d815cf4756" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 775.167819] env[61839]: DEBUG oslo_concurrency.lockutils [None req-d7ee033d-2803-45eb-9965-f6766e5fb5bc tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Acquired lock "refresh_cache-697222e0-07e5-4a3d-adbe-d5d815cf4756" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 775.167985] env[61839]: DEBUG nova.network.neutron [None req-d7ee033d-2803-45eb-9965-f6766e5fb5bc tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] [instance: 697222e0-07e5-4a3d-adbe-d5d815cf4756] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 775.370770] env[61839]: DEBUG oslo_concurrency.lockutils [None req-778e677c-c811-4fc9-adf1-a009f3699287 tempest-ServersAaction247Test-1570847378 tempest-ServersAaction247Test-1570847378-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.357s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 775.371328] env[61839]: DEBUG nova.compute.manager [None req-778e677c-c811-4fc9-adf1-a009f3699287 tempest-ServersAaction247Test-1570847378 tempest-ServersAaction247Test-1570847378-project-member] [instance: 0bc0eefd-8a56-4cd6-a0b5-818cc437d917] Start building networks asynchronously for instance. 
{{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 775.374503] env[61839]: DEBUG oslo_concurrency.lockutils [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 15.071s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 775.374503] env[61839]: DEBUG oslo_concurrency.lockutils [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 775.374626] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61839) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 775.374897] env[61839]: DEBUG oslo_concurrency.lockutils [None req-73f4029e-d3f8-4a4c-9058-31fdb7ebb120 tempest-ServerRescueTestJSONUnderV235-933831856 tempest-ServerRescueTestJSONUnderV235-933831856-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.425s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 775.378092] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3081641b-a0cd-4ad3-be8a-799c595cc6c5 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.386103] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a940be74-0515-453c-bd54-f4bc63955e93 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.399656] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1639b75c-7966-4c0f-88bc-176e192b4386 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.407202] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e83ab7d-859e-4ddf-92b8-575defc42c75 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.438110] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181409MB free_disk=135GB free_vcpus=48 pci_devices=None {{(pid=61839) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 775.438276] env[61839]: DEBUG oslo_concurrency.lockutils [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 775.541168] env[61839]: DEBUG nova.network.neutron [-] [instance: e81bf730-9cf6-4728-aae4-4962115f8b6f] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 775.700388] env[61839]: DEBUG nova.network.neutron [None req-d7ee033d-2803-45eb-9965-f6766e5fb5bc tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] [instance: 697222e0-07e5-4a3d-adbe-d5d815cf4756] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 775.804616] env[61839]: DEBUG nova.network.neutron [None req-d7ee033d-2803-45eb-9965-f6766e5fb5bc tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] [instance: 697222e0-07e5-4a3d-adbe-d5d815cf4756] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 775.879791] env[61839]: DEBUG nova.compute.utils [None req-778e677c-c811-4fc9-adf1-a009f3699287 tempest-ServersAaction247Test-1570847378 tempest-ServersAaction247Test-1570847378-project-member] Using /dev/sd instead of None {{(pid=61839) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 775.882840] env[61839]: DEBUG nova.compute.manager [None req-778e677c-c811-4fc9-adf1-a009f3699287 tempest-ServersAaction247Test-1570847378 tempest-ServersAaction247Test-1570847378-project-member] [instance: 0bc0eefd-8a56-4cd6-a0b5-818cc437d917] Not allocating networking since 'none' was specified. {{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 775.901510] env[61839]: DEBUG nova.scheduler.client.report [None req-73f4029e-d3f8-4a4c-9058-31fdb7ebb120 tempest-ServerRescueTestJSONUnderV235-933831856 tempest-ServerRescueTestJSONUnderV235-933831856-project-member] Refreshing inventories for resource provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 775.917811] env[61839]: DEBUG nova.scheduler.client.report [None req-73f4029e-d3f8-4a4c-9058-31fdb7ebb120 tempest-ServerRescueTestJSONUnderV235-933831856 tempest-ServerRescueTestJSONUnderV235-933831856-project-member] Updating ProviderTree inventory for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 775.918050] env[61839]: DEBUG nova.compute.provider_tree [None req-73f4029e-d3f8-4a4c-9058-31fdb7ebb120 tempest-ServerRescueTestJSONUnderV235-933831856 tempest-ServerRescueTestJSONUnderV235-933831856-project-member] Updating inventory in ProviderTree for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 775.928895] env[61839]: DEBUG nova.scheduler.client.report [None req-73f4029e-d3f8-4a4c-9058-31fdb7ebb120 
tempest-ServerRescueTestJSONUnderV235-933831856 tempest-ServerRescueTestJSONUnderV235-933831856-project-member] Refreshing aggregate associations for resource provider cef329e6-1ccd-42a8-bbc4-109a06d1c908, aggregates: None {{(pid=61839) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 775.946063] env[61839]: DEBUG nova.scheduler.client.report [None req-73f4029e-d3f8-4a4c-9058-31fdb7ebb120 tempest-ServerRescueTestJSONUnderV235-933831856 tempest-ServerRescueTestJSONUnderV235-933831856-project-member] Refreshing trait associations for resource provider cef329e6-1ccd-42a8-bbc4-109a06d1c908, traits: COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=61839) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 776.043635] env[61839]: INFO nova.compute.manager [-] [instance: e81bf730-9cf6-4728-aae4-4962115f8b6f] Took 1.03 seconds to deallocate network for instance. [ 776.046230] env[61839]: DEBUG nova.compute.claims [None req-d7ee033d-2803-45eb-9965-f6766e5fb5bc tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] [instance: e81bf730-9cf6-4728-aae4-4962115f8b6f] Aborting claim: {{(pid=61839) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 776.046412] env[61839]: DEBUG oslo_concurrency.lockutils [None req-d7ee033d-2803-45eb-9965-f6766e5fb5bc tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 776.296852] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9a0458e-2590-43bd-a198-549d7574c8d9 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.307026] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-851ae85d-fae5-406b-aeaa-a5f27be08d22 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.307725] env[61839]: DEBUG oslo_concurrency.lockutils [None req-d7ee033d-2803-45eb-9965-f6766e5fb5bc tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Releasing lock "refresh_cache-697222e0-07e5-4a3d-adbe-d5d815cf4756" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 776.308124] env[61839]: DEBUG nova.compute.manager [None req-d7ee033d-2803-45eb-9965-f6766e5fb5bc tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] [instance: 697222e0-07e5-4a3d-adbe-d5d815cf4756] Start destroying the instance on the hypervisor. 
{{(pid=61839) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 776.308316] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-d7ee033d-2803-45eb-9965-f6766e5fb5bc tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] [instance: 697222e0-07e5-4a3d-adbe-d5d815cf4756] Destroying instance {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 776.308581] env[61839]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-bf3f4f31-89e4-4d8f-88d0-518c552c92d2 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.337310] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71e28b1b-0014-4f33-94c9-d998002f1fbd {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.342253] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ab9ba08-577c-495a-be0a-103322559af8 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.356887] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8eb0514-d2b9-45c4-9846-3f2f5451959d {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.363914] env[61839]: WARNING nova.virt.vmwareapi.vmops [None req-d7ee033d-2803-45eb-9965-f6766e5fb5bc tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] [instance: 697222e0-07e5-4a3d-adbe-d5d815cf4756] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 697222e0-07e5-4a3d-adbe-d5d815cf4756 could not be found. [ 776.364130] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-d7ee033d-2803-45eb-9965-f6766e5fb5bc tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] [instance: 697222e0-07e5-4a3d-adbe-d5d815cf4756] Instance destroyed {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 776.364314] env[61839]: INFO nova.compute.manager [None req-d7ee033d-2803-45eb-9965-f6766e5fb5bc tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] [instance: 697222e0-07e5-4a3d-adbe-d5d815cf4756] Took 0.06 seconds to destroy the instance on the hypervisor. [ 776.364639] env[61839]: DEBUG oslo.service.loopingcall [None req-d7ee033d-2803-45eb-9965-f6766e5fb5bc tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.
{{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 776.365174] env[61839]: DEBUG nova.compute.manager [-] [instance: 697222e0-07e5-4a3d-adbe-d5d815cf4756] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 776.365306] env[61839]: DEBUG nova.network.neutron [-] [instance: 697222e0-07e5-4a3d-adbe-d5d815cf4756] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 776.374364] env[61839]: DEBUG nova.compute.provider_tree [None req-73f4029e-d3f8-4a4c-9058-31fdb7ebb120 tempest-ServerRescueTestJSONUnderV235-933831856 tempest-ServerRescueTestJSONUnderV235-933831856-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 776.386453] env[61839]: DEBUG nova.compute.manager [None req-778e677c-c811-4fc9-adf1-a009f3699287 tempest-ServersAaction247Test-1570847378 tempest-ServersAaction247Test-1570847378-project-member] [instance: 0bc0eefd-8a56-4cd6-a0b5-818cc437d917] Start building block device mappings for instance. {{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 776.389382] env[61839]: DEBUG nova.network.neutron [-] [instance: 697222e0-07e5-4a3d-adbe-d5d815cf4756] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 776.782628] env[61839]: DEBUG nova.compute.manager [req-12c30394-6cf2-459b-9df3-a8e9aa4e0487 req-ad7379db-d86a-416e-844d-77dc45ac0cd5 service nova] [instance: 697222e0-07e5-4a3d-adbe-d5d815cf4756] Received event network-changed-962e76d2-2d5b-466c-9652-29ee2cfdd960 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 776.782790] env[61839]: DEBUG nova.compute.manager [req-12c30394-6cf2-459b-9df3-a8e9aa4e0487 req-ad7379db-d86a-416e-844d-77dc45ac0cd5 service nova] [instance: 697222e0-07e5-4a3d-adbe-d5d815cf4756] Refreshing instance network info cache due to event network-changed-962e76d2-2d5b-466c-9652-29ee2cfdd960. 
{{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 776.783019] env[61839]: DEBUG oslo_concurrency.lockutils [req-12c30394-6cf2-459b-9df3-a8e9aa4e0487 req-ad7379db-d86a-416e-844d-77dc45ac0cd5 service nova] Acquiring lock "refresh_cache-697222e0-07e5-4a3d-adbe-d5d815cf4756" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 776.783159] env[61839]: DEBUG oslo_concurrency.lockutils [req-12c30394-6cf2-459b-9df3-a8e9aa4e0487 req-ad7379db-d86a-416e-844d-77dc45ac0cd5 service nova] Acquired lock "refresh_cache-697222e0-07e5-4a3d-adbe-d5d815cf4756" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 776.783826] env[61839]: DEBUG nova.network.neutron [req-12c30394-6cf2-459b-9df3-a8e9aa4e0487 req-ad7379db-d86a-416e-844d-77dc45ac0cd5 service nova] [instance: 697222e0-07e5-4a3d-adbe-d5d815cf4756] Refreshing network info cache for port 962e76d2-2d5b-466c-9652-29ee2cfdd960 {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 776.877708] env[61839]: DEBUG nova.scheduler.client.report [None req-73f4029e-d3f8-4a4c-9058-31fdb7ebb120 tempest-ServerRescueTestJSONUnderV235-933831856 tempest-ServerRescueTestJSONUnderV235-933831856-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 776.894516] env[61839]: DEBUG nova.network.neutron [-] [instance: 697222e0-07e5-4a3d-adbe-d5d815cf4756] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 777.310102] env[61839]: DEBUG nova.network.neutron [req-12c30394-6cf2-459b-9df3-a8e9aa4e0487 req-ad7379db-d86a-416e-844d-77dc45ac0cd5 service nova] [instance: 697222e0-07e5-4a3d-adbe-d5d815cf4756] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 777.383500] env[61839]: DEBUG oslo_concurrency.lockutils [None req-73f4029e-d3f8-4a4c-9058-31fdb7ebb120 tempest-ServerRescueTestJSONUnderV235-933831856 tempest-ServerRescueTestJSONUnderV235-933831856-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.008s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 777.384298] env[61839]: ERROR nova.compute.manager [None req-73f4029e-d3f8-4a4c-9058-31fdb7ebb120 tempest-ServerRescueTestJSONUnderV235-933831856 tempest-ServerRescueTestJSONUnderV235-933831856-project-member] [instance: 59ea60d5-7296-480c-ac03-ec0a7c021300] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port c786aa01-deb8-40f7-9a8a-7f953d974aff, please check neutron logs for more information. 
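The traceback that follows repeats the same path as the one above; the step worth calling out is oslo_utils.excutils.save_and_reraise_exception, visible in both as __exit__ -> force_reraise -> raise self.value: cleanup of the ports created so far runs inside the context manager, and the original PortBindingFailed still surfaces. A stripped-down sketch of that control flow (the real class can also suppress the re-raise and logs swallowed exceptions; _update_port and _rollback below are hypothetical stand-ins):

    import sys

    class save_and_reraise_exception:
        # Used inside an 'except' block: save the in-flight exception,
        # let cleanup run in the body, then re-raise the saved one.
        def __enter__(self):
            self.value = sys.exc_info()[1]
            return self

        def __exit__(self, exc_type, exc_val, exc_tb):
            if exc_type is None:
                self.force_reraise()
            return False  # a failing cleanup propagates instead

        def force_reraise(self):
            raise self.value

    def _update_port(port_id):
        raise RuntimeError("Binding failed for port %s" % port_id)

    def _rollback(created):
        print("rolling back %d created ports" % len(created))

    def update_ports_for_instance(port_ids):
        created = []
        try:
            for pid in port_ids:
                created.append(_update_port(pid))
        except Exception:
            with save_and_reraise_exception():
                _rollback(created)  # cleanup runs, original error survives

    try:
        update_ports_for_instance(['c786aa01'])
    except RuntimeError as exc:
        print("re-raised:", exc)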
[ 777.384298] env[61839]: ERROR nova.compute.manager [instance: 59ea60d5-7296-480c-ac03-ec0a7c021300] Traceback (most recent call last): [ 777.384298] env[61839]: ERROR nova.compute.manager [instance: 59ea60d5-7296-480c-ac03-ec0a7c021300] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 777.384298] env[61839]: ERROR nova.compute.manager [instance: 59ea60d5-7296-480c-ac03-ec0a7c021300] self.driver.spawn(context, instance, image_meta, [ 777.384298] env[61839]: ERROR nova.compute.manager [instance: 59ea60d5-7296-480c-ac03-ec0a7c021300] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 777.384298] env[61839]: ERROR nova.compute.manager [instance: 59ea60d5-7296-480c-ac03-ec0a7c021300] self._vmops.spawn(context, instance, image_meta, injected_files, [ 777.384298] env[61839]: ERROR nova.compute.manager [instance: 59ea60d5-7296-480c-ac03-ec0a7c021300] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 777.384298] env[61839]: ERROR nova.compute.manager [instance: 59ea60d5-7296-480c-ac03-ec0a7c021300] vm_ref = self.build_virtual_machine(instance, [ 777.384298] env[61839]: ERROR nova.compute.manager [instance: 59ea60d5-7296-480c-ac03-ec0a7c021300] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 777.384298] env[61839]: ERROR nova.compute.manager [instance: 59ea60d5-7296-480c-ac03-ec0a7c021300] vif_infos = vmwarevif.get_vif_info(self._session, [ 777.384298] env[61839]: ERROR nova.compute.manager [instance: 59ea60d5-7296-480c-ac03-ec0a7c021300] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 777.384666] env[61839]: ERROR nova.compute.manager [instance: 59ea60d5-7296-480c-ac03-ec0a7c021300] for vif in network_info: [ 777.384666] env[61839]: ERROR nova.compute.manager [instance: 59ea60d5-7296-480c-ac03-ec0a7c021300] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 777.384666] env[61839]: ERROR nova.compute.manager [instance: 59ea60d5-7296-480c-ac03-ec0a7c021300] return self._sync_wrapper(fn, *args, **kwargs) [ 777.384666] env[61839]: ERROR nova.compute.manager [instance: 59ea60d5-7296-480c-ac03-ec0a7c021300] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 777.384666] env[61839]: ERROR nova.compute.manager [instance: 59ea60d5-7296-480c-ac03-ec0a7c021300] self.wait() [ 777.384666] env[61839]: ERROR nova.compute.manager [instance: 59ea60d5-7296-480c-ac03-ec0a7c021300] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 777.384666] env[61839]: ERROR nova.compute.manager [instance: 59ea60d5-7296-480c-ac03-ec0a7c021300] self[:] = self._gt.wait() [ 777.384666] env[61839]: ERROR nova.compute.manager [instance: 59ea60d5-7296-480c-ac03-ec0a7c021300] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 777.384666] env[61839]: ERROR nova.compute.manager [instance: 59ea60d5-7296-480c-ac03-ec0a7c021300] return self._exit_event.wait() [ 777.384666] env[61839]: ERROR nova.compute.manager [instance: 59ea60d5-7296-480c-ac03-ec0a7c021300] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 777.384666] env[61839]: ERROR nova.compute.manager [instance: 59ea60d5-7296-480c-ac03-ec0a7c021300] current.throw(*self._exc) [ 777.384666] env[61839]: ERROR nova.compute.manager [instance: 59ea60d5-7296-480c-ac03-ec0a7c021300] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
777.384666] env[61839]: ERROR nova.compute.manager [instance: 59ea60d5-7296-480c-ac03-ec0a7c021300] result = function(*args, **kwargs) [ 777.385012] env[61839]: ERROR nova.compute.manager [instance: 59ea60d5-7296-480c-ac03-ec0a7c021300] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 777.385012] env[61839]: ERROR nova.compute.manager [instance: 59ea60d5-7296-480c-ac03-ec0a7c021300] return func(*args, **kwargs) [ 777.385012] env[61839]: ERROR nova.compute.manager [instance: 59ea60d5-7296-480c-ac03-ec0a7c021300] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 777.385012] env[61839]: ERROR nova.compute.manager [instance: 59ea60d5-7296-480c-ac03-ec0a7c021300] raise e [ 777.385012] env[61839]: ERROR nova.compute.manager [instance: 59ea60d5-7296-480c-ac03-ec0a7c021300] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 777.385012] env[61839]: ERROR nova.compute.manager [instance: 59ea60d5-7296-480c-ac03-ec0a7c021300] nwinfo = self.network_api.allocate_for_instance( [ 777.385012] env[61839]: ERROR nova.compute.manager [instance: 59ea60d5-7296-480c-ac03-ec0a7c021300] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 777.385012] env[61839]: ERROR nova.compute.manager [instance: 59ea60d5-7296-480c-ac03-ec0a7c021300] created_port_ids = self._update_ports_for_instance( [ 777.385012] env[61839]: ERROR nova.compute.manager [instance: 59ea60d5-7296-480c-ac03-ec0a7c021300] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 777.385012] env[61839]: ERROR nova.compute.manager [instance: 59ea60d5-7296-480c-ac03-ec0a7c021300] with excutils.save_and_reraise_exception(): [ 777.385012] env[61839]: ERROR nova.compute.manager [instance: 59ea60d5-7296-480c-ac03-ec0a7c021300] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 777.385012] env[61839]: ERROR nova.compute.manager [instance: 59ea60d5-7296-480c-ac03-ec0a7c021300] self.force_reraise() [ 777.385012] env[61839]: ERROR nova.compute.manager [instance: 59ea60d5-7296-480c-ac03-ec0a7c021300] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 777.385344] env[61839]: ERROR nova.compute.manager [instance: 59ea60d5-7296-480c-ac03-ec0a7c021300] raise self.value [ 777.385344] env[61839]: ERROR nova.compute.manager [instance: 59ea60d5-7296-480c-ac03-ec0a7c021300] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 777.385344] env[61839]: ERROR nova.compute.manager [instance: 59ea60d5-7296-480c-ac03-ec0a7c021300] updated_port = self._update_port( [ 777.385344] env[61839]: ERROR nova.compute.manager [instance: 59ea60d5-7296-480c-ac03-ec0a7c021300] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 777.385344] env[61839]: ERROR nova.compute.manager [instance: 59ea60d5-7296-480c-ac03-ec0a7c021300] _ensure_no_port_binding_failure(port) [ 777.385344] env[61839]: ERROR nova.compute.manager [instance: 59ea60d5-7296-480c-ac03-ec0a7c021300] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 777.385344] env[61839]: ERROR nova.compute.manager [instance: 59ea60d5-7296-480c-ac03-ec0a7c021300] raise exception.PortBindingFailed(port_id=port['id']) [ 777.385344] env[61839]: ERROR nova.compute.manager [instance: 59ea60d5-7296-480c-ac03-ec0a7c021300] nova.exception.PortBindingFailed: Binding failed for 
port c786aa01-deb8-40f7-9a8a-7f953d974aff, please check neutron logs for more information. [ 777.385344] env[61839]: ERROR nova.compute.manager [instance: 59ea60d5-7296-480c-ac03-ec0a7c021300] [ 777.385344] env[61839]: DEBUG nova.compute.utils [None req-73f4029e-d3f8-4a4c-9058-31fdb7ebb120 tempest-ServerRescueTestJSONUnderV235-933831856 tempest-ServerRescueTestJSONUnderV235-933831856-project-member] [instance: 59ea60d5-7296-480c-ac03-ec0a7c021300] Binding failed for port c786aa01-deb8-40f7-9a8a-7f953d974aff, please check neutron logs for more information. {{(pid=61839) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 777.386201] env[61839]: DEBUG oslo_concurrency.lockutils [None req-130eacd9-cc23-48e6-bf9f-acda8817b91f tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.987s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 777.387691] env[61839]: INFO nova.compute.claims [None req-130eacd9-cc23-48e6-bf9f-acda8817b91f tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] [instance: 687e6bf8-cf2a-4a9f-ad87-b42c4d0ced0a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 777.390224] env[61839]: DEBUG nova.compute.manager [None req-73f4029e-d3f8-4a4c-9058-31fdb7ebb120 tempest-ServerRescueTestJSONUnderV235-933831856 tempest-ServerRescueTestJSONUnderV235-933831856-project-member] [instance: 59ea60d5-7296-480c-ac03-ec0a7c021300] Build of instance 59ea60d5-7296-480c-ac03-ec0a7c021300 was re-scheduled: Binding failed for port c786aa01-deb8-40f7-9a8a-7f953d974aff, please check neutron logs for more information. 
{{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 777.390651] env[61839]: DEBUG nova.compute.manager [None req-73f4029e-d3f8-4a4c-9058-31fdb7ebb120 tempest-ServerRescueTestJSONUnderV235-933831856 tempest-ServerRescueTestJSONUnderV235-933831856-project-member] [instance: 59ea60d5-7296-480c-ac03-ec0a7c021300] Unplugging VIFs for instance {{(pid=61839) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 777.390872] env[61839]: DEBUG oslo_concurrency.lockutils [None req-73f4029e-d3f8-4a4c-9058-31fdb7ebb120 tempest-ServerRescueTestJSONUnderV235-933831856 tempest-ServerRescueTestJSONUnderV235-933831856-project-member] Acquiring lock "refresh_cache-59ea60d5-7296-480c-ac03-ec0a7c021300" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 777.391028] env[61839]: DEBUG oslo_concurrency.lockutils [None req-73f4029e-d3f8-4a4c-9058-31fdb7ebb120 tempest-ServerRescueTestJSONUnderV235-933831856 tempest-ServerRescueTestJSONUnderV235-933831856-project-member] Acquired lock "refresh_cache-59ea60d5-7296-480c-ac03-ec0a7c021300" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 777.391192] env[61839]: DEBUG nova.network.neutron [None req-73f4029e-d3f8-4a4c-9058-31fdb7ebb120 tempest-ServerRescueTestJSONUnderV235-933831856 tempest-ServerRescueTestJSONUnderV235-933831856-project-member] [instance: 59ea60d5-7296-480c-ac03-ec0a7c021300] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 777.398031] env[61839]: DEBUG nova.compute.manager [None req-778e677c-c811-4fc9-adf1-a009f3699287 tempest-ServersAaction247Test-1570847378 tempest-ServersAaction247Test-1570847378-project-member] [instance: 0bc0eefd-8a56-4cd6-a0b5-818cc437d917] Start spawning the instance on the hypervisor. {{(pid=61839) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 777.399961] env[61839]: INFO nova.compute.manager [-] [instance: 697222e0-07e5-4a3d-adbe-d5d815cf4756] Took 1.03 seconds to deallocate network for instance. 
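The "Lock 'compute_resources' acquired ... waited 14.425s / held 2.008s" records in this stretch come from oslo.concurrency: the resource tracker serializes every claim, abort and periodic audit on one named semaphore, so a slow abort path (here, triggered by the failed builds) shows up as multi-second waits for every other request on the host. A minimal usage sketch, assuming oslo.concurrency is installed, with a toy function standing in for ResourceTracker.instance_claim:

    from oslo_concurrency import lockutils

    COMPUTE_RESOURCE_SEMAPHORE = 'compute_resources'

    @lockutils.synchronized(COMPUTE_RESOURCE_SEMAPHORE)
    def instance_claim(instance_uuid):
        # Everything here runs under the shared semaphore; contention
        # is what the 'waited N.NNNs' figures in the log measure.
        print('claiming resources for %s' % instance_uuid)

    instance_claim('687e6bf8-cf2a-4a9f-ad87-b42c4d0ced0a')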
[ 777.402479] env[61839]: DEBUG nova.compute.claims [None req-d7ee033d-2803-45eb-9965-f6766e5fb5bc tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] [instance: 697222e0-07e5-4a3d-adbe-d5d815cf4756] Aborting claim: {{(pid=61839) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 777.402650] env[61839]: DEBUG oslo_concurrency.lockutils [None req-d7ee033d-2803-45eb-9965-f6766e5fb5bc tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 777.411652] env[61839]: DEBUG nova.network.neutron [req-12c30394-6cf2-459b-9df3-a8e9aa4e0487 req-ad7379db-d86a-416e-844d-77dc45ac0cd5 service nova] [instance: 697222e0-07e5-4a3d-adbe-d5d815cf4756] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 777.424622] env[61839]: DEBUG nova.virt.hardware [None req-778e677c-c811-4fc9-adf1-a009f3699287 tempest-ServersAaction247Test-1570847378 tempest-ServersAaction247Test-1570847378-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-18T16:51:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-18T16:50:57Z,direct_url=,disk_format='vmdk',id=e497cc62-282a-4a70-9770-22d80d8a1013,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a98e034276e44534a9feace637762da7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-18T16:50:58Z,virtual_size=,visibility=), allow threads: False {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 777.425834] env[61839]: DEBUG nova.virt.hardware [None req-778e677c-c811-4fc9-adf1-a009f3699287 tempest-ServersAaction247Test-1570847378 tempest-ServersAaction247Test-1570847378-project-member] Flavor limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 777.425834] env[61839]: DEBUG nova.virt.hardware [None req-778e677c-c811-4fc9-adf1-a009f3699287 tempest-ServersAaction247Test-1570847378 tempest-ServersAaction247Test-1570847378-project-member] Image limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 777.425834] env[61839]: DEBUG nova.virt.hardware [None req-778e677c-c811-4fc9-adf1-a009f3699287 tempest-ServersAaction247Test-1570847378 tempest-ServersAaction247Test-1570847378-project-member] Flavor pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 777.425834] env[61839]: DEBUG nova.virt.hardware [None req-778e677c-c811-4fc9-adf1-a009f3699287 tempest-ServersAaction247Test-1570847378 tempest-ServersAaction247Test-1570847378-project-member] Image pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 777.425834] env[61839]: DEBUG nova.virt.hardware [None req-778e677c-c811-4fc9-adf1-a009f3699287 tempest-ServersAaction247Test-1570847378 
tempest-ServersAaction247Test-1570847378-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 777.425982] env[61839]: DEBUG nova.virt.hardware [None req-778e677c-c811-4fc9-adf1-a009f3699287 tempest-ServersAaction247Test-1570847378 tempest-ServersAaction247Test-1570847378-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 777.425982] env[61839]: DEBUG nova.virt.hardware [None req-778e677c-c811-4fc9-adf1-a009f3699287 tempest-ServersAaction247Test-1570847378 tempest-ServersAaction247Test-1570847378-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 777.425982] env[61839]: DEBUG nova.virt.hardware [None req-778e677c-c811-4fc9-adf1-a009f3699287 tempest-ServersAaction247Test-1570847378 tempest-ServersAaction247Test-1570847378-project-member] Got 1 possible topologies {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 777.426137] env[61839]: DEBUG nova.virt.hardware [None req-778e677c-c811-4fc9-adf1-a009f3699287 tempest-ServersAaction247Test-1570847378 tempest-ServersAaction247Test-1570847378-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 777.426272] env[61839]: DEBUG nova.virt.hardware [None req-778e677c-c811-4fc9-adf1-a009f3699287 tempest-ServersAaction247Test-1570847378 tempest-ServersAaction247Test-1570847378-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 777.427127] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be0f8850-4c8d-4c11-b45b-a5a5b8b27bc3 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.437053] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9757db95-1cc4-4563-b64c-0c05cfe24db0 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.450120] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-778e677c-c811-4fc9-adf1-a009f3699287 tempest-ServersAaction247Test-1570847378 tempest-ServersAaction247Test-1570847378-project-member] [instance: 0bc0eefd-8a56-4cd6-a0b5-818cc437d917] Instance VIF info [] {{(pid=61839) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 777.455842] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-778e677c-c811-4fc9-adf1-a009f3699287 tempest-ServersAaction247Test-1570847378 tempest-ServersAaction247Test-1570847378-project-member] Creating folder: Project (1d38ccb37abd44a6aa5755ee02d51215). Parent ref: group-v281288. 
{{(pid=61839) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 777.456144] env[61839]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f07d2c37-11bb-4e85-a83e-a58c7cf95d38 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.465896] env[61839]: INFO nova.virt.vmwareapi.vm_util [None req-778e677c-c811-4fc9-adf1-a009f3699287 tempest-ServersAaction247Test-1570847378 tempest-ServersAaction247Test-1570847378-project-member] Created folder: Project (1d38ccb37abd44a6aa5755ee02d51215) in parent group-v281288. [ 777.466102] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-778e677c-c811-4fc9-adf1-a009f3699287 tempest-ServersAaction247Test-1570847378 tempest-ServersAaction247Test-1570847378-project-member] Creating folder: Instances. Parent ref: group-v281309. {{(pid=61839) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 777.466582] env[61839]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f3d9c54c-7aa4-463c-8760-0231446ea38a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.475520] env[61839]: INFO nova.virt.vmwareapi.vm_util [None req-778e677c-c811-4fc9-adf1-a009f3699287 tempest-ServersAaction247Test-1570847378 tempest-ServersAaction247Test-1570847378-project-member] Created folder: Instances in parent group-v281309. [ 777.475747] env[61839]: DEBUG oslo.service.loopingcall [None req-778e677c-c811-4fc9-adf1-a009f3699287 tempest-ServersAaction247Test-1570847378 tempest-ServersAaction247Test-1570847378-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 777.475947] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0bc0eefd-8a56-4cd6-a0b5-818cc437d917] Creating VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 777.476162] env[61839]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e68d47fd-05af-4e88-85f9-9f1adece2950 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.494740] env[61839]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 777.494740] env[61839]: value = "task-1314314" [ 777.494740] env[61839]: _type = "Task" [ 777.494740] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 777.502304] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314314, 'name': CreateVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 777.912507] env[61839]: DEBUG nova.network.neutron [None req-73f4029e-d3f8-4a4c-9058-31fdb7ebb120 tempest-ServerRescueTestJSONUnderV235-933831856 tempest-ServerRescueTestJSONUnderV235-933831856-project-member] [instance: 59ea60d5-7296-480c-ac03-ec0a7c021300] Instance cache missing network info. 
{{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 777.914663] env[61839]: DEBUG oslo_concurrency.lockutils [req-12c30394-6cf2-459b-9df3-a8e9aa4e0487 req-ad7379db-d86a-416e-844d-77dc45ac0cd5 service nova] Releasing lock "refresh_cache-697222e0-07e5-4a3d-adbe-d5d815cf4756" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 777.914993] env[61839]: DEBUG nova.compute.manager [req-12c30394-6cf2-459b-9df3-a8e9aa4e0487 req-ad7379db-d86a-416e-844d-77dc45ac0cd5 service nova] [instance: 697222e0-07e5-4a3d-adbe-d5d815cf4756] Received event network-vif-deleted-962e76d2-2d5b-466c-9652-29ee2cfdd960 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 778.004699] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314314, 'name': CreateVM_Task, 'duration_secs': 0.301012} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 778.005967] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0bc0eefd-8a56-4cd6-a0b5-818cc437d917] Created VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 778.005967] env[61839]: DEBUG oslo_concurrency.lockutils [None req-778e677c-c811-4fc9-adf1-a009f3699287 tempest-ServersAaction247Test-1570847378 tempest-ServersAaction247Test-1570847378-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 778.005967] env[61839]: DEBUG oslo_concurrency.lockutils [None req-778e677c-c811-4fc9-adf1-a009f3699287 tempest-ServersAaction247Test-1570847378 tempest-ServersAaction247Test-1570847378-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 778.006450] env[61839]: DEBUG oslo_concurrency.lockutils [None req-778e677c-c811-4fc9-adf1-a009f3699287 tempest-ServersAaction247Test-1570847378 tempest-ServersAaction247Test-1570847378-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 778.006788] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-84940efc-05af-4bbc-a6d5-a0e6d13e4e8e {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.011385] env[61839]: DEBUG oslo_vmware.api [None req-778e677c-c811-4fc9-adf1-a009f3699287 tempest-ServersAaction247Test-1570847378 tempest-ServersAaction247Test-1570847378-project-member] Waiting for the task: (returnval){ [ 778.011385] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52815ee0-d36d-0adf-31c1-025b94089296" [ 778.011385] env[61839]: _type = "Task" [ 778.011385] env[61839]: } to complete. 
{{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 778.020648] env[61839]: DEBUG oslo_vmware.api [None req-778e677c-c811-4fc9-adf1-a009f3699287 tempest-ServersAaction247Test-1570847378 tempest-ServersAaction247Test-1570847378-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52815ee0-d36d-0adf-31c1-025b94089296, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 778.031914] env[61839]: DEBUG nova.network.neutron [None req-73f4029e-d3f8-4a4c-9058-31fdb7ebb120 tempest-ServerRescueTestJSONUnderV235-933831856 tempest-ServerRescueTestJSONUnderV235-933831856-project-member] [instance: 59ea60d5-7296-480c-ac03-ec0a7c021300] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 779.147439] env[61839]: DEBUG oslo_concurrency.lockutils [None req-73f4029e-d3f8-4a4c-9058-31fdb7ebb120 tempest-ServerRescueTestJSONUnderV235-933831856 tempest-ServerRescueTestJSONUnderV235-933831856-project-member] Releasing lock "refresh_cache-59ea60d5-7296-480c-ac03-ec0a7c021300" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 779.147782] env[61839]: DEBUG nova.compute.manager [None req-73f4029e-d3f8-4a4c-9058-31fdb7ebb120 tempest-ServerRescueTestJSONUnderV235-933831856 tempest-ServerRescueTestJSONUnderV235-933831856-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61839) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 779.147997] env[61839]: DEBUG nova.compute.manager [None req-73f4029e-d3f8-4a4c-9058-31fdb7ebb120 tempest-ServerRescueTestJSONUnderV235-933831856 tempest-ServerRescueTestJSONUnderV235-933831856-project-member] [instance: 59ea60d5-7296-480c-ac03-ec0a7c021300] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 779.147997] env[61839]: DEBUG nova.network.neutron [None req-73f4029e-d3f8-4a4c-9058-31fdb7ebb120 tempest-ServerRescueTestJSONUnderV235-933831856 tempest-ServerRescueTestJSONUnderV235-933831856-project-member] [instance: 59ea60d5-7296-480c-ac03-ec0a7c021300] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 779.158928] env[61839]: DEBUG oslo_vmware.api [None req-778e677c-c811-4fc9-adf1-a009f3699287 tempest-ServersAaction247Test-1570847378 tempest-ServersAaction247Test-1570847378-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52815ee0-d36d-0adf-31c1-025b94089296, 'name': SearchDatastore_Task, 'duration_secs': 0.622099} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 779.159179] env[61839]: DEBUG oslo_concurrency.lockutils [None req-778e677c-c811-4fc9-adf1-a009f3699287 tempest-ServersAaction247Test-1570847378 tempest-ServersAaction247Test-1570847378-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 779.159400] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-778e677c-c811-4fc9-adf1-a009f3699287 tempest-ServersAaction247Test-1570847378 tempest-ServersAaction247Test-1570847378-project-member] [instance: 0bc0eefd-8a56-4cd6-a0b5-818cc437d917] Processing image e497cc62-282a-4a70-9770-22d80d8a1013 {{(pid=61839) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 779.159626] env[61839]: DEBUG oslo_concurrency.lockutils [None req-778e677c-c811-4fc9-adf1-a009f3699287 tempest-ServersAaction247Test-1570847378 tempest-ServersAaction247Test-1570847378-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 779.159771] env[61839]: DEBUG oslo_concurrency.lockutils [None req-778e677c-c811-4fc9-adf1-a009f3699287 tempest-ServersAaction247Test-1570847378 tempest-ServersAaction247Test-1570847378-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 779.159942] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-778e677c-c811-4fc9-adf1-a009f3699287 tempest-ServersAaction247Test-1570847378 tempest-ServersAaction247Test-1570847378-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 779.160198] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-abadee69-6e55-4b89-8f8a-37b9dd8ecf3a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.165934] env[61839]: DEBUG nova.network.neutron [None req-73f4029e-d3f8-4a4c-9058-31fdb7ebb120 tempest-ServerRescueTestJSONUnderV235-933831856 tempest-ServerRescueTestJSONUnderV235-933831856-project-member] [instance: 59ea60d5-7296-480c-ac03-ec0a7c021300] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 779.168114] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-778e677c-c811-4fc9-adf1-a009f3699287 tempest-ServersAaction247Test-1570847378 tempest-ServersAaction247Test-1570847378-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 779.168275] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-778e677c-c811-4fc9-adf1-a009f3699287 tempest-ServersAaction247Test-1570847378 tempest-ServersAaction247Test-1570847378-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61839) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 779.168927] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5813bef4-51a1-46a9-bac9-4190aacd6928 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.173501] env[61839]: DEBUG oslo_vmware.api [None req-778e677c-c811-4fc9-adf1-a009f3699287 tempest-ServersAaction247Test-1570847378 tempest-ServersAaction247Test-1570847378-project-member] Waiting for the task: (returnval){ [ 779.173501] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52d791ee-1359-25e0-dba5-e47c5ca407bf" [ 779.173501] env[61839]: _type = "Task" [ 779.173501] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 779.183382] env[61839]: DEBUG oslo_vmware.api [None req-778e677c-c811-4fc9-adf1-a009f3699287 tempest-ServersAaction247Test-1570847378 tempest-ServersAaction247Test-1570847378-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52d791ee-1359-25e0-dba5-e47c5ca407bf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 779.375130] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-281af734-fa65-4084-915c-cfb34b1c5364 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.383248] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fcb2a36-f918-425d-8056-42c2f49affa3 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.413359] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf4b4543-3e53-4aa2-8e7f-53de7be86ffd {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.420576] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6a25cf3-f0c7-4410-969a-46c7e8ddc018 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.433756] env[61839]: DEBUG nova.compute.provider_tree [None req-130eacd9-cc23-48e6-bf9f-acda8817b91f tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 779.668778] env[61839]: DEBUG nova.network.neutron [None req-73f4029e-d3f8-4a4c-9058-31fdb7ebb120 tempest-ServerRescueTestJSONUnderV235-933831856 tempest-ServerRescueTestJSONUnderV235-933831856-project-member] [instance: 59ea60d5-7296-480c-ac03-ec0a7c021300] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 779.686693] env[61839]: DEBUG oslo_vmware.api [None req-778e677c-c811-4fc9-adf1-a009f3699287 tempest-ServersAaction247Test-1570847378 tempest-ServersAaction247Test-1570847378-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52d791ee-1359-25e0-dba5-e47c5ca407bf, 'name': SearchDatastore_Task, 'duration_secs': 
0.009849} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 779.687429] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ea8110cd-ebf8-4287-987a-4fbc2b0848b9 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.692879] env[61839]: DEBUG oslo_vmware.api [None req-778e677c-c811-4fc9-adf1-a009f3699287 tempest-ServersAaction247Test-1570847378 tempest-ServersAaction247Test-1570847378-project-member] Waiting for the task: (returnval){ [ 779.692879] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52424981-8956-af11-7cb2-2072b735bc59" [ 779.692879] env[61839]: _type = "Task" [ 779.692879] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 779.700274] env[61839]: DEBUG oslo_vmware.api [None req-778e677c-c811-4fc9-adf1-a009f3699287 tempest-ServersAaction247Test-1570847378 tempest-ServersAaction247Test-1570847378-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52424981-8956-af11-7cb2-2072b735bc59, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 779.936923] env[61839]: DEBUG nova.scheduler.client.report [None req-130eacd9-cc23-48e6-bf9f-acda8817b91f tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 780.171845] env[61839]: INFO nova.compute.manager [None req-73f4029e-d3f8-4a4c-9058-31fdb7ebb120 tempest-ServerRescueTestJSONUnderV235-933831856 tempest-ServerRescueTestJSONUnderV235-933831856-project-member] [instance: 59ea60d5-7296-480c-ac03-ec0a7c021300] Took 1.02 seconds to deallocate network for instance. [ 780.209833] env[61839]: DEBUG oslo_vmware.api [None req-778e677c-c811-4fc9-adf1-a009f3699287 tempest-ServersAaction247Test-1570847378 tempest-ServersAaction247Test-1570847378-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52424981-8956-af11-7cb2-2072b735bc59, 'name': SearchDatastore_Task, 'duration_secs': 0.009497} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 780.210870] env[61839]: DEBUG oslo_concurrency.lockutils [None req-778e677c-c811-4fc9-adf1-a009f3699287 tempest-ServersAaction247Test-1570847378 tempest-ServersAaction247Test-1570847378-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 780.211388] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-778e677c-c811-4fc9-adf1-a009f3699287 tempest-ServersAaction247Test-1570847378 tempest-ServersAaction247Test-1570847378-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk to [datastore1] 0bc0eefd-8a56-4cd6-a0b5-818cc437d917/0bc0eefd-8a56-4cd6-a0b5-818cc437d917.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 780.211708] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3ea694eb-2dff-4cca-85da-7b328fbbb28c {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.221028] env[61839]: DEBUG oslo_vmware.api [None req-778e677c-c811-4fc9-adf1-a009f3699287 tempest-ServersAaction247Test-1570847378 tempest-ServersAaction247Test-1570847378-project-member] Waiting for the task: (returnval){ [ 780.221028] env[61839]: value = "task-1314315" [ 780.221028] env[61839]: _type = "Task" [ 780.221028] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 780.227426] env[61839]: DEBUG oslo_vmware.api [None req-778e677c-c811-4fc9-adf1-a009f3699287 tempest-ServersAaction247Test-1570847378 tempest-ServersAaction247Test-1570847378-project-member] Task: {'id': task-1314315, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 780.442450] env[61839]: DEBUG oslo_concurrency.lockutils [None req-130eacd9-cc23-48e6-bf9f-acda8817b91f tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.056s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 780.442951] env[61839]: DEBUG nova.compute.manager [None req-130eacd9-cc23-48e6-bf9f-acda8817b91f tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] [instance: 687e6bf8-cf2a-4a9f-ad87-b42c4d0ced0a] Start building networks asynchronously for instance. 
{{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 780.445869] env[61839]: DEBUG oslo_concurrency.lockutils [None req-ebcaba21-dc43-4473-b939-c569f2c1af99 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.452s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 780.447294] env[61839]: INFO nova.compute.claims [None req-ebcaba21-dc43-4473-b939-c569f2c1af99 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: e65da0fd-e877-4b25-a319-e4d65397056a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 780.730755] env[61839]: DEBUG oslo_vmware.api [None req-778e677c-c811-4fc9-adf1-a009f3699287 tempest-ServersAaction247Test-1570847378 tempest-ServersAaction247Test-1570847378-project-member] Task: {'id': task-1314315, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 780.954195] env[61839]: DEBUG nova.compute.utils [None req-130eacd9-cc23-48e6-bf9f-acda8817b91f tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] Using /dev/sd instead of None {{(pid=61839) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 780.955625] env[61839]: DEBUG nova.compute.manager [None req-130eacd9-cc23-48e6-bf9f-acda8817b91f tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] [instance: 687e6bf8-cf2a-4a9f-ad87-b42c4d0ced0a] Allocating IP information in the background. {{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 780.955833] env[61839]: DEBUG nova.network.neutron [None req-130eacd9-cc23-48e6-bf9f-acda8817b91f tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] [instance: 687e6bf8-cf2a-4a9f-ad87-b42c4d0ced0a] allocate_for_instance() {{(pid=61839) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 780.995579] env[61839]: DEBUG nova.policy [None req-130eacd9-cc23-48e6-bf9f-acda8817b91f tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '71c8c05326254b0cbada5839fb86bd08', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f95ba3c476274972b332b5f24ad7cb1c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61839) authorize /opt/stack/nova/nova/policy.py:201}} [ 781.205942] env[61839]: INFO nova.scheduler.client.report [None req-73f4029e-d3f8-4a4c-9058-31fdb7ebb120 tempest-ServerRescueTestJSONUnderV235-933831856 tempest-ServerRescueTestJSONUnderV235-933831856-project-member] Deleted allocations for instance 59ea60d5-7296-480c-ac03-ec0a7c021300 [ 781.230637] env[61839]: DEBUG oslo_vmware.api [None req-778e677c-c811-4fc9-adf1-a009f3699287 tempest-ServersAaction247Test-1570847378 tempest-ServersAaction247Test-1570847378-project-member] Task: {'id': task-1314315, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.512818} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 781.230735] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-778e677c-c811-4fc9-adf1-a009f3699287 tempest-ServersAaction247Test-1570847378 tempest-ServersAaction247Test-1570847378-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk to [datastore1] 0bc0eefd-8a56-4cd6-a0b5-818cc437d917/0bc0eefd-8a56-4cd6-a0b5-818cc437d917.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 781.230963] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-778e677c-c811-4fc9-adf1-a009f3699287 tempest-ServersAaction247Test-1570847378 tempest-ServersAaction247Test-1570847378-project-member] [instance: 0bc0eefd-8a56-4cd6-a0b5-818cc437d917] Extending root virtual disk to 1048576 {{(pid=61839) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 781.231248] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-bdf1caeb-dd16-4cbe-923e-0de4111f5529 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.237916] env[61839]: DEBUG oslo_vmware.api [None req-778e677c-c811-4fc9-adf1-a009f3699287 tempest-ServersAaction247Test-1570847378 tempest-ServersAaction247Test-1570847378-project-member] Waiting for the task: (returnval){ [ 781.237916] env[61839]: value = "task-1314316" [ 781.237916] env[61839]: _type = "Task" [ 781.237916] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 781.246380] env[61839]: DEBUG oslo_vmware.api [None req-778e677c-c811-4fc9-adf1-a009f3699287 tempest-ServersAaction247Test-1570847378 tempest-ServersAaction247Test-1570847378-project-member] Task: {'id': task-1314316, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.320141] env[61839]: DEBUG nova.network.neutron [None req-130eacd9-cc23-48e6-bf9f-acda8817b91f tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] [instance: 687e6bf8-cf2a-4a9f-ad87-b42c4d0ced0a] Successfully created port: bcfc5607-df16-4357-b78d-33b0451f9bb6 {{(pid=61839) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 781.460467] env[61839]: DEBUG nova.compute.manager [None req-130eacd9-cc23-48e6-bf9f-acda8817b91f tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] [instance: 687e6bf8-cf2a-4a9f-ad87-b42c4d0ced0a] Start building block device mappings for instance. 
{{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 781.707338] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e41117d4-e9b3-4da0-be01-9783274bcc81 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.714229] env[61839]: DEBUG oslo_concurrency.lockutils [None req-73f4029e-d3f8-4a4c-9058-31fdb7ebb120 tempest-ServerRescueTestJSONUnderV235-933831856 tempest-ServerRescueTestJSONUnderV235-933831856-project-member] Lock "59ea60d5-7296-480c-ac03-ec0a7c021300" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 179.083s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 781.717914] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e692109f-c895-4563-bac8-1544c63dd16d {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.749180] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5af685d-35d5-43e2-8f16-6e023f3c4584 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.758156] env[61839]: DEBUG oslo_vmware.api [None req-778e677c-c811-4fc9-adf1-a009f3699287 tempest-ServersAaction247Test-1570847378 tempest-ServersAaction247Test-1570847378-project-member] Task: {'id': task-1314316, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070138} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 781.760322] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-778e677c-c811-4fc9-adf1-a009f3699287 tempest-ServersAaction247Test-1570847378 tempest-ServersAaction247Test-1570847378-project-member] [instance: 0bc0eefd-8a56-4cd6-a0b5-818cc437d917] Extended root virtual disk {{(pid=61839) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 781.760940] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70c60fe1-f9c2-47d0-94f3-2482302b2c52 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.764086] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b87012cd-3967-4888-a1bd-c46fdaf10ac8 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.778903] env[61839]: DEBUG nova.compute.provider_tree [None req-ebcaba21-dc43-4473-b939-c569f2c1af99 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 781.797018] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-778e677c-c811-4fc9-adf1-a009f3699287 tempest-ServersAaction247Test-1570847378 tempest-ServersAaction247Test-1570847378-project-member] [instance: 0bc0eefd-8a56-4cd6-a0b5-818cc437d917] Reconfiguring VM instance instance-00000035 to attach disk [datastore1] 0bc0eefd-8a56-4cd6-a0b5-818cc437d917/0bc0eefd-8a56-4cd6-a0b5-818cc437d917.vmdk or device None with type sparse {{(pid=61839) attach_disk_to_vm 
/opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 781.798725] env[61839]: DEBUG nova.scheduler.client.report [None req-ebcaba21-dc43-4473-b939-c569f2c1af99 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 781.802084] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7888b393-3968-4073-ad2c-0c7058051b8c {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.818728] env[61839]: DEBUG oslo_concurrency.lockutils [None req-ebcaba21-dc43-4473-b939-c569f2c1af99 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 1.373s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 781.819239] env[61839]: DEBUG nova.compute.manager [None req-ebcaba21-dc43-4473-b939-c569f2c1af99 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: e65da0fd-e877-4b25-a319-e4d65397056a] Start building networks asynchronously for instance. {{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 781.821810] env[61839]: DEBUG oslo_concurrency.lockutils [None req-85100d62-2957-4746-911c-e5a5695cb3f6 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.005s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 781.824852] env[61839]: INFO nova.compute.claims [None req-85100d62-2957-4746-911c-e5a5695cb3f6 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 5c29c188-a34b-4751-9f8b-166af7b15088] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 781.833645] env[61839]: DEBUG oslo_vmware.api [None req-778e677c-c811-4fc9-adf1-a009f3699287 tempest-ServersAaction247Test-1570847378 tempest-ServersAaction247Test-1570847378-project-member] Waiting for the task: (returnval){ [ 781.833645] env[61839]: value = "task-1314317" [ 781.833645] env[61839]: _type = "Task" [ 781.833645] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 781.842090] env[61839]: DEBUG oslo_vmware.api [None req-778e677c-c811-4fc9-adf1-a009f3699287 tempest-ServersAaction247Test-1570847378 tempest-ServersAaction247Test-1570847378-project-member] Task: {'id': task-1314317, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 782.220711] env[61839]: DEBUG nova.compute.manager [None req-1a15c259-b1dd-44bd-bdfc-6fbb7bd80a7c tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] [instance: 86525ea7-af75-4b10-85a1-c0fbab73ea5f] Starting instance... {{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 782.331819] env[61839]: DEBUG nova.compute.utils [None req-ebcaba21-dc43-4473-b939-c569f2c1af99 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Using /dev/sd instead of None {{(pid=61839) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 782.333365] env[61839]: DEBUG nova.compute.manager [None req-ebcaba21-dc43-4473-b939-c569f2c1af99 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: e65da0fd-e877-4b25-a319-e4d65397056a] Allocating IP information in the background. {{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 782.333572] env[61839]: DEBUG nova.network.neutron [None req-ebcaba21-dc43-4473-b939-c569f2c1af99 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: e65da0fd-e877-4b25-a319-e4d65397056a] allocate_for_instance() {{(pid=61839) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 782.345764] env[61839]: DEBUG oslo_vmware.api [None req-778e677c-c811-4fc9-adf1-a009f3699287 tempest-ServersAaction247Test-1570847378 tempest-ServersAaction247Test-1570847378-project-member] Task: {'id': task-1314317, 'name': ReconfigVM_Task, 'duration_secs': 0.26021} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 782.346469] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-778e677c-c811-4fc9-adf1-a009f3699287 tempest-ServersAaction247Test-1570847378 tempest-ServersAaction247Test-1570847378-project-member] [instance: 0bc0eefd-8a56-4cd6-a0b5-818cc437d917] Reconfigured VM instance instance-00000035 to attach disk [datastore1] 0bc0eefd-8a56-4cd6-a0b5-818cc437d917/0bc0eefd-8a56-4cd6-a0b5-818cc437d917.vmdk or device None with type sparse {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 782.347065] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5604938d-5115-414e-a2a2-b62890556c79 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.354297] env[61839]: DEBUG oslo_vmware.api [None req-778e677c-c811-4fc9-adf1-a009f3699287 tempest-ServersAaction247Test-1570847378 tempest-ServersAaction247Test-1570847378-project-member] Waiting for the task: (returnval){ [ 782.354297] env[61839]: value = "task-1314318" [ 782.354297] env[61839]: _type = "Task" [ 782.354297] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 782.363502] env[61839]: DEBUG oslo_vmware.api [None req-778e677c-c811-4fc9-adf1-a009f3699287 tempest-ServersAaction247Test-1570847378 tempest-ServersAaction247Test-1570847378-project-member] Task: {'id': task-1314318, 'name': Rename_Task} progress is 5%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 782.386144] env[61839]: DEBUG nova.policy [None req-ebcaba21-dc43-4473-b939-c569f2c1af99 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'de344d8cc13340d7affed971d75f486d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '251b0d2531ba4f14a2eb6ea75382c418', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61839) authorize /opt/stack/nova/nova/policy.py:201}} [ 782.473529] env[61839]: DEBUG nova.compute.manager [None req-130eacd9-cc23-48e6-bf9f-acda8817b91f tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] [instance: 687e6bf8-cf2a-4a9f-ad87-b42c4d0ced0a] Start spawning the instance on the hypervisor. {{(pid=61839) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 782.503021] env[61839]: DEBUG nova.virt.hardware [None req-130eacd9-cc23-48e6-bf9f-acda8817b91f tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-18T16:51:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-18T16:50:57Z,direct_url=,disk_format='vmdk',id=e497cc62-282a-4a70-9770-22d80d8a1013,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a98e034276e44534a9feace637762da7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-18T16:50:58Z,virtual_size=,visibility=), allow threads: False {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 782.503021] env[61839]: DEBUG nova.virt.hardware [None req-130eacd9-cc23-48e6-bf9f-acda8817b91f tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] Flavor limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 782.503021] env[61839]: DEBUG nova.virt.hardware [None req-130eacd9-cc23-48e6-bf9f-acda8817b91f tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] Image limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 782.503257] env[61839]: DEBUG nova.virt.hardware [None req-130eacd9-cc23-48e6-bf9f-acda8817b91f tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] Flavor pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 782.503257] env[61839]: DEBUG nova.virt.hardware [None req-130eacd9-cc23-48e6-bf9f-acda8817b91f tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] Image pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 782.503257] env[61839]: DEBUG 
nova.virt.hardware [None req-130eacd9-cc23-48e6-bf9f-acda8817b91f tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 782.503257] env[61839]: DEBUG nova.virt.hardware [None req-130eacd9-cc23-48e6-bf9f-acda8817b91f tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 782.503257] env[61839]: DEBUG nova.virt.hardware [None req-130eacd9-cc23-48e6-bf9f-acda8817b91f tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 782.503374] env[61839]: DEBUG nova.virt.hardware [None req-130eacd9-cc23-48e6-bf9f-acda8817b91f tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] Got 1 possible topologies {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 782.503374] env[61839]: DEBUG nova.virt.hardware [None req-130eacd9-cc23-48e6-bf9f-acda8817b91f tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 782.503516] env[61839]: DEBUG nova.virt.hardware [None req-130eacd9-cc23-48e6-bf9f-acda8817b91f tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 782.504679] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54b12e37-bddb-4003-94a1-38114333652e {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.513202] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c0b2a64-c4ca-4f2b-805c-e93126dcee8e {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.746035] env[61839]: DEBUG oslo_concurrency.lockutils [None req-1a15c259-b1dd-44bd-bdfc-6fbb7bd80a7c tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 782.768009] env[61839]: DEBUG nova.network.neutron [None req-ebcaba21-dc43-4473-b939-c569f2c1af99 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: e65da0fd-e877-4b25-a319-e4d65397056a] Successfully created port: 140a636b-8035-4bf7-8462-ae02a41f2316 {{(pid=61839) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 782.839390] env[61839]: DEBUG nova.compute.manager [None req-ebcaba21-dc43-4473-b939-c569f2c1af99 
tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: e65da0fd-e877-4b25-a319-e4d65397056a] Start building block device mappings for instance. {{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 782.865869] env[61839]: DEBUG oslo_vmware.api [None req-778e677c-c811-4fc9-adf1-a009f3699287 tempest-ServersAaction247Test-1570847378 tempest-ServersAaction247Test-1570847378-project-member] Task: {'id': task-1314318, 'name': Rename_Task, 'duration_secs': 0.143625} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 782.867883] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-778e677c-c811-4fc9-adf1-a009f3699287 tempest-ServersAaction247Test-1570847378 tempest-ServersAaction247Test-1570847378-project-member] [instance: 0bc0eefd-8a56-4cd6-a0b5-818cc437d917] Powering on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 782.867883] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a189da57-423f-4389-b92a-8b55c0f4a0c7 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.876244] env[61839]: DEBUG oslo_vmware.api [None req-778e677c-c811-4fc9-adf1-a009f3699287 tempest-ServersAaction247Test-1570847378 tempest-ServersAaction247Test-1570847378-project-member] Waiting for the task: (returnval){ [ 782.876244] env[61839]: value = "task-1314319" [ 782.876244] env[61839]: _type = "Task" [ 782.876244] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 782.891470] env[61839]: DEBUG oslo_vmware.api [None req-778e677c-c811-4fc9-adf1-a009f3699287 tempest-ServersAaction247Test-1570847378 tempest-ServersAaction247Test-1570847378-project-member] Task: {'id': task-1314319, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 783.134787] env[61839]: DEBUG nova.compute.manager [req-15f692b6-ce79-4bd9-9a7e-b5ce979fe8c5 req-60a31160-3866-4a6f-bd32-8a53d5dc7db7 service nova] [instance: 687e6bf8-cf2a-4a9f-ad87-b42c4d0ced0a] Received event network-vif-plugged-bcfc5607-df16-4357-b78d-33b0451f9bb6 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 783.134787] env[61839]: DEBUG oslo_concurrency.lockutils [req-15f692b6-ce79-4bd9-9a7e-b5ce979fe8c5 req-60a31160-3866-4a6f-bd32-8a53d5dc7db7 service nova] Acquiring lock "687e6bf8-cf2a-4a9f-ad87-b42c4d0ced0a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 783.134945] env[61839]: DEBUG oslo_concurrency.lockutils [req-15f692b6-ce79-4bd9-9a7e-b5ce979fe8c5 req-60a31160-3866-4a6f-bd32-8a53d5dc7db7 service nova] Lock "687e6bf8-cf2a-4a9f-ad87-b42c4d0ced0a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 783.136611] env[61839]: DEBUG oslo_concurrency.lockutils [req-15f692b6-ce79-4bd9-9a7e-b5ce979fe8c5 req-60a31160-3866-4a6f-bd32-8a53d5dc7db7 service nova] Lock "687e6bf8-cf2a-4a9f-ad87-b42c4d0ced0a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.002s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 783.136723] env[61839]: DEBUG nova.compute.manager [req-15f692b6-ce79-4bd9-9a7e-b5ce979fe8c5 req-60a31160-3866-4a6f-bd32-8a53d5dc7db7 service nova] [instance: 687e6bf8-cf2a-4a9f-ad87-b42c4d0ced0a] No waiting events found dispatching network-vif-plugged-bcfc5607-df16-4357-b78d-33b0451f9bb6 {{(pid=61839) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 783.136900] env[61839]: WARNING nova.compute.manager [req-15f692b6-ce79-4bd9-9a7e-b5ce979fe8c5 req-60a31160-3866-4a6f-bd32-8a53d5dc7db7 service nova] [instance: 687e6bf8-cf2a-4a9f-ad87-b42c4d0ced0a] Received unexpected event network-vif-plugged-bcfc5607-df16-4357-b78d-33b0451f9bb6 for instance with vm_state building and task_state spawning. 
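The three lock records and the closing WARNING above trace Nova's external-event handshake: the spawn path registers a waiter for a tagged event before it expects it, Neutron's network-vif-plugged notification is dispatched to that waiter, and a notification that arrives before any waiter is registered is logged as unexpected and dropped. The following is a self-contained toy model of that dispatch, not Nova's implementation; the class and method names are hypothetical and only mirror the behaviour visible in these records.

import threading

class ToyInstanceEvents:
    # Toy model of the prepare/dispatch handshake; not Nova's code.
    def __init__(self):
        self._waiters = {}   # (instance_uuid, event_tag) -> threading.Event

    def prepare_for_event(self, instance_uuid, tag):
        # register interest before triggering the work that emits the event
        ev = threading.Event()
        self._waiters[(instance_uuid, tag)] = ev
        return ev

    def dispatch_event(self, instance_uuid, tag):
        ev = self._waiters.pop((instance_uuid, tag), None)
        if ev is None:
            # corresponds to "No waiting events found dispatching ..." and
            # the WARNING "Received unexpected event ..." in the log above
            print(f"unexpected event {tag} for {instance_uuid}; dropping")
        else:
            ev.set()

events = ToyInstanceEvents()
w = events.prepare_for_event("687e6bf8", "network-vif-plugged-bcfc5607")
events.dispatch_event("687e6bf8", "network-vif-plugged-bcfc5607")
assert w.wait(timeout=1.0)   # waiter is released once the event arrives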
[ 783.196363] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-620ab5e8-4836-4da3-bc23-251edeaeb7c8 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.205824] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b21f912-84c3-4a39-ba97-338e432b1e63 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.239732] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf5b0572-ba97-4e99-87b5-b6da46eca756 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.247596] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e6ceb49-a52d-4c2b-a03c-be3ab8c6b280 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.263311] env[61839]: DEBUG nova.compute.provider_tree [None req-85100d62-2957-4746-911c-e5a5695cb3f6 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 783.392102] env[61839]: DEBUG oslo_vmware.api [None req-778e677c-c811-4fc9-adf1-a009f3699287 tempest-ServersAaction247Test-1570847378 tempest-ServersAaction247Test-1570847378-project-member] Task: {'id': task-1314319, 'name': PowerOnVM_Task} progress is 100%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 783.619394] env[61839]: DEBUG nova.network.neutron [None req-130eacd9-cc23-48e6-bf9f-acda8817b91f tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] [instance: 687e6bf8-cf2a-4a9f-ad87-b42c4d0ced0a] Successfully updated port: bcfc5607-df16-4357-b78d-33b0451f9bb6 {{(pid=61839) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 783.655928] env[61839]: DEBUG nova.compute.manager [req-3ea665f5-a2b2-42c9-abe4-b93867e86aab req-4d469ced-99f5-4201-b3bc-102b9816300f service nova] [instance: 687e6bf8-cf2a-4a9f-ad87-b42c4d0ced0a] Received event network-changed-bcfc5607-df16-4357-b78d-33b0451f9bb6 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 783.655928] env[61839]: DEBUG nova.compute.manager [req-3ea665f5-a2b2-42c9-abe4-b93867e86aab req-4d469ced-99f5-4201-b3bc-102b9816300f service nova] [instance: 687e6bf8-cf2a-4a9f-ad87-b42c4d0ced0a] Refreshing instance network info cache due to event network-changed-bcfc5607-df16-4357-b78d-33b0451f9bb6. 
{{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 783.655928] env[61839]: DEBUG oslo_concurrency.lockutils [req-3ea665f5-a2b2-42c9-abe4-b93867e86aab req-4d469ced-99f5-4201-b3bc-102b9816300f service nova] Acquiring lock "refresh_cache-687e6bf8-cf2a-4a9f-ad87-b42c4d0ced0a" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 783.655928] env[61839]: DEBUG oslo_concurrency.lockutils [req-3ea665f5-a2b2-42c9-abe4-b93867e86aab req-4d469ced-99f5-4201-b3bc-102b9816300f service nova] Acquired lock "refresh_cache-687e6bf8-cf2a-4a9f-ad87-b42c4d0ced0a" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 783.656133] env[61839]: DEBUG nova.network.neutron [req-3ea665f5-a2b2-42c9-abe4-b93867e86aab req-4d469ced-99f5-4201-b3bc-102b9816300f service nova] [instance: 687e6bf8-cf2a-4a9f-ad87-b42c4d0ced0a] Refreshing network info cache for port bcfc5607-df16-4357-b78d-33b0451f9bb6 {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 783.766887] env[61839]: DEBUG nova.scheduler.client.report [None req-85100d62-2957-4746-911c-e5a5695cb3f6 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 783.854688] env[61839]: DEBUG nova.compute.manager [None req-ebcaba21-dc43-4473-b939-c569f2c1af99 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: e65da0fd-e877-4b25-a319-e4d65397056a] Start spawning the instance on the hypervisor. 
{{(pid=61839) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 783.880141] env[61839]: DEBUG nova.virt.hardware [None req-ebcaba21-dc43-4473-b939-c569f2c1af99 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-18T16:51:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-18T16:50:57Z,direct_url=,disk_format='vmdk',id=e497cc62-282a-4a70-9770-22d80d8a1013,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a98e034276e44534a9feace637762da7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-18T16:50:58Z,virtual_size=,visibility=), allow threads: False {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 783.880415] env[61839]: DEBUG nova.virt.hardware [None req-ebcaba21-dc43-4473-b939-c569f2c1af99 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Flavor limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 783.880568] env[61839]: DEBUG nova.virt.hardware [None req-ebcaba21-dc43-4473-b939-c569f2c1af99 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Image limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 783.880745] env[61839]: DEBUG nova.virt.hardware [None req-ebcaba21-dc43-4473-b939-c569f2c1af99 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Flavor pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 783.880880] env[61839]: DEBUG nova.virt.hardware [None req-ebcaba21-dc43-4473-b939-c569f2c1af99 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Image pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 783.881033] env[61839]: DEBUG nova.virt.hardware [None req-ebcaba21-dc43-4473-b939-c569f2c1af99 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 783.881245] env[61839]: DEBUG nova.virt.hardware [None req-ebcaba21-dc43-4473-b939-c569f2c1af99 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 783.881402] env[61839]: DEBUG nova.virt.hardware [None req-ebcaba21-dc43-4473-b939-c569f2c1af99 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 783.881565] env[61839]: DEBUG nova.virt.hardware [None req-ebcaba21-dc43-4473-b939-c569f2c1af99 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Got 1 possible 
topologies {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 783.881744] env[61839]: DEBUG nova.virt.hardware [None req-ebcaba21-dc43-4473-b939-c569f2c1af99 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 783.881883] env[61839]: DEBUG nova.virt.hardware [None req-ebcaba21-dc43-4473-b939-c569f2c1af99 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 783.882862] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6146e48-4198-42fd-8f1f-909464b0f8d5 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.892963] env[61839]: DEBUG oslo_vmware.api [None req-778e677c-c811-4fc9-adf1-a009f3699287 tempest-ServersAaction247Test-1570847378 tempest-ServersAaction247Test-1570847378-project-member] Task: {'id': task-1314319, 'name': PowerOnVM_Task, 'duration_secs': 0.522344} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 783.894989] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-778e677c-c811-4fc9-adf1-a009f3699287 tempest-ServersAaction247Test-1570847378 tempest-ServersAaction247Test-1570847378-project-member] [instance: 0bc0eefd-8a56-4cd6-a0b5-818cc437d917] Powered on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 783.895214] env[61839]: INFO nova.compute.manager [None req-778e677c-c811-4fc9-adf1-a009f3699287 tempest-ServersAaction247Test-1570847378 tempest-ServersAaction247Test-1570847378-project-member] [instance: 0bc0eefd-8a56-4cd6-a0b5-818cc437d917] Took 6.50 seconds to spawn the instance on the hypervisor. 
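The nova.virt.hardware records just above narrate the guest-CPU-topology search for the 1-vCPU m1.nano flavor: with no flavor or image constraints the limits default to 65536 per dimension, every (sockets, cores, threads) factorisation of the vCPU count is a candidate, and 1 vCPU therefore admits only 1:1:1. A simplified, runnable sketch of that enumeration follows; it is not Nova's exact code, and the helper name is made up for illustration.

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                        max_threads=65536):
    # every triple whose product equals the vCPU count and which respects
    # the per-dimension maxima is a candidate topology
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        for cores in range(1, min(vcpus, max_cores) + 1):
            for threads in range(1, min(vcpus, max_threads) + 1):
                if sockets * cores * threads == vcpus:
                    yield (sockets, cores, threads)

print(list(possible_topologies(1)))   # [(1, 1, 1)] -> "Got 1 possible topologies"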
[ 783.895410] env[61839]: DEBUG nova.compute.manager [None req-778e677c-c811-4fc9-adf1-a009f3699287 tempest-ServersAaction247Test-1570847378 tempest-ServersAaction247Test-1570847378-project-member] [instance: 0bc0eefd-8a56-4cd6-a0b5-818cc437d917] Checking state {{(pid=61839) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 783.896203] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a88ec559-71ae-4e67-8820-64fdf76794cd {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.899468] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89759b28-cc85-4aba-aa61-8339ba442103 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.123076] env[61839]: DEBUG oslo_concurrency.lockutils [None req-130eacd9-cc23-48e6-bf9f-acda8817b91f tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] Acquiring lock "refresh_cache-687e6bf8-cf2a-4a9f-ad87-b42c4d0ced0a" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 784.205355] env[61839]: DEBUG nova.network.neutron [req-3ea665f5-a2b2-42c9-abe4-b93867e86aab req-4d469ced-99f5-4201-b3bc-102b9816300f service nova] [instance: 687e6bf8-cf2a-4a9f-ad87-b42c4d0ced0a] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 784.272375] env[61839]: DEBUG oslo_concurrency.lockutils [None req-85100d62-2957-4746-911c-e5a5695cb3f6 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.450s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 784.273042] env[61839]: DEBUG nova.compute.manager [None req-85100d62-2957-4746-911c-e5a5695cb3f6 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 5c29c188-a34b-4751-9f8b-166af7b15088] Start building networks asynchronously for instance. {{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 784.277804] env[61839]: DEBUG oslo_concurrency.lockutils [None req-ba95565c-feba-4942-8392-448356c664a8 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.352s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 784.367267] env[61839]: DEBUG nova.network.neutron [req-3ea665f5-a2b2-42c9-abe4-b93867e86aab req-4d469ced-99f5-4201-b3bc-102b9816300f service nova] [instance: 687e6bf8-cf2a-4a9f-ad87-b42c4d0ced0a] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 784.423648] env[61839]: INFO nova.compute.manager [None req-778e677c-c811-4fc9-adf1-a009f3699287 tempest-ServersAaction247Test-1570847378 tempest-ServersAaction247Test-1570847378-project-member] [instance: 0bc0eefd-8a56-4cd6-a0b5-818cc437d917] Took 26.36 seconds to build instance. 
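The recurring pairs of "Waiting for the task: (returnval){ ... }" and "Task: {'id': ..., 'name': ...} progress is N%." records throughout this trace come from oslo.vmware's wait_for_task/_poll_task loop (api.py:397 and :434 in the tails above): the client re-reads the server-side Task object on a fixed interval until it reports success or error. A minimal stand-in that polls a local callable instead of a vCenter task, purely to illustrate the loop shape; the function name and (state, progress) return convention are assumptions for this sketch.

import time

def wait_for_task(poll, interval=0.5, timeout=60.0):
    # poll() -> (state, progress); loop until success/error or timeout
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        state, progress = poll()
        print(f"progress is {progress}%")      # cf. the DEBUG lines above
        if state == "success":                 # "... completed successfully."
            return
        if state == "error":
            raise RuntimeError("task reported an error")
        time.sleep(interval)
    raise TimeoutError("task did not complete in time")

# toy task that succeeds on the third poll, mimicking 0% -> 33% -> 100%
_states = iter([("running", 0), ("running", 33), ("success", 100)])
wait_for_task(lambda: next(_states), interval=0.01)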
[ 784.703095] env[61839]: DEBUG nova.network.neutron [None req-ebcaba21-dc43-4473-b939-c569f2c1af99 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: e65da0fd-e877-4b25-a319-e4d65397056a] Successfully updated port: 140a636b-8035-4bf7-8462-ae02a41f2316 {{(pid=61839) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 784.782941] env[61839]: DEBUG nova.compute.utils [None req-85100d62-2957-4746-911c-e5a5695cb3f6 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Using /dev/sd instead of None {{(pid=61839) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 784.788226] env[61839]: DEBUG nova.compute.manager [None req-85100d62-2957-4746-911c-e5a5695cb3f6 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 5c29c188-a34b-4751-9f8b-166af7b15088] Allocating IP information in the background. {{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 784.789056] env[61839]: DEBUG nova.network.neutron [None req-85100d62-2957-4746-911c-e5a5695cb3f6 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 5c29c188-a34b-4751-9f8b-166af7b15088] allocate_for_instance() {{(pid=61839) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 784.860561] env[61839]: DEBUG nova.policy [None req-85100d62-2957-4746-911c-e5a5695cb3f6 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5e327fdc367e49559dc1ef4862ca1e2c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '223d94c193814f649b5d1f35e3756071', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61839) authorize /opt/stack/nova/nova/policy.py:201}} [ 784.871942] env[61839]: DEBUG oslo_concurrency.lockutils [req-3ea665f5-a2b2-42c9-abe4-b93867e86aab req-4d469ced-99f5-4201-b3bc-102b9816300f service nova] Releasing lock "refresh_cache-687e6bf8-cf2a-4a9f-ad87-b42c4d0ced0a" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 784.871942] env[61839]: DEBUG oslo_concurrency.lockutils [None req-130eacd9-cc23-48e6-bf9f-acda8817b91f tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] Acquired lock "refresh_cache-687e6bf8-cf2a-4a9f-ad87-b42c4d0ced0a" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 784.871942] env[61839]: DEBUG nova.network.neutron [None req-130eacd9-cc23-48e6-bf9f-acda8817b91f tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] [instance: 687e6bf8-cf2a-4a9f-ad87-b42c4d0ced0a] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 784.932558] env[61839]: DEBUG oslo_concurrency.lockutils [None req-778e677c-c811-4fc9-adf1-a009f3699287 tempest-ServersAaction247Test-1570847378 tempest-ServersAaction247Test-1570847378-project-member] Lock "0bc0eefd-8a56-4cd6-a0b5-818cc437d917" "released" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 154.469s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 785.083835] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02a900e6-d71d-4894-93a5-a36c90dbfbef {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.091522] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a62b2aa9-364d-4453-8233-82c60768bd69 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.121796] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a13a8cd4-5a13-40ff-b2d3-e69165d31695 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.128899] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6a271f4-39ee-4482-89ae-e838fb9f5653 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.142660] env[61839]: DEBUG nova.compute.provider_tree [None req-ba95565c-feba-4942-8392-448356c664a8 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 785.164022] env[61839]: DEBUG nova.network.neutron [None req-85100d62-2957-4746-911c-e5a5695cb3f6 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 5c29c188-a34b-4751-9f8b-166af7b15088] Successfully created port: 2d6e228a-75ff-4bff-bc8d-bdde3218cf40 {{(pid=61839) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 785.205996] env[61839]: DEBUG oslo_concurrency.lockutils [None req-ebcaba21-dc43-4473-b939-c569f2c1af99 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Acquiring lock "refresh_cache-e65da0fd-e877-4b25-a319-e4d65397056a" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 785.206164] env[61839]: DEBUG oslo_concurrency.lockutils [None req-ebcaba21-dc43-4473-b939-c569f2c1af99 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Acquired lock "refresh_cache-e65da0fd-e877-4b25-a319-e4d65397056a" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 785.206318] env[61839]: DEBUG nova.network.neutron [None req-ebcaba21-dc43-4473-b939-c569f2c1af99 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: e65da0fd-e877-4b25-a319-e4d65397056a] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 785.286092] env[61839]: DEBUG nova.compute.manager [None req-85100d62-2957-4746-911c-e5a5695cb3f6 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 5c29c188-a34b-4751-9f8b-166af7b15088] Start building block device mappings for instance. 
{{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 785.437610] env[61839]: DEBUG nova.compute.manager [None req-67f27bd5-5058-4162-a21b-0d02d096f4b9 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: a661cc10-5c4e-421b-b70b-189f0a613e8a] Starting instance... {{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 785.531168] env[61839]: DEBUG nova.network.neutron [None req-130eacd9-cc23-48e6-bf9f-acda8817b91f tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] [instance: 687e6bf8-cf2a-4a9f-ad87-b42c4d0ced0a] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 785.645356] env[61839]: DEBUG nova.scheduler.client.report [None req-ba95565c-feba-4942-8392-448356c664a8 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 785.720207] env[61839]: DEBUG nova.compute.manager [None req-c7d638c4-b228-45e0-b361-03265858cd51 tempest-ServersAaction247Test-1570847378 tempest-ServersAaction247Test-1570847378-project-member] [instance: 0bc0eefd-8a56-4cd6-a0b5-818cc437d917] Checking state {{(pid=61839) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 785.722660] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e97dbc6c-9cca-430f-a013-89932428c0c8 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.778779] env[61839]: DEBUG oslo_concurrency.lockutils [None req-78cb2bb4-fa32-4f8b-ac66-631fd240ea4c tempest-ServersAaction247Test-1570847378 tempest-ServersAaction247Test-1570847378-project-member] Acquiring lock "0bc0eefd-8a56-4cd6-a0b5-818cc437d917" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 785.779832] env[61839]: DEBUG oslo_concurrency.lockutils [None req-78cb2bb4-fa32-4f8b-ac66-631fd240ea4c tempest-ServersAaction247Test-1570847378 tempest-ServersAaction247Test-1570847378-project-member] Lock "0bc0eefd-8a56-4cd6-a0b5-818cc437d917" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 785.779832] env[61839]: DEBUG oslo_concurrency.lockutils [None req-78cb2bb4-fa32-4f8b-ac66-631fd240ea4c tempest-ServersAaction247Test-1570847378 tempest-ServersAaction247Test-1570847378-project-member] Acquiring lock "0bc0eefd-8a56-4cd6-a0b5-818cc437d917-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 785.779832] 
env[61839]: DEBUG oslo_concurrency.lockutils [None req-78cb2bb4-fa32-4f8b-ac66-631fd240ea4c tempest-ServersAaction247Test-1570847378 tempest-ServersAaction247Test-1570847378-project-member] Lock "0bc0eefd-8a56-4cd6-a0b5-818cc437d917-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 785.779832] env[61839]: DEBUG oslo_concurrency.lockutils [None req-78cb2bb4-fa32-4f8b-ac66-631fd240ea4c tempest-ServersAaction247Test-1570847378 tempest-ServersAaction247Test-1570847378-project-member] Lock "0bc0eefd-8a56-4cd6-a0b5-818cc437d917-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 785.785024] env[61839]: INFO nova.compute.manager [None req-78cb2bb4-fa32-4f8b-ac66-631fd240ea4c tempest-ServersAaction247Test-1570847378 tempest-ServersAaction247Test-1570847378-project-member] [instance: 0bc0eefd-8a56-4cd6-a0b5-818cc437d917] Terminating instance [ 785.785317] env[61839]: DEBUG nova.network.neutron [None req-ebcaba21-dc43-4473-b939-c569f2c1af99 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: e65da0fd-e877-4b25-a319-e4d65397056a] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 785.787587] env[61839]: DEBUG oslo_concurrency.lockutils [None req-78cb2bb4-fa32-4f8b-ac66-631fd240ea4c tempest-ServersAaction247Test-1570847378 tempest-ServersAaction247Test-1570847378-project-member] Acquiring lock "refresh_cache-0bc0eefd-8a56-4cd6-a0b5-818cc437d917" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 785.787734] env[61839]: DEBUG oslo_concurrency.lockutils [None req-78cb2bb4-fa32-4f8b-ac66-631fd240ea4c tempest-ServersAaction247Test-1570847378 tempest-ServersAaction247Test-1570847378-project-member] Acquired lock "refresh_cache-0bc0eefd-8a56-4cd6-a0b5-818cc437d917" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 785.787893] env[61839]: DEBUG nova.network.neutron [None req-78cb2bb4-fa32-4f8b-ac66-631fd240ea4c tempest-ServersAaction247Test-1570847378 tempest-ServersAaction247Test-1570847378-project-member] [instance: 0bc0eefd-8a56-4cd6-a0b5-818cc437d917] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 785.963632] env[61839]: DEBUG oslo_concurrency.lockutils [None req-67f27bd5-5058-4162-a21b-0d02d096f4b9 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 785.995076] env[61839]: DEBUG nova.network.neutron [None req-130eacd9-cc23-48e6-bf9f-acda8817b91f tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] [instance: 687e6bf8-cf2a-4a9f-ad87-b42c4d0ced0a] Updating instance_info_cache with network_info: [{"id": "bcfc5607-df16-4357-b78d-33b0451f9bb6", "address": "fa:16:3e:19:37:17", "network": {"id": "113e0d38-b139-40ea-a049-754ccec8453a", "bridge": "br-int", "label": 
"tempest-VolumesAdminNegativeTest-827804901-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f95ba3c476274972b332b5f24ad7cb1c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ac4015e0-e5e7-4b3f-8d8e-ef4501eea9aa", "external-id": "nsx-vlan-transportzone-132", "segmentation_id": 132, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbcfc5607-df", "ovs_interfaceid": "bcfc5607-df16-4357-b78d-33b0451f9bb6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 786.091060] env[61839]: DEBUG nova.network.neutron [None req-ebcaba21-dc43-4473-b939-c569f2c1af99 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: e65da0fd-e877-4b25-a319-e4d65397056a] Updating instance_info_cache with network_info: [{"id": "140a636b-8035-4bf7-8462-ae02a41f2316", "address": "fa:16:3e:ac:25:c0", "network": {"id": "8c9e6595-896e-44e0-996a-ff86bae5ad8e", "bridge": "br-int", "label": "tempest-ImagesTestJSON-788251660-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "251b0d2531ba4f14a2eb6ea75382c418", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ce17e10e-2fb0-4191-afee-e2b89fa15074", "external-id": "nsx-vlan-transportzone-352", "segmentation_id": 352, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap140a636b-80", "ovs_interfaceid": "140a636b-8035-4bf7-8462-ae02a41f2316", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 786.109433] env[61839]: DEBUG nova.compute.manager [req-16527f43-9581-4936-8ec0-45d43aa5f302 req-8416d8c1-2d49-4299-930f-79f1e0a54f7e service nova] [instance: e65da0fd-e877-4b25-a319-e4d65397056a] Received event network-vif-plugged-140a636b-8035-4bf7-8462-ae02a41f2316 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 786.109690] env[61839]: DEBUG oslo_concurrency.lockutils [req-16527f43-9581-4936-8ec0-45d43aa5f302 req-8416d8c1-2d49-4299-930f-79f1e0a54f7e service nova] Acquiring lock "e65da0fd-e877-4b25-a319-e4d65397056a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 786.109815] env[61839]: DEBUG oslo_concurrency.lockutils [req-16527f43-9581-4936-8ec0-45d43aa5f302 req-8416d8c1-2d49-4299-930f-79f1e0a54f7e 
service nova] Lock "e65da0fd-e877-4b25-a319-e4d65397056a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 786.109982] env[61839]: DEBUG oslo_concurrency.lockutils [req-16527f43-9581-4936-8ec0-45d43aa5f302 req-8416d8c1-2d49-4299-930f-79f1e0a54f7e service nova] Lock "e65da0fd-e877-4b25-a319-e4d65397056a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 786.110828] env[61839]: DEBUG nova.compute.manager [req-16527f43-9581-4936-8ec0-45d43aa5f302 req-8416d8c1-2d49-4299-930f-79f1e0a54f7e service nova] [instance: e65da0fd-e877-4b25-a319-e4d65397056a] No waiting events found dispatching network-vif-plugged-140a636b-8035-4bf7-8462-ae02a41f2316 {{(pid=61839) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 786.111103] env[61839]: WARNING nova.compute.manager [req-16527f43-9581-4936-8ec0-45d43aa5f302 req-8416d8c1-2d49-4299-930f-79f1e0a54f7e service nova] [instance: e65da0fd-e877-4b25-a319-e4d65397056a] Received unexpected event network-vif-plugged-140a636b-8035-4bf7-8462-ae02a41f2316 for instance with vm_state building and task_state spawning. [ 786.111292] env[61839]: DEBUG nova.compute.manager [req-16527f43-9581-4936-8ec0-45d43aa5f302 req-8416d8c1-2d49-4299-930f-79f1e0a54f7e service nova] [instance: e65da0fd-e877-4b25-a319-e4d65397056a] Received event network-changed-140a636b-8035-4bf7-8462-ae02a41f2316 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 786.111589] env[61839]: DEBUG nova.compute.manager [req-16527f43-9581-4936-8ec0-45d43aa5f302 req-8416d8c1-2d49-4299-930f-79f1e0a54f7e service nova] [instance: e65da0fd-e877-4b25-a319-e4d65397056a] Refreshing instance network info cache due to event network-changed-140a636b-8035-4bf7-8462-ae02a41f2316. {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 786.111672] env[61839]: DEBUG oslo_concurrency.lockutils [req-16527f43-9581-4936-8ec0-45d43aa5f302 req-8416d8c1-2d49-4299-930f-79f1e0a54f7e service nova] Acquiring lock "refresh_cache-e65da0fd-e877-4b25-a319-e4d65397056a" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 786.152342] env[61839]: DEBUG oslo_concurrency.lockutils [None req-ba95565c-feba-4942-8392-448356c664a8 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.872s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 786.152342] env[61839]: ERROR nova.compute.manager [None req-ba95565c-feba-4942-8392-448356c664a8 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] [instance: 619ec15b-463a-4daa-bffe-7d7a6022b962] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 032feb82-164b-4265-a01b-c038ccb8a163, please check neutron logs for more information. 
[ 786.152342] env[61839]: ERROR nova.compute.manager [instance: 619ec15b-463a-4daa-bffe-7d7a6022b962] Traceback (most recent call last): [ 786.152342] env[61839]: ERROR nova.compute.manager [instance: 619ec15b-463a-4daa-bffe-7d7a6022b962] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 786.152342] env[61839]: ERROR nova.compute.manager [instance: 619ec15b-463a-4daa-bffe-7d7a6022b962] self.driver.spawn(context, instance, image_meta, [ 786.152342] env[61839]: ERROR nova.compute.manager [instance: 619ec15b-463a-4daa-bffe-7d7a6022b962] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 786.152342] env[61839]: ERROR nova.compute.manager [instance: 619ec15b-463a-4daa-bffe-7d7a6022b962] self._vmops.spawn(context, instance, image_meta, injected_files, [ 786.152342] env[61839]: ERROR nova.compute.manager [instance: 619ec15b-463a-4daa-bffe-7d7a6022b962] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 786.152342] env[61839]: ERROR nova.compute.manager [instance: 619ec15b-463a-4daa-bffe-7d7a6022b962] vm_ref = self.build_virtual_machine(instance, [ 786.152733] env[61839]: ERROR nova.compute.manager [instance: 619ec15b-463a-4daa-bffe-7d7a6022b962] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 786.152733] env[61839]: ERROR nova.compute.manager [instance: 619ec15b-463a-4daa-bffe-7d7a6022b962] vif_infos = vmwarevif.get_vif_info(self._session, [ 786.152733] env[61839]: ERROR nova.compute.manager [instance: 619ec15b-463a-4daa-bffe-7d7a6022b962] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 786.152733] env[61839]: ERROR nova.compute.manager [instance: 619ec15b-463a-4daa-bffe-7d7a6022b962] for vif in network_info: [ 786.152733] env[61839]: ERROR nova.compute.manager [instance: 619ec15b-463a-4daa-bffe-7d7a6022b962] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 786.152733] env[61839]: ERROR nova.compute.manager [instance: 619ec15b-463a-4daa-bffe-7d7a6022b962] return self._sync_wrapper(fn, *args, **kwargs) [ 786.152733] env[61839]: ERROR nova.compute.manager [instance: 619ec15b-463a-4daa-bffe-7d7a6022b962] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 786.152733] env[61839]: ERROR nova.compute.manager [instance: 619ec15b-463a-4daa-bffe-7d7a6022b962] self.wait() [ 786.152733] env[61839]: ERROR nova.compute.manager [instance: 619ec15b-463a-4daa-bffe-7d7a6022b962] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 786.152733] env[61839]: ERROR nova.compute.manager [instance: 619ec15b-463a-4daa-bffe-7d7a6022b962] self[:] = self._gt.wait() [ 786.152733] env[61839]: ERROR nova.compute.manager [instance: 619ec15b-463a-4daa-bffe-7d7a6022b962] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 786.152733] env[61839]: ERROR nova.compute.manager [instance: 619ec15b-463a-4daa-bffe-7d7a6022b962] return self._exit_event.wait() [ 786.152733] env[61839]: ERROR nova.compute.manager [instance: 619ec15b-463a-4daa-bffe-7d7a6022b962] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 786.153044] env[61839]: ERROR nova.compute.manager [instance: 619ec15b-463a-4daa-bffe-7d7a6022b962] result = hub.switch() [ 786.153044] env[61839]: ERROR nova.compute.manager [instance: 619ec15b-463a-4daa-bffe-7d7a6022b962] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
786.153044] env[61839]: ERROR nova.compute.manager [instance: 619ec15b-463a-4daa-bffe-7d7a6022b962] return self.greenlet.switch() [ 786.153044] env[61839]: ERROR nova.compute.manager [instance: 619ec15b-463a-4daa-bffe-7d7a6022b962] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 786.153044] env[61839]: ERROR nova.compute.manager [instance: 619ec15b-463a-4daa-bffe-7d7a6022b962] result = function(*args, **kwargs) [ 786.153044] env[61839]: ERROR nova.compute.manager [instance: 619ec15b-463a-4daa-bffe-7d7a6022b962] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 786.153044] env[61839]: ERROR nova.compute.manager [instance: 619ec15b-463a-4daa-bffe-7d7a6022b962] return func(*args, **kwargs) [ 786.153044] env[61839]: ERROR nova.compute.manager [instance: 619ec15b-463a-4daa-bffe-7d7a6022b962] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 786.153044] env[61839]: ERROR nova.compute.manager [instance: 619ec15b-463a-4daa-bffe-7d7a6022b962] raise e [ 786.153044] env[61839]: ERROR nova.compute.manager [instance: 619ec15b-463a-4daa-bffe-7d7a6022b962] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 786.153044] env[61839]: ERROR nova.compute.manager [instance: 619ec15b-463a-4daa-bffe-7d7a6022b962] nwinfo = self.network_api.allocate_for_instance( [ 786.153044] env[61839]: ERROR nova.compute.manager [instance: 619ec15b-463a-4daa-bffe-7d7a6022b962] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 786.153044] env[61839]: ERROR nova.compute.manager [instance: 619ec15b-463a-4daa-bffe-7d7a6022b962] created_port_ids = self._update_ports_for_instance( [ 786.153333] env[61839]: ERROR nova.compute.manager [instance: 619ec15b-463a-4daa-bffe-7d7a6022b962] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 786.153333] env[61839]: ERROR nova.compute.manager [instance: 619ec15b-463a-4daa-bffe-7d7a6022b962] with excutils.save_and_reraise_exception(): [ 786.153333] env[61839]: ERROR nova.compute.manager [instance: 619ec15b-463a-4daa-bffe-7d7a6022b962] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 786.153333] env[61839]: ERROR nova.compute.manager [instance: 619ec15b-463a-4daa-bffe-7d7a6022b962] self.force_reraise() [ 786.153333] env[61839]: ERROR nova.compute.manager [instance: 619ec15b-463a-4daa-bffe-7d7a6022b962] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 786.153333] env[61839]: ERROR nova.compute.manager [instance: 619ec15b-463a-4daa-bffe-7d7a6022b962] raise self.value [ 786.153333] env[61839]: ERROR nova.compute.manager [instance: 619ec15b-463a-4daa-bffe-7d7a6022b962] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 786.153333] env[61839]: ERROR nova.compute.manager [instance: 619ec15b-463a-4daa-bffe-7d7a6022b962] updated_port = self._update_port( [ 786.153333] env[61839]: ERROR nova.compute.manager [instance: 619ec15b-463a-4daa-bffe-7d7a6022b962] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 786.153333] env[61839]: ERROR nova.compute.manager [instance: 619ec15b-463a-4daa-bffe-7d7a6022b962] _ensure_no_port_binding_failure(port) [ 786.153333] env[61839]: ERROR nova.compute.manager [instance: 619ec15b-463a-4daa-bffe-7d7a6022b962] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 786.153333] env[61839]: ERROR nova.compute.manager [instance: 619ec15b-463a-4daa-bffe-7d7a6022b962] raise exception.PortBindingFailed(port_id=port['id']) [ 786.153659] env[61839]: ERROR nova.compute.manager [instance: 619ec15b-463a-4daa-bffe-7d7a6022b962] nova.exception.PortBindingFailed: Binding failed for port 032feb82-164b-4265-a01b-c038ccb8a163, please check neutron logs for more information. [ 786.153659] env[61839]: ERROR nova.compute.manager [instance: 619ec15b-463a-4daa-bffe-7d7a6022b962] [ 786.153659] env[61839]: DEBUG nova.compute.utils [None req-ba95565c-feba-4942-8392-448356c664a8 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] [instance: 619ec15b-463a-4daa-bffe-7d7a6022b962] Binding failed for port 032feb82-164b-4265-a01b-c038ccb8a163, please check neutron logs for more information. {{(pid=61839) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 786.154136] env[61839]: DEBUG oslo_concurrency.lockutils [None req-b5bdc0dc-c0c3-4680-812b-c7e0bf9d39a6 tempest-InstanceActionsNegativeTestJSON-917154709 tempest-InstanceActionsNegativeTestJSON-917154709-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.764s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 786.157723] env[61839]: DEBUG nova.compute.manager [None req-ba95565c-feba-4942-8392-448356c664a8 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] [instance: 619ec15b-463a-4daa-bffe-7d7a6022b962] Build of instance 619ec15b-463a-4daa-bffe-7d7a6022b962 was re-scheduled: Binding failed for port 032feb82-164b-4265-a01b-c038ccb8a163, please check neutron logs for more information. 
{{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 786.158222] env[61839]: DEBUG nova.compute.manager [None req-ba95565c-feba-4942-8392-448356c664a8 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] [instance: 619ec15b-463a-4daa-bffe-7d7a6022b962] Unplugging VIFs for instance {{(pid=61839) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 786.158550] env[61839]: DEBUG oslo_concurrency.lockutils [None req-ba95565c-feba-4942-8392-448356c664a8 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] Acquiring lock "refresh_cache-619ec15b-463a-4daa-bffe-7d7a6022b962" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 786.158776] env[61839]: DEBUG oslo_concurrency.lockutils [None req-ba95565c-feba-4942-8392-448356c664a8 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] Acquired lock "refresh_cache-619ec15b-463a-4daa-bffe-7d7a6022b962" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 786.159094] env[61839]: DEBUG nova.network.neutron [None req-ba95565c-feba-4942-8392-448356c664a8 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] [instance: 619ec15b-463a-4daa-bffe-7d7a6022b962] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 786.232621] env[61839]: INFO nova.compute.manager [None req-c7d638c4-b228-45e0-b361-03265858cd51 tempest-ServersAaction247Test-1570847378 tempest-ServersAaction247Test-1570847378-project-member] [instance: 0bc0eefd-8a56-4cd6-a0b5-818cc437d917] instance snapshotting [ 786.233260] env[61839]: DEBUG nova.objects.instance [None req-c7d638c4-b228-45e0-b361-03265858cd51 tempest-ServersAaction247Test-1570847378 tempest-ServersAaction247Test-1570847378-project-member] Lazy-loading 'flavor' on Instance uuid 0bc0eefd-8a56-4cd6-a0b5-818cc437d917 {{(pid=61839) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 786.296408] env[61839]: DEBUG nova.compute.manager [None req-85100d62-2957-4746-911c-e5a5695cb3f6 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 5c29c188-a34b-4751-9f8b-166af7b15088] Start spawning the instance on the hypervisor. {{(pid=61839) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 786.312233] env[61839]: DEBUG nova.network.neutron [None req-78cb2bb4-fa32-4f8b-ac66-631fd240ea4c tempest-ServersAaction247Test-1570847378 tempest-ServersAaction247Test-1570847378-project-member] [instance: 0bc0eefd-8a56-4cd6-a0b5-818cc437d917] Instance cache missing network info. 
{{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 786.322880] env[61839]: DEBUG nova.virt.hardware [None req-85100d62-2957-4746-911c-e5a5695cb3f6 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-18T16:51:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-18T16:50:57Z,direct_url=,disk_format='vmdk',id=e497cc62-282a-4a70-9770-22d80d8a1013,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a98e034276e44534a9feace637762da7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-18T16:50:58Z,virtual_size=,visibility=), allow threads: False {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 786.323148] env[61839]: DEBUG nova.virt.hardware [None req-85100d62-2957-4746-911c-e5a5695cb3f6 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Flavor limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 786.323306] env[61839]: DEBUG nova.virt.hardware [None req-85100d62-2957-4746-911c-e5a5695cb3f6 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Image limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 786.323483] env[61839]: DEBUG nova.virt.hardware [None req-85100d62-2957-4746-911c-e5a5695cb3f6 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Flavor pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 786.323681] env[61839]: DEBUG nova.virt.hardware [None req-85100d62-2957-4746-911c-e5a5695cb3f6 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Image pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 786.323908] env[61839]: DEBUG nova.virt.hardware [None req-85100d62-2957-4746-911c-e5a5695cb3f6 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 786.324148] env[61839]: DEBUG nova.virt.hardware [None req-85100d62-2957-4746-911c-e5a5695cb3f6 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 786.324313] env[61839]: DEBUG nova.virt.hardware [None req-85100d62-2957-4746-911c-e5a5695cb3f6 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61839) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 786.324486] env[61839]: DEBUG nova.virt.hardware [None req-85100d62-2957-4746-911c-e5a5695cb3f6 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Got 1 possible topologies {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 786.324646] env[61839]: DEBUG nova.virt.hardware [None req-85100d62-2957-4746-911c-e5a5695cb3f6 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 786.324819] env[61839]: DEBUG nova.virt.hardware [None req-85100d62-2957-4746-911c-e5a5695cb3f6 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 786.325932] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db9aab16-78ab-4ce9-9ffa-11dcb8cc89d4 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.335262] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aad62a1f-5bdb-4f1b-b437-5f31d63e74ae {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.363724] env[61839]: DEBUG nova.network.neutron [None req-78cb2bb4-fa32-4f8b-ac66-631fd240ea4c tempest-ServersAaction247Test-1570847378 tempest-ServersAaction247Test-1570847378-project-member] [instance: 0bc0eefd-8a56-4cd6-a0b5-818cc437d917] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 786.497771] env[61839]: DEBUG oslo_concurrency.lockutils [None req-130eacd9-cc23-48e6-bf9f-acda8817b91f tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] Releasing lock "refresh_cache-687e6bf8-cf2a-4a9f-ad87-b42c4d0ced0a" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 786.498130] env[61839]: DEBUG nova.compute.manager [None req-130eacd9-cc23-48e6-bf9f-acda8817b91f tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] [instance: 687e6bf8-cf2a-4a9f-ad87-b42c4d0ced0a] Instance network_info: |[{"id": "bcfc5607-df16-4357-b78d-33b0451f9bb6", "address": "fa:16:3e:19:37:17", "network": {"id": "113e0d38-b139-40ea-a049-754ccec8453a", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-827804901-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f95ba3c476274972b332b5f24ad7cb1c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ac4015e0-e5e7-4b3f-8d8e-ef4501eea9aa", "external-id": 
"nsx-vlan-transportzone-132", "segmentation_id": 132, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbcfc5607-df", "ovs_interfaceid": "bcfc5607-df16-4357-b78d-33b0451f9bb6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 786.498632] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-130eacd9-cc23-48e6-bf9f-acda8817b91f tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] [instance: 687e6bf8-cf2a-4a9f-ad87-b42c4d0ced0a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:19:37:17', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ac4015e0-e5e7-4b3f-8d8e-ef4501eea9aa', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'bcfc5607-df16-4357-b78d-33b0451f9bb6', 'vif_model': 'vmxnet3'}] {{(pid=61839) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 786.507152] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-130eacd9-cc23-48e6-bf9f-acda8817b91f tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] Creating folder: Project (f95ba3c476274972b332b5f24ad7cb1c). Parent ref: group-v281288. {{(pid=61839) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 786.508564] env[61839]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-770ddee9-925b-4f77-abbd-6ea52d5e3d30 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.519210] env[61839]: INFO nova.virt.vmwareapi.vm_util [None req-130eacd9-cc23-48e6-bf9f-acda8817b91f tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] Created folder: Project (f95ba3c476274972b332b5f24ad7cb1c) in parent group-v281288. [ 786.519520] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-130eacd9-cc23-48e6-bf9f-acda8817b91f tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] Creating folder: Instances. Parent ref: group-v281312. {{(pid=61839) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 786.519760] env[61839]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b1ec4630-cce3-47c6-825f-64f04432fb54 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.528316] env[61839]: INFO nova.virt.vmwareapi.vm_util [None req-130eacd9-cc23-48e6-bf9f-acda8817b91f tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] Created folder: Instances in parent group-v281312. [ 786.528544] env[61839]: DEBUG oslo.service.loopingcall [None req-130eacd9-cc23-48e6-bf9f-acda8817b91f tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 786.528735] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 687e6bf8-cf2a-4a9f-ad87-b42c4d0ced0a] Creating VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 786.528934] env[61839]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-508b3492-8ce1-4e36-b7f6-970b8c0163a4 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.547433] env[61839]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 786.547433] env[61839]: value = "task-1314322" [ 786.547433] env[61839]: _type = "Task" [ 786.547433] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 786.555149] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314322, 'name': CreateVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 786.594952] env[61839]: DEBUG oslo_concurrency.lockutils [None req-ebcaba21-dc43-4473-b939-c569f2c1af99 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Releasing lock "refresh_cache-e65da0fd-e877-4b25-a319-e4d65397056a" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 786.595294] env[61839]: DEBUG nova.compute.manager [None req-ebcaba21-dc43-4473-b939-c569f2c1af99 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: e65da0fd-e877-4b25-a319-e4d65397056a] Instance network_info: |[{"id": "140a636b-8035-4bf7-8462-ae02a41f2316", "address": "fa:16:3e:ac:25:c0", "network": {"id": "8c9e6595-896e-44e0-996a-ff86bae5ad8e", "bridge": "br-int", "label": "tempest-ImagesTestJSON-788251660-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "251b0d2531ba4f14a2eb6ea75382c418", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ce17e10e-2fb0-4191-afee-e2b89fa15074", "external-id": "nsx-vlan-transportzone-352", "segmentation_id": 352, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap140a636b-80", "ovs_interfaceid": "140a636b-8035-4bf7-8462-ae02a41f2316", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 786.595583] env[61839]: DEBUG oslo_concurrency.lockutils [req-16527f43-9581-4936-8ec0-45d43aa5f302 req-8416d8c1-2d49-4299-930f-79f1e0a54f7e service nova] Acquired lock "refresh_cache-e65da0fd-e877-4b25-a319-e4d65397056a" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 786.595762] env[61839]: DEBUG nova.network.neutron [req-16527f43-9581-4936-8ec0-45d43aa5f302 req-8416d8c1-2d49-4299-930f-79f1e0a54f7e service nova] [instance: e65da0fd-e877-4b25-a319-e4d65397056a] Refreshing network info cache for 
port 140a636b-8035-4bf7-8462-ae02a41f2316 {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 786.596961] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-ebcaba21-dc43-4473-b939-c569f2c1af99 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: e65da0fd-e877-4b25-a319-e4d65397056a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ac:25:c0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ce17e10e-2fb0-4191-afee-e2b89fa15074', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '140a636b-8035-4bf7-8462-ae02a41f2316', 'vif_model': 'vmxnet3'}] {{(pid=61839) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 786.605221] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-ebcaba21-dc43-4473-b939-c569f2c1af99 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Creating folder: Project (251b0d2531ba4f14a2eb6ea75382c418). Parent ref: group-v281288. {{(pid=61839) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 786.608198] env[61839]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7fc692e3-386b-4cd6-a889-d74a9ba9a688 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.618629] env[61839]: INFO nova.virt.vmwareapi.vm_util [None req-ebcaba21-dc43-4473-b939-c569f2c1af99 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Created folder: Project (251b0d2531ba4f14a2eb6ea75382c418) in parent group-v281288. [ 786.618878] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-ebcaba21-dc43-4473-b939-c569f2c1af99 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Creating folder: Instances. Parent ref: group-v281315. {{(pid=61839) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 786.619150] env[61839]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9af269ed-2e33-4558-9d72-59dca8524c88 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.630133] env[61839]: INFO nova.virt.vmwareapi.vm_util [None req-ebcaba21-dc43-4473-b939-c569f2c1af99 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Created folder: Instances in parent group-v281315. [ 786.630376] env[61839]: DEBUG oslo.service.loopingcall [None req-ebcaba21-dc43-4473-b939-c569f2c1af99 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 786.630559] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e65da0fd-e877-4b25-a319-e4d65397056a] Creating VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 786.630759] env[61839]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a8b28db4-d1d6-4728-8856-e1abbafec8e0 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.658388] env[61839]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 786.658388] env[61839]: value = "task-1314325" [ 786.658388] env[61839]: _type = "Task" [ 786.658388] env[61839]: } to complete. 
{{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 786.671464] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314325, 'name': CreateVM_Task} progress is 5%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 786.700728] env[61839]: DEBUG nova.network.neutron [None req-ba95565c-feba-4942-8392-448356c664a8 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] [instance: 619ec15b-463a-4daa-bffe-7d7a6022b962] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 786.738681] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6913ab1-9440-46af-950d-6261ed3478a5 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.758585] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b05867e-e428-499e-b068-8c51cd819a18 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.866755] env[61839]: DEBUG oslo_concurrency.lockutils [None req-78cb2bb4-fa32-4f8b-ac66-631fd240ea4c tempest-ServersAaction247Test-1570847378 tempest-ServersAaction247Test-1570847378-project-member] Releasing lock "refresh_cache-0bc0eefd-8a56-4cd6-a0b5-818cc437d917" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 786.868502] env[61839]: DEBUG nova.compute.manager [None req-78cb2bb4-fa32-4f8b-ac66-631fd240ea4c tempest-ServersAaction247Test-1570847378 tempest-ServersAaction247Test-1570847378-project-member] [instance: 0bc0eefd-8a56-4cd6-a0b5-818cc437d917] Start destroying the instance on the hypervisor. 
{{(pid=61839) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 786.868502] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-78cb2bb4-fa32-4f8b-ac66-631fd240ea4c tempest-ServersAaction247Test-1570847378 tempest-ServersAaction247Test-1570847378-project-member] [instance: 0bc0eefd-8a56-4cd6-a0b5-818cc437d917] Destroying instance {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 786.868720] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7633bffc-ee0e-492a-acb5-4ad9099ea6ce {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.879412] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-78cb2bb4-fa32-4f8b-ac66-631fd240ea4c tempest-ServersAaction247Test-1570847378 tempest-ServersAaction247Test-1570847378-project-member] [instance: 0bc0eefd-8a56-4cd6-a0b5-818cc437d917] Powering off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 786.879679] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6a71b0f4-bf97-4753-84d7-986b81108788 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.892325] env[61839]: DEBUG oslo_vmware.api [None req-78cb2bb4-fa32-4f8b-ac66-631fd240ea4c tempest-ServersAaction247Test-1570847378 tempest-ServersAaction247Test-1570847378-project-member] Waiting for the task: (returnval){ [ 786.892325] env[61839]: value = "task-1314326" [ 786.892325] env[61839]: _type = "Task" [ 786.892325] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 786.901834] env[61839]: DEBUG oslo_vmware.api [None req-78cb2bb4-fa32-4f8b-ac66-631fd240ea4c tempest-ServersAaction247Test-1570847378 tempest-ServersAaction247Test-1570847378-project-member] Task: {'id': task-1314326, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 786.928202] env[61839]: DEBUG nova.network.neutron [None req-ba95565c-feba-4942-8392-448356c664a8 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] [instance: 619ec15b-463a-4daa-bffe-7d7a6022b962] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 787.026482] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cf3a9ff-8ab7-449f-ba01-12173c76ce9d {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.036906] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-152e9047-f221-4375-82ab-985c9795df13 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.071966] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-885dbd61-e66f-4431-83a9-dc3a4aa1519c {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.079464] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314322, 'name': CreateVM_Task, 'duration_secs': 0.409741} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 787.081416] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 687e6bf8-cf2a-4a9f-ad87-b42c4d0ced0a] Created VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 787.083968] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-931fb57e-3198-4f7f-abd4-5707300dcc1a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.097499] env[61839]: DEBUG nova.compute.provider_tree [None req-b5bdc0dc-c0c3-4680-812b-c7e0bf9d39a6 tempest-InstanceActionsNegativeTestJSON-917154709 tempest-InstanceActionsNegativeTestJSON-917154709-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 787.099543] env[61839]: DEBUG oslo_concurrency.lockutils [None req-130eacd9-cc23-48e6-bf9f-acda8817b91f tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 787.099707] env[61839]: DEBUG oslo_concurrency.lockutils [None req-130eacd9-cc23-48e6-bf9f-acda8817b91f tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 787.100060] env[61839]: DEBUG oslo_concurrency.lockutils [None req-130eacd9-cc23-48e6-bf9f-acda8817b91f tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 787.100306] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1d82c84c-ecc4-4abf-9dd9-3daa40766a02 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.104846] env[61839]: DEBUG oslo_vmware.api [None req-130eacd9-cc23-48e6-bf9f-acda8817b91f tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] Waiting for the task: (returnval){ [ 787.104846] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]525d7efb-1576-4ff3-0653-6e458e143705" [ 787.104846] env[61839]: _type = "Task" [ 787.104846] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 787.112886] env[61839]: DEBUG oslo_vmware.api [None req-130eacd9-cc23-48e6-bf9f-acda8817b91f tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]525d7efb-1576-4ff3-0653-6e458e143705, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 787.168847] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314325, 'name': CreateVM_Task, 'duration_secs': 0.383618} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 787.169029] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e65da0fd-e877-4b25-a319-e4d65397056a] Created VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 787.169675] env[61839]: DEBUG oslo_concurrency.lockutils [None req-ebcaba21-dc43-4473-b939-c569f2c1af99 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 787.175972] env[61839]: DEBUG nova.network.neutron [req-16527f43-9581-4936-8ec0-45d43aa5f302 req-8416d8c1-2d49-4299-930f-79f1e0a54f7e service nova] [instance: e65da0fd-e877-4b25-a319-e4d65397056a] Updated VIF entry in instance network info cache for port 140a636b-8035-4bf7-8462-ae02a41f2316. {{(pid=61839) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 787.176342] env[61839]: DEBUG nova.network.neutron [req-16527f43-9581-4936-8ec0-45d43aa5f302 req-8416d8c1-2d49-4299-930f-79f1e0a54f7e service nova] [instance: e65da0fd-e877-4b25-a319-e4d65397056a] Updating instance_info_cache with network_info: [{"id": "140a636b-8035-4bf7-8462-ae02a41f2316", "address": "fa:16:3e:ac:25:c0", "network": {"id": "8c9e6595-896e-44e0-996a-ff86bae5ad8e", "bridge": "br-int", "label": "tempest-ImagesTestJSON-788251660-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "251b0d2531ba4f14a2eb6ea75382c418", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ce17e10e-2fb0-4191-afee-e2b89fa15074", "external-id": "nsx-vlan-transportzone-352", "segmentation_id": 352, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap140a636b-80", "ovs_interfaceid": "140a636b-8035-4bf7-8462-ae02a41f2316", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 787.268659] env[61839]: DEBUG nova.compute.manager [None req-c7d638c4-b228-45e0-b361-03265858cd51 tempest-ServersAaction247Test-1570847378 tempest-ServersAaction247Test-1570847378-project-member] [instance: 0bc0eefd-8a56-4cd6-a0b5-818cc437d917] Instance disappeared during snapshot {{(pid=61839) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4494}} [ 787.405955] env[61839]: DEBUG oslo_vmware.api [None req-78cb2bb4-fa32-4f8b-ac66-631fd240ea4c tempest-ServersAaction247Test-1570847378 tempest-ServersAaction247Test-1570847378-project-member] Task: {'id': task-1314326, 'name': PowerOffVM_Task, 'duration_secs': 0.132172} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 787.407941] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-78cb2bb4-fa32-4f8b-ac66-631fd240ea4c tempest-ServersAaction247Test-1570847378 tempest-ServersAaction247Test-1570847378-project-member] [instance: 0bc0eefd-8a56-4cd6-a0b5-818cc437d917] Powered off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 787.407941] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-78cb2bb4-fa32-4f8b-ac66-631fd240ea4c tempest-ServersAaction247Test-1570847378 tempest-ServersAaction247Test-1570847378-project-member] [instance: 0bc0eefd-8a56-4cd6-a0b5-818cc437d917] Unregistering the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 787.407941] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a9407db8-003a-451e-a160-1ebdd65b21d3 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.432663] env[61839]: DEBUG oslo_concurrency.lockutils [None req-ba95565c-feba-4942-8392-448356c664a8 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] Releasing lock "refresh_cache-619ec15b-463a-4daa-bffe-7d7a6022b962" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 787.432663] env[61839]: DEBUG nova.compute.manager [None req-ba95565c-feba-4942-8392-448356c664a8 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61839) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 787.432663] env[61839]: DEBUG nova.compute.manager [None req-ba95565c-feba-4942-8392-448356c664a8 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] [instance: 619ec15b-463a-4daa-bffe-7d7a6022b962] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 787.432663] env[61839]: DEBUG nova.network.neutron [None req-ba95565c-feba-4942-8392-448356c664a8 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] [instance: 619ec15b-463a-4daa-bffe-7d7a6022b962] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 787.434370] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-78cb2bb4-fa32-4f8b-ac66-631fd240ea4c tempest-ServersAaction247Test-1570847378 tempest-ServersAaction247Test-1570847378-project-member] [instance: 0bc0eefd-8a56-4cd6-a0b5-818cc437d917] Unregistered the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 787.434659] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-78cb2bb4-fa32-4f8b-ac66-631fd240ea4c tempest-ServersAaction247Test-1570847378 tempest-ServersAaction247Test-1570847378-project-member] [instance: 0bc0eefd-8a56-4cd6-a0b5-818cc437d917] Deleting contents of the VM from datastore datastore1 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 787.434934] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-78cb2bb4-fa32-4f8b-ac66-631fd240ea4c tempest-ServersAaction247Test-1570847378 tempest-ServersAaction247Test-1570847378-project-member] Deleting the datastore file [datastore1] 
0bc0eefd-8a56-4cd6-a0b5-818cc437d917 {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 787.435884] env[61839]: DEBUG nova.compute.manager [None req-c7d638c4-b228-45e0-b361-03265858cd51 tempest-ServersAaction247Test-1570847378 tempest-ServersAaction247Test-1570847378-project-member] [instance: 0bc0eefd-8a56-4cd6-a0b5-818cc437d917] Found 0 images (rotation: 2) {{(pid=61839) _rotate_backups /opt/stack/nova/nova/compute/manager.py:4554}} [ 787.440466] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4092c0bc-7eff-476d-8ed4-ccea06e0281e {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.447582] env[61839]: DEBUG oslo_vmware.api [None req-78cb2bb4-fa32-4f8b-ac66-631fd240ea4c tempest-ServersAaction247Test-1570847378 tempest-ServersAaction247Test-1570847378-project-member] Waiting for the task: (returnval){ [ 787.447582] env[61839]: value = "task-1314328" [ 787.447582] env[61839]: _type = "Task" [ 787.447582] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 787.454974] env[61839]: DEBUG nova.network.neutron [None req-ba95565c-feba-4942-8392-448356c664a8 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] [instance: 619ec15b-463a-4daa-bffe-7d7a6022b962] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 787.460371] env[61839]: DEBUG oslo_vmware.api [None req-78cb2bb4-fa32-4f8b-ac66-631fd240ea4c tempest-ServersAaction247Test-1570847378 tempest-ServersAaction247Test-1570847378-project-member] Task: {'id': task-1314328, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 787.468631] env[61839]: DEBUG nova.network.neutron [None req-85100d62-2957-4746-911c-e5a5695cb3f6 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 5c29c188-a34b-4751-9f8b-166af7b15088] Successfully updated port: 2d6e228a-75ff-4bff-bc8d-bdde3218cf40 {{(pid=61839) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 787.603738] env[61839]: DEBUG nova.scheduler.client.report [None req-b5bdc0dc-c0c3-4680-812b-c7e0bf9d39a6 tempest-InstanceActionsNegativeTestJSON-917154709 tempest-InstanceActionsNegativeTestJSON-917154709-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 787.617074] env[61839]: DEBUG oslo_vmware.api [None req-130eacd9-cc23-48e6-bf9f-acda8817b91f tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]525d7efb-1576-4ff3-0653-6e458e143705, 'name': SearchDatastore_Task, 'duration_secs': 0.011332} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 787.617370] env[61839]: DEBUG oslo_concurrency.lockutils [None req-130eacd9-cc23-48e6-bf9f-acda8817b91f tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 787.617593] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-130eacd9-cc23-48e6-bf9f-acda8817b91f tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] [instance: 687e6bf8-cf2a-4a9f-ad87-b42c4d0ced0a] Processing image e497cc62-282a-4a70-9770-22d80d8a1013 {{(pid=61839) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 787.617822] env[61839]: DEBUG oslo_concurrency.lockutils [None req-130eacd9-cc23-48e6-bf9f-acda8817b91f tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 787.617966] env[61839]: DEBUG oslo_concurrency.lockutils [None req-130eacd9-cc23-48e6-bf9f-acda8817b91f tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 787.618159] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-130eacd9-cc23-48e6-bf9f-acda8817b91f tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 787.618422] env[61839]: DEBUG oslo_concurrency.lockutils [None req-ebcaba21-dc43-4473-b939-c569f2c1af99 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 787.618726] env[61839]: DEBUG oslo_concurrency.lockutils [None req-ebcaba21-dc43-4473-b939-c569f2c1af99 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 787.618929] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ccd9931a-90ce-4f0b-9a97-77e0557efb36 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.620613] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-38c7b830-f49d-4a27-a608-385e3a108869 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.626051] env[61839]: DEBUG oslo_vmware.api [None 
req-ebcaba21-dc43-4473-b939-c569f2c1af99 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Waiting for the task: (returnval){ [ 787.626051] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]5250da44-5e79-4c37-13d5-32f947014dde" [ 787.626051] env[61839]: _type = "Task" [ 787.626051] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 787.632981] env[61839]: DEBUG oslo_vmware.api [None req-ebcaba21-dc43-4473-b939-c569f2c1af99 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]5250da44-5e79-4c37-13d5-32f947014dde, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 787.644701] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-130eacd9-cc23-48e6-bf9f-acda8817b91f tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 787.645855] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-130eacd9-cc23-48e6-bf9f-acda8817b91f tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61839) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 787.645855] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b6213e57-5652-473f-94c2-711454ccf84f {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.651653] env[61839]: DEBUG oslo_vmware.api [None req-130eacd9-cc23-48e6-bf9f-acda8817b91f tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] Waiting for the task: (returnval){ [ 787.651653] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]5260c9de-cc94-9d86-b0e3-f859f974f378" [ 787.651653] env[61839]: _type = "Task" [ 787.651653] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 787.658469] env[61839]: DEBUG oslo_vmware.api [None req-130eacd9-cc23-48e6-bf9f-acda8817b91f tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]5260c9de-cc94-9d86-b0e3-f859f974f378, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 787.679258] env[61839]: DEBUG oslo_concurrency.lockutils [req-16527f43-9581-4936-8ec0-45d43aa5f302 req-8416d8c1-2d49-4299-930f-79f1e0a54f7e service nova] Releasing lock "refresh_cache-e65da0fd-e877-4b25-a319-e4d65397056a" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 787.957667] env[61839]: DEBUG oslo_vmware.api [None req-78cb2bb4-fa32-4f8b-ac66-631fd240ea4c tempest-ServersAaction247Test-1570847378 tempest-ServersAaction247Test-1570847378-project-member] Task: {'id': task-1314328, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.116957} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 787.957930] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-78cb2bb4-fa32-4f8b-ac66-631fd240ea4c tempest-ServersAaction247Test-1570847378 tempest-ServersAaction247Test-1570847378-project-member] Deleted the datastore file {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 787.958127] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-78cb2bb4-fa32-4f8b-ac66-631fd240ea4c tempest-ServersAaction247Test-1570847378 tempest-ServersAaction247Test-1570847378-project-member] [instance: 0bc0eefd-8a56-4cd6-a0b5-818cc437d917] Deleted contents of the VM from datastore datastore1 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 787.958305] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-78cb2bb4-fa32-4f8b-ac66-631fd240ea4c tempest-ServersAaction247Test-1570847378 tempest-ServersAaction247Test-1570847378-project-member] [instance: 0bc0eefd-8a56-4cd6-a0b5-818cc437d917] Instance destroyed {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 787.958480] env[61839]: INFO nova.compute.manager [None req-78cb2bb4-fa32-4f8b-ac66-631fd240ea4c tempest-ServersAaction247Test-1570847378 tempest-ServersAaction247Test-1570847378-project-member] [instance: 0bc0eefd-8a56-4cd6-a0b5-818cc437d917] Took 1.09 seconds to destroy the instance on the hypervisor. [ 787.958721] env[61839]: DEBUG oslo.service.loopingcall [None req-78cb2bb4-fa32-4f8b-ac66-631fd240ea4c tempest-ServersAaction247Test-1570847378 tempest-ServersAaction247Test-1570847378-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.
{{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 787.958908] env[61839]: DEBUG nova.compute.manager [-] [instance: 0bc0eefd-8a56-4cd6-a0b5-818cc437d917] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 787.959009] env[61839]: DEBUG nova.network.neutron [-] [instance: 0bc0eefd-8a56-4cd6-a0b5-818cc437d917] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 787.962169] env[61839]: DEBUG nova.network.neutron [None req-ba95565c-feba-4942-8392-448356c664a8 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] [instance: 619ec15b-463a-4daa-bffe-7d7a6022b962] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 787.971158] env[61839]: DEBUG oslo_concurrency.lockutils [None req-85100d62-2957-4746-911c-e5a5695cb3f6 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Acquiring lock "refresh_cache-5c29c188-a34b-4751-9f8b-166af7b15088" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 787.971244] env[61839]: DEBUG oslo_concurrency.lockutils [None req-85100d62-2957-4746-911c-e5a5695cb3f6 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Acquired lock "refresh_cache-5c29c188-a34b-4751-9f8b-166af7b15088" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 787.971319] env[61839]: DEBUG nova.network.neutron [None req-85100d62-2957-4746-911c-e5a5695cb3f6 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 5c29c188-a34b-4751-9f8b-166af7b15088] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 787.976913] env[61839]: DEBUG nova.network.neutron [-] [instance: 0bc0eefd-8a56-4cd6-a0b5-818cc437d917] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 788.111964] env[61839]: DEBUG oslo_concurrency.lockutils [None req-b5bdc0dc-c0c3-4680-812b-c7e0bf9d39a6 tempest-InstanceActionsNegativeTestJSON-917154709 tempest-InstanceActionsNegativeTestJSON-917154709-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.958s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 788.112634] env[61839]: ERROR nova.compute.manager [None req-b5bdc0dc-c0c3-4680-812b-c7e0bf9d39a6 tempest-InstanceActionsNegativeTestJSON-917154709 tempest-InstanceActionsNegativeTestJSON-917154709-project-member] [instance: 821b784d-dc69-4c54-bccf-76693c34e19d] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port ceaf8820-e4ad-40ab-b463-0c7c1c6fa156, please check neutron logs for more information. 
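The PortBindingFailed above originates in the last frame of the traceback that follows: when Neutron cannot bind a port, Nova's network layer (_ensure_no_port_binding_failure, nova/network/neutron.py:294 in the frames below) converts the failed binding into an exception that aborts the build and triggers the re-schedule seen later in this log. A minimal sketch of that check, assuming only Neutron's conventional 'binding_failed' sentinel in the port's binding:vif_type field (the class and helper below are simplified stand-ins, not the Nova source):

class PortBindingFailed(Exception):
    # Mirrors the message format of nova.exception.PortBindingFailed.
    def __init__(self, port_id):
        super().__init__(f"Binding failed for port {port_id}, please "
                         "check neutron logs for more information.")

def ensure_no_port_binding_failure(port):
    # Neutron reports a port it could not bind by setting the port's
    # binding:vif_type to the sentinel value 'binding_failed'.
    if port.get('binding:vif_type') == 'binding_failed':
        raise PortBindingFailed(port_id=port['id'])

try:
    ensure_no_port_binding_failure({
        'id': 'ceaf8820-e4ad-40ab-b463-0c7c1c6fa156',  # port from the log
        'binding:vif_type': 'binding_failed',
    })
except PortBindingFailed as exc:
    print(exc)

The traceback below also shows why the exception surfaces where it does: it is raised inside the eventlet greenthread running _allocate_network_async and re-raised when the VMware spawn path first iterates network_info in get_vif_info, not at the original Neutron call site.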
[ 788.112634] env[61839]: ERROR nova.compute.manager [instance: 821b784d-dc69-4c54-bccf-76693c34e19d] Traceback (most recent call last): [ 788.112634] env[61839]: ERROR nova.compute.manager [instance: 821b784d-dc69-4c54-bccf-76693c34e19d] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 788.112634] env[61839]: ERROR nova.compute.manager [instance: 821b784d-dc69-4c54-bccf-76693c34e19d] self.driver.spawn(context, instance, image_meta, [ 788.112634] env[61839]: ERROR nova.compute.manager [instance: 821b784d-dc69-4c54-bccf-76693c34e19d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 788.112634] env[61839]: ERROR nova.compute.manager [instance: 821b784d-dc69-4c54-bccf-76693c34e19d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 788.112634] env[61839]: ERROR nova.compute.manager [instance: 821b784d-dc69-4c54-bccf-76693c34e19d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 788.112634] env[61839]: ERROR nova.compute.manager [instance: 821b784d-dc69-4c54-bccf-76693c34e19d] vm_ref = self.build_virtual_machine(instance, [ 788.112634] env[61839]: ERROR nova.compute.manager [instance: 821b784d-dc69-4c54-bccf-76693c34e19d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 788.112634] env[61839]: ERROR nova.compute.manager [instance: 821b784d-dc69-4c54-bccf-76693c34e19d] vif_infos = vmwarevif.get_vif_info(self._session, [ 788.112634] env[61839]: ERROR nova.compute.manager [instance: 821b784d-dc69-4c54-bccf-76693c34e19d] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 788.112898] env[61839]: ERROR nova.compute.manager [instance: 821b784d-dc69-4c54-bccf-76693c34e19d] for vif in network_info: [ 788.112898] env[61839]: ERROR nova.compute.manager [instance: 821b784d-dc69-4c54-bccf-76693c34e19d] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 788.112898] env[61839]: ERROR nova.compute.manager [instance: 821b784d-dc69-4c54-bccf-76693c34e19d] return self._sync_wrapper(fn, *args, **kwargs) [ 788.112898] env[61839]: ERROR nova.compute.manager [instance: 821b784d-dc69-4c54-bccf-76693c34e19d] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 788.112898] env[61839]: ERROR nova.compute.manager [instance: 821b784d-dc69-4c54-bccf-76693c34e19d] self.wait() [ 788.112898] env[61839]: ERROR nova.compute.manager [instance: 821b784d-dc69-4c54-bccf-76693c34e19d] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 788.112898] env[61839]: ERROR nova.compute.manager [instance: 821b784d-dc69-4c54-bccf-76693c34e19d] self[:] = self._gt.wait() [ 788.112898] env[61839]: ERROR nova.compute.manager [instance: 821b784d-dc69-4c54-bccf-76693c34e19d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 788.112898] env[61839]: ERROR nova.compute.manager [instance: 821b784d-dc69-4c54-bccf-76693c34e19d] return self._exit_event.wait() [ 788.112898] env[61839]: ERROR nova.compute.manager [instance: 821b784d-dc69-4c54-bccf-76693c34e19d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 788.112898] env[61839]: ERROR nova.compute.manager [instance: 821b784d-dc69-4c54-bccf-76693c34e19d] result = hub.switch() [ 788.112898] env[61839]: ERROR nova.compute.manager [instance: 821b784d-dc69-4c54-bccf-76693c34e19d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
788.112898] env[61839]: ERROR nova.compute.manager [instance: 821b784d-dc69-4c54-bccf-76693c34e19d] return self.greenlet.switch() [ 788.113287] env[61839]: ERROR nova.compute.manager [instance: 821b784d-dc69-4c54-bccf-76693c34e19d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 788.113287] env[61839]: ERROR nova.compute.manager [instance: 821b784d-dc69-4c54-bccf-76693c34e19d] result = function(*args, **kwargs) [ 788.113287] env[61839]: ERROR nova.compute.manager [instance: 821b784d-dc69-4c54-bccf-76693c34e19d] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 788.113287] env[61839]: ERROR nova.compute.manager [instance: 821b784d-dc69-4c54-bccf-76693c34e19d] return func(*args, **kwargs) [ 788.113287] env[61839]: ERROR nova.compute.manager [instance: 821b784d-dc69-4c54-bccf-76693c34e19d] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 788.113287] env[61839]: ERROR nova.compute.manager [instance: 821b784d-dc69-4c54-bccf-76693c34e19d] raise e [ 788.113287] env[61839]: ERROR nova.compute.manager [instance: 821b784d-dc69-4c54-bccf-76693c34e19d] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 788.113287] env[61839]: ERROR nova.compute.manager [instance: 821b784d-dc69-4c54-bccf-76693c34e19d] nwinfo = self.network_api.allocate_for_instance( [ 788.113287] env[61839]: ERROR nova.compute.manager [instance: 821b784d-dc69-4c54-bccf-76693c34e19d] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 788.113287] env[61839]: ERROR nova.compute.manager [instance: 821b784d-dc69-4c54-bccf-76693c34e19d] created_port_ids = self._update_ports_for_instance( [ 788.113287] env[61839]: ERROR nova.compute.manager [instance: 821b784d-dc69-4c54-bccf-76693c34e19d] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 788.113287] env[61839]: ERROR nova.compute.manager [instance: 821b784d-dc69-4c54-bccf-76693c34e19d] with excutils.save_and_reraise_exception(): [ 788.113287] env[61839]: ERROR nova.compute.manager [instance: 821b784d-dc69-4c54-bccf-76693c34e19d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 788.113643] env[61839]: ERROR nova.compute.manager [instance: 821b784d-dc69-4c54-bccf-76693c34e19d] self.force_reraise() [ 788.113643] env[61839]: ERROR nova.compute.manager [instance: 821b784d-dc69-4c54-bccf-76693c34e19d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 788.113643] env[61839]: ERROR nova.compute.manager [instance: 821b784d-dc69-4c54-bccf-76693c34e19d] raise self.value [ 788.113643] env[61839]: ERROR nova.compute.manager [instance: 821b784d-dc69-4c54-bccf-76693c34e19d] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 788.113643] env[61839]: ERROR nova.compute.manager [instance: 821b784d-dc69-4c54-bccf-76693c34e19d] updated_port = self._update_port( [ 788.113643] env[61839]: ERROR nova.compute.manager [instance: 821b784d-dc69-4c54-bccf-76693c34e19d] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 788.113643] env[61839]: ERROR nova.compute.manager [instance: 821b784d-dc69-4c54-bccf-76693c34e19d] _ensure_no_port_binding_failure(port) [ 788.113643] env[61839]: ERROR nova.compute.manager [instance: 821b784d-dc69-4c54-bccf-76693c34e19d] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 788.113643] env[61839]: ERROR nova.compute.manager [instance: 821b784d-dc69-4c54-bccf-76693c34e19d] raise exception.PortBindingFailed(port_id=port['id']) [ 788.113643] env[61839]: ERROR nova.compute.manager [instance: 821b784d-dc69-4c54-bccf-76693c34e19d] nova.exception.PortBindingFailed: Binding failed for port ceaf8820-e4ad-40ab-b463-0c7c1c6fa156, please check neutron logs for more information. [ 788.113643] env[61839]: ERROR nova.compute.manager [instance: 821b784d-dc69-4c54-bccf-76693c34e19d] [ 788.113923] env[61839]: DEBUG nova.compute.utils [None req-b5bdc0dc-c0c3-4680-812b-c7e0bf9d39a6 tempest-InstanceActionsNegativeTestJSON-917154709 tempest-InstanceActionsNegativeTestJSON-917154709-project-member] [instance: 821b784d-dc69-4c54-bccf-76693c34e19d] Binding failed for port ceaf8820-e4ad-40ab-b463-0c7c1c6fa156, please check neutron logs for more information. {{(pid=61839) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 788.114684] env[61839]: DEBUG oslo_concurrency.lockutils [None req-fdadb98b-f7ca-40ff-9eb5-8f15cbae89cd tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.070s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 788.116139] env[61839]: INFO nova.compute.claims [None req-fdadb98b-f7ca-40ff-9eb5-8f15cbae89cd tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 603191b6-a4b0-451b-b98b-f3dbfb684300] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 788.119006] env[61839]: DEBUG nova.compute.manager [None req-b5bdc0dc-c0c3-4680-812b-c7e0bf9d39a6 tempest-InstanceActionsNegativeTestJSON-917154709 tempest-InstanceActionsNegativeTestJSON-917154709-project-member] [instance: 821b784d-dc69-4c54-bccf-76693c34e19d] Build of instance 821b784d-dc69-4c54-bccf-76693c34e19d was re-scheduled: Binding failed for port ceaf8820-e4ad-40ab-b463-0c7c1c6fa156, please check neutron logs for more information. 
{{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 788.119452] env[61839]: DEBUG nova.compute.manager [None req-b5bdc0dc-c0c3-4680-812b-c7e0bf9d39a6 tempest-InstanceActionsNegativeTestJSON-917154709 tempest-InstanceActionsNegativeTestJSON-917154709-project-member] [instance: 821b784d-dc69-4c54-bccf-76693c34e19d] Unplugging VIFs for instance {{(pid=61839) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 788.119689] env[61839]: DEBUG oslo_concurrency.lockutils [None req-b5bdc0dc-c0c3-4680-812b-c7e0bf9d39a6 tempest-InstanceActionsNegativeTestJSON-917154709 tempest-InstanceActionsNegativeTestJSON-917154709-project-member] Acquiring lock "refresh_cache-821b784d-dc69-4c54-bccf-76693c34e19d" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 788.119885] env[61839]: DEBUG oslo_concurrency.lockutils [None req-b5bdc0dc-c0c3-4680-812b-c7e0bf9d39a6 tempest-InstanceActionsNegativeTestJSON-917154709 tempest-InstanceActionsNegativeTestJSON-917154709-project-member] Acquired lock "refresh_cache-821b784d-dc69-4c54-bccf-76693c34e19d" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 788.120062] env[61839]: DEBUG nova.network.neutron [None req-b5bdc0dc-c0c3-4680-812b-c7e0bf9d39a6 tempest-InstanceActionsNegativeTestJSON-917154709 tempest-InstanceActionsNegativeTestJSON-917154709-project-member] [instance: 821b784d-dc69-4c54-bccf-76693c34e19d] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 788.134374] env[61839]: DEBUG nova.compute.manager [req-1b516aee-072f-4bb2-9261-57c0caa43fd3 req-b161ad2b-b23d-432e-b32f-35bb45c7ed1e service nova] [instance: 5c29c188-a34b-4751-9f8b-166af7b15088] Received event network-vif-plugged-2d6e228a-75ff-4bff-bc8d-bdde3218cf40 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 788.134769] env[61839]: DEBUG oslo_concurrency.lockutils [req-1b516aee-072f-4bb2-9261-57c0caa43fd3 req-b161ad2b-b23d-432e-b32f-35bb45c7ed1e service nova] Acquiring lock "5c29c188-a34b-4751-9f8b-166af7b15088-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 788.134931] env[61839]: DEBUG oslo_concurrency.lockutils [req-1b516aee-072f-4bb2-9261-57c0caa43fd3 req-b161ad2b-b23d-432e-b32f-35bb45c7ed1e service nova] Lock "5c29c188-a34b-4751-9f8b-166af7b15088-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 788.135145] env[61839]: DEBUG oslo_concurrency.lockutils [req-1b516aee-072f-4bb2-9261-57c0caa43fd3 req-b161ad2b-b23d-432e-b32f-35bb45c7ed1e service nova] Lock "5c29c188-a34b-4751-9f8b-166af7b15088-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 788.135312] env[61839]: DEBUG nova.compute.manager [req-1b516aee-072f-4bb2-9261-57c0caa43fd3 req-b161ad2b-b23d-432e-b32f-35bb45c7ed1e service nova] [instance: 5c29c188-a34b-4751-9f8b-166af7b15088] No waiting events found dispatching network-vif-plugged-2d6e228a-75ff-4bff-bc8d-bdde3218cf40 {{(pid=61839) pop_instance_event
/opt/stack/nova/nova/compute/manager.py:320}} [ 788.135435] env[61839]: WARNING nova.compute.manager [req-1b516aee-072f-4bb2-9261-57c0caa43fd3 req-b161ad2b-b23d-432e-b32f-35bb45c7ed1e service nova] [instance: 5c29c188-a34b-4751-9f8b-166af7b15088] Received unexpected event network-vif-plugged-2d6e228a-75ff-4bff-bc8d-bdde3218cf40 for instance with vm_state building and task_state spawning. [ 788.135590] env[61839]: DEBUG nova.compute.manager [req-1b516aee-072f-4bb2-9261-57c0caa43fd3 req-b161ad2b-b23d-432e-b32f-35bb45c7ed1e service nova] [instance: 5c29c188-a34b-4751-9f8b-166af7b15088] Received event network-changed-2d6e228a-75ff-4bff-bc8d-bdde3218cf40 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 788.135746] env[61839]: DEBUG nova.compute.manager [req-1b516aee-072f-4bb2-9261-57c0caa43fd3 req-b161ad2b-b23d-432e-b32f-35bb45c7ed1e service nova] [instance: 5c29c188-a34b-4751-9f8b-166af7b15088] Refreshing instance network info cache due to event network-changed-2d6e228a-75ff-4bff-bc8d-bdde3218cf40. {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 788.135909] env[61839]: DEBUG oslo_concurrency.lockutils [req-1b516aee-072f-4bb2-9261-57c0caa43fd3 req-b161ad2b-b23d-432e-b32f-35bb45c7ed1e service nova] Acquiring lock "refresh_cache-5c29c188-a34b-4751-9f8b-166af7b15088" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 788.140106] env[61839]: DEBUG oslo_vmware.api [None req-ebcaba21-dc43-4473-b939-c569f2c1af99 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]5250da44-5e79-4c37-13d5-32f947014dde, 'name': SearchDatastore_Task, 'duration_secs': 0.025161} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 788.140378] env[61839]: DEBUG oslo_concurrency.lockutils [None req-ebcaba21-dc43-4473-b939-c569f2c1af99 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 788.140591] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-ebcaba21-dc43-4473-b939-c569f2c1af99 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: e65da0fd-e877-4b25-a319-e4d65397056a] Processing image e497cc62-282a-4a70-9770-22d80d8a1013 {{(pid=61839) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 788.140813] env[61839]: DEBUG oslo_concurrency.lockutils [None req-ebcaba21-dc43-4473-b939-c569f2c1af99 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 788.162565] env[61839]: DEBUG oslo_vmware.api [None req-130eacd9-cc23-48e6-bf9f-acda8817b91f tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]5260c9de-cc94-9d86-b0e3-f859f974f378, 'name': SearchDatastore_Task, 'duration_secs': 0.008984} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 788.163327] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8ec4022c-6514-4065-b15b-504c48edf630 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.168564] env[61839]: DEBUG oslo_vmware.api [None req-130eacd9-cc23-48e6-bf9f-acda8817b91f tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] Waiting for the task: (returnval){ [ 788.168564] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52fac34e-64e9-c88b-d5c7-229bbb284a6c" [ 788.168564] env[61839]: _type = "Task" [ 788.168564] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 788.176201] env[61839]: DEBUG oslo_vmware.api [None req-130eacd9-cc23-48e6-bf9f-acda8817b91f tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52fac34e-64e9-c88b-d5c7-229bbb284a6c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 788.464805] env[61839]: INFO nova.compute.manager [None req-ba95565c-feba-4942-8392-448356c664a8 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] [instance: 619ec15b-463a-4daa-bffe-7d7a6022b962] Took 1.03 seconds to deallocate network for instance. [ 788.479885] env[61839]: DEBUG nova.network.neutron [-] [instance: 0bc0eefd-8a56-4cd6-a0b5-818cc437d917] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 788.511767] env[61839]: DEBUG nova.network.neutron [None req-85100d62-2957-4746-911c-e5a5695cb3f6 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 5c29c188-a34b-4751-9f8b-166af7b15088] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 788.653073] env[61839]: DEBUG nova.network.neutron [None req-b5bdc0dc-c0c3-4680-812b-c7e0bf9d39a6 tempest-InstanceActionsNegativeTestJSON-917154709 tempest-InstanceActionsNegativeTestJSON-917154709-project-member] [instance: 821b784d-dc69-4c54-bccf-76693c34e19d] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 788.682254] env[61839]: DEBUG oslo_vmware.api [None req-130eacd9-cc23-48e6-bf9f-acda8817b91f tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52fac34e-64e9-c88b-d5c7-229bbb284a6c, 'name': SearchDatastore_Task, 'duration_secs': 0.008385} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 788.682254] env[61839]: DEBUG oslo_concurrency.lockutils [None req-130eacd9-cc23-48e6-bf9f-acda8817b91f tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 788.682254] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-130eacd9-cc23-48e6-bf9f-acda8817b91f tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk to [datastore1] 687e6bf8-cf2a-4a9f-ad87-b42c4d0ced0a/687e6bf8-cf2a-4a9f-ad87-b42c4d0ced0a.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 788.682254] env[61839]: DEBUG oslo_concurrency.lockutils [None req-ebcaba21-dc43-4473-b939-c569f2c1af99 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 788.682426] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-ebcaba21-dc43-4473-b939-c569f2c1af99 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 788.682426] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-32766092-b1b7-4be5-8464-75b5fe5a9554 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.682426] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ad812b3f-0fd9-4db0-b5ee-3efca4643466 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.690050] env[61839]: DEBUG oslo_vmware.api [None req-130eacd9-cc23-48e6-bf9f-acda8817b91f tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] Waiting for the task: (returnval){ [ 788.690050] env[61839]: value = "task-1314329" [ 788.690050] env[61839]: _type = "Task" [ 788.690050] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 788.691440] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-ebcaba21-dc43-4473-b939-c569f2c1af99 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 788.691709] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-ebcaba21-dc43-4473-b939-c569f2c1af99 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61839) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 788.695205] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-adadc968-cdd6-4e49-a11a-bd4bed27322e {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.698489] env[61839]: DEBUG nova.network.neutron [None req-85100d62-2957-4746-911c-e5a5695cb3f6 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 5c29c188-a34b-4751-9f8b-166af7b15088] Updating instance_info_cache with network_info: [{"id": "2d6e228a-75ff-4bff-bc8d-bdde3218cf40", "address": "fa:16:3e:5a:64:4f", "network": {"id": "6f1d4bda-c333-4556-9530-df719c058a5b", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1248001983-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "223d94c193814f649b5d1f35e3756071", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2180b40f-2bb0-47da-ba80-c2fbe7f98af0", "external-id": "nsx-vlan-transportzone-970", "segmentation_id": 970, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2d6e228a-75", "ovs_interfaceid": "2d6e228a-75ff-4bff-bc8d-bdde3218cf40", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 788.706215] env[61839]: DEBUG oslo_vmware.api [None req-130eacd9-cc23-48e6-bf9f-acda8817b91f tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] Task: {'id': task-1314329, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 788.707807] env[61839]: DEBUG oslo_vmware.api [None req-ebcaba21-dc43-4473-b939-c569f2c1af99 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Waiting for the task: (returnval){ [ 788.707807] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]5263598f-c860-8033-d915-e2538b910415" [ 788.707807] env[61839]: _type = "Task" [ 788.707807] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 788.720170] env[61839]: DEBUG oslo_vmware.api [None req-ebcaba21-dc43-4473-b939-c569f2c1af99 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]5263598f-c860-8033-d915-e2538b910415, 'name': SearchDatastore_Task, 'duration_secs': 0.007557} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 788.720327] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6a16608b-35ee-47ec-a7f9-07fc253f0f40 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.725786] env[61839]: DEBUG oslo_vmware.api [None req-ebcaba21-dc43-4473-b939-c569f2c1af99 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Waiting for the task: (returnval){ [ 788.725786] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52580d9b-fca7-8684-ded9-f34e8866eefc" [ 788.725786] env[61839]: _type = "Task" [ 788.725786] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 788.733255] env[61839]: DEBUG oslo_vmware.api [None req-ebcaba21-dc43-4473-b939-c569f2c1af99 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52580d9b-fca7-8684-ded9-f34e8866eefc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 788.760917] env[61839]: DEBUG nova.network.neutron [None req-b5bdc0dc-c0c3-4680-812b-c7e0bf9d39a6 tempest-InstanceActionsNegativeTestJSON-917154709 tempest-InstanceActionsNegativeTestJSON-917154709-project-member] [instance: 821b784d-dc69-4c54-bccf-76693c34e19d] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 788.982239] env[61839]: INFO nova.compute.manager [-] [instance: 0bc0eefd-8a56-4cd6-a0b5-818cc437d917] Took 1.02 seconds to deallocate network for instance. [ 789.202385] env[61839]: DEBUG oslo_vmware.api [None req-130eacd9-cc23-48e6-bf9f-acda8817b91f tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] Task: {'id': task-1314329, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.484213} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 789.202791] env[61839]: DEBUG oslo_concurrency.lockutils [None req-85100d62-2957-4746-911c-e5a5695cb3f6 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Releasing lock "refresh_cache-5c29c188-a34b-4751-9f8b-166af7b15088" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 789.203099] env[61839]: DEBUG nova.compute.manager [None req-85100d62-2957-4746-911c-e5a5695cb3f6 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 5c29c188-a34b-4751-9f8b-166af7b15088] Instance network_info: |[{"id": "2d6e228a-75ff-4bff-bc8d-bdde3218cf40", "address": "fa:16:3e:5a:64:4f", "network": {"id": "6f1d4bda-c333-4556-9530-df719c058a5b", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1248001983-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "223d94c193814f649b5d1f35e3756071", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2180b40f-2bb0-47da-ba80-c2fbe7f98af0", "external-id": "nsx-vlan-transportzone-970", "segmentation_id": 970, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2d6e228a-75", "ovs_interfaceid": "2d6e228a-75ff-4bff-bc8d-bdde3218cf40", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 789.203362] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-130eacd9-cc23-48e6-bf9f-acda8817b91f tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk to [datastore1] 687e6bf8-cf2a-4a9f-ad87-b42c4d0ced0a/687e6bf8-cf2a-4a9f-ad87-b42c4d0ced0a.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 789.203585] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-130eacd9-cc23-48e6-bf9f-acda8817b91f tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] [instance: 687e6bf8-cf2a-4a9f-ad87-b42c4d0ced0a] Extending root virtual disk to 1048576 {{(pid=61839) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 789.203858] env[61839]: DEBUG oslo_concurrency.lockutils [req-1b516aee-072f-4bb2-9261-57c0caa43fd3 req-b161ad2b-b23d-432e-b32f-35bb45c7ed1e service nova] Acquired lock "refresh_cache-5c29c188-a34b-4751-9f8b-166af7b15088" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 789.204040] env[61839]: DEBUG nova.network.neutron [req-1b516aee-072f-4bb2-9261-57c0caa43fd3 req-b161ad2b-b23d-432e-b32f-35bb45c7ed1e service nova] [instance: 5c29c188-a34b-4751-9f8b-166af7b15088] Refreshing network info cache for port 
2d6e228a-75ff-4bff-bc8d-bdde3218cf40 {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 789.205151] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-85100d62-2957-4746-911c-e5a5695cb3f6 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 5c29c188-a34b-4751-9f8b-166af7b15088] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5a:64:4f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2180b40f-2bb0-47da-ba80-c2fbe7f98af0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2d6e228a-75ff-4bff-bc8d-bdde3218cf40', 'vif_model': 'vmxnet3'}] {{(pid=61839) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 789.212414] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-85100d62-2957-4746-911c-e5a5695cb3f6 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Creating folder: Project (223d94c193814f649b5d1f35e3756071). Parent ref: group-v281288. {{(pid=61839) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 789.212650] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7a6374fb-ee8f-44d8-afc5-7984979b6bd6 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.217228] env[61839]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e53f182f-b90c-4054-abb9-f7ed9911f88c {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.225051] env[61839]: DEBUG oslo_vmware.api [None req-130eacd9-cc23-48e6-bf9f-acda8817b91f tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] Waiting for the task: (returnval){ [ 789.225051] env[61839]: value = "task-1314330" [ 789.225051] env[61839]: _type = "Task" [ 789.225051] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 789.233829] env[61839]: INFO nova.virt.vmwareapi.vm_util [None req-85100d62-2957-4746-911c-e5a5695cb3f6 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Created folder: Project (223d94c193814f649b5d1f35e3756071) in parent group-v281288. [ 789.234020] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-85100d62-2957-4746-911c-e5a5695cb3f6 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Creating folder: Instances. Parent ref: group-v281318. {{(pid=61839) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 789.238431] env[61839]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-12a3b921-6079-4221-ae9b-530212d3ff29 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.240217] env[61839]: DEBUG oslo_vmware.api [None req-130eacd9-cc23-48e6-bf9f-acda8817b91f tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] Task: {'id': task-1314330, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 789.250994] env[61839]: DEBUG oslo_vmware.api [None req-ebcaba21-dc43-4473-b939-c569f2c1af99 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52580d9b-fca7-8684-ded9-f34e8866eefc, 'name': SearchDatastore_Task, 'duration_secs': 0.007541} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 789.252186] env[61839]: DEBUG oslo_concurrency.lockutils [None req-ebcaba21-dc43-4473-b939-c569f2c1af99 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 789.252468] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-ebcaba21-dc43-4473-b939-c569f2c1af99 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk to [datastore1] e65da0fd-e877-4b25-a319-e4d65397056a/e65da0fd-e877-4b25-a319-e4d65397056a.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 789.252756] env[61839]: INFO nova.virt.vmwareapi.vm_util [None req-85100d62-2957-4746-911c-e5a5695cb3f6 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Created folder: Instances in parent group-v281318. [ 789.252988] env[61839]: DEBUG oslo.service.loopingcall [None req-85100d62-2957-4746-911c-e5a5695cb3f6 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 789.253199] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-39c8c878-104c-406d-b30b-e02c64bfac4c {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.255114] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5c29c188-a34b-4751-9f8b-166af7b15088] Creating VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 789.255339] env[61839]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-eafb17d8-dcf0-4acb-95d7-3380ce2df6d9 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.272265] env[61839]: DEBUG oslo_concurrency.lockutils [None req-b5bdc0dc-c0c3-4680-812b-c7e0bf9d39a6 tempest-InstanceActionsNegativeTestJSON-917154709 tempest-InstanceActionsNegativeTestJSON-917154709-project-member] Releasing lock "refresh_cache-821b784d-dc69-4c54-bccf-76693c34e19d" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 789.272477] env[61839]: DEBUG nova.compute.manager [None req-b5bdc0dc-c0c3-4680-812b-c7e0bf9d39a6 tempest-InstanceActionsNegativeTestJSON-917154709 tempest-InstanceActionsNegativeTestJSON-917154709-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged. {{(pid=61839) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 789.272679] env[61839]: DEBUG nova.compute.manager [None req-b5bdc0dc-c0c3-4680-812b-c7e0bf9d39a6 tempest-InstanceActionsNegativeTestJSON-917154709 tempest-InstanceActionsNegativeTestJSON-917154709-project-member] [instance: 821b784d-dc69-4c54-bccf-76693c34e19d] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 789.272855] env[61839]: DEBUG nova.network.neutron [None req-b5bdc0dc-c0c3-4680-812b-c7e0bf9d39a6 tempest-InstanceActionsNegativeTestJSON-917154709 tempest-InstanceActionsNegativeTestJSON-917154709-project-member] [instance: 821b784d-dc69-4c54-bccf-76693c34e19d] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 789.276744] env[61839]: DEBUG oslo_vmware.api [None req-ebcaba21-dc43-4473-b939-c569f2c1af99 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Waiting for the task: (returnval){ [ 789.276744] env[61839]: value = "task-1314333" [ 789.276744] env[61839]: _type = "Task" [ 789.276744] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 789.280797] env[61839]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 789.280797] env[61839]: value = "task-1314334" [ 789.280797] env[61839]: _type = "Task" [ 789.280797] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 789.293675] env[61839]: DEBUG oslo_vmware.api [None req-ebcaba21-dc43-4473-b939-c569f2c1af99 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Task: {'id': task-1314333, 'name': CopyVirtualDisk_Task} progress is 0%.
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 789.293905] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314334, 'name': CreateVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 789.294723] env[61839]: DEBUG nova.network.neutron [None req-b5bdc0dc-c0c3-4680-812b-c7e0bf9d39a6 tempest-InstanceActionsNegativeTestJSON-917154709 tempest-InstanceActionsNegativeTestJSON-917154709-project-member] [instance: 821b784d-dc69-4c54-bccf-76693c34e19d] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 789.424472] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1919b8f-45bb-4ffa-9a3e-b101aa4b5e9a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.432906] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f47e3da8-5d46-4ba4-a6c4-779487c2c24a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.467389] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cba7acf5-e0d9-415d-9eec-43d4beb38b7c {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.479982] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-538a6bec-0ba9-4951-8eb8-4274f160e6a7 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.498411] env[61839]: DEBUG oslo_concurrency.lockutils [None req-78cb2bb4-fa32-4f8b-ac66-631fd240ea4c tempest-ServersAaction247Test-1570847378 tempest-ServersAaction247Test-1570847378-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 789.499137] env[61839]: DEBUG nova.compute.provider_tree [None req-fdadb98b-f7ca-40ff-9eb5-8f15cbae89cd tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 789.501739] env[61839]: INFO nova.scheduler.client.report [None req-ba95565c-feba-4942-8392-448356c664a8 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] Deleted allocations for instance 619ec15b-463a-4daa-bffe-7d7a6022b962 [ 789.735706] env[61839]: DEBUG oslo_vmware.api [None req-130eacd9-cc23-48e6-bf9f-acda8817b91f tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] Task: {'id': task-1314330, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069688} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 789.735706] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-130eacd9-cc23-48e6-bf9f-acda8817b91f tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] [instance: 687e6bf8-cf2a-4a9f-ad87-b42c4d0ced0a] Extended root virtual disk {{(pid=61839) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 789.736514] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59c450e8-ca65-4ea5-805f-c7cf21f1f00a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.757455] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-130eacd9-cc23-48e6-bf9f-acda8817b91f tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] [instance: 687e6bf8-cf2a-4a9f-ad87-b42c4d0ced0a] Reconfiguring VM instance instance-00000036 to attach disk [datastore1] 687e6bf8-cf2a-4a9f-ad87-b42c4d0ced0a/687e6bf8-cf2a-4a9f-ad87-b42c4d0ced0a.vmdk or device None with type sparse {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 789.759698] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cb2bcbc6-af93-4d5b-be08-a1bfc6073d45 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.782036] env[61839]: DEBUG oslo_vmware.api [None req-130eacd9-cc23-48e6-bf9f-acda8817b91f tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] Waiting for the task: (returnval){ [ 789.782036] env[61839]: value = "task-1314335" [ 789.782036] env[61839]: _type = "Task" [ 789.782036] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 789.792537] env[61839]: DEBUG oslo_vmware.api [None req-ebcaba21-dc43-4473-b939-c569f2c1af99 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Task: {'id': task-1314333, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.475767} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 789.793314] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-ebcaba21-dc43-4473-b939-c569f2c1af99 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk to [datastore1] e65da0fd-e877-4b25-a319-e4d65397056a/e65da0fd-e877-4b25-a319-e4d65397056a.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 789.793563] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-ebcaba21-dc43-4473-b939-c569f2c1af99 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: e65da0fd-e877-4b25-a319-e4d65397056a] Extending root virtual disk to 1048576 {{(pid=61839) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 789.794275] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-29d22f9e-cfb0-488d-a9d4-a1592b7c919a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.804945] env[61839]: DEBUG nova.network.neutron [None req-b5bdc0dc-c0c3-4680-812b-c7e0bf9d39a6 tempest-InstanceActionsNegativeTestJSON-917154709 tempest-InstanceActionsNegativeTestJSON-917154709-project-member] [instance: 821b784d-dc69-4c54-bccf-76693c34e19d] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 789.806424] env[61839]: DEBUG oslo_vmware.api [None req-130eacd9-cc23-48e6-bf9f-acda8817b91f tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] Task: {'id': task-1314335, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 789.806643] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314334, 'name': CreateVM_Task, 'duration_secs': 0.427562} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 789.808195] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5c29c188-a34b-4751-9f8b-166af7b15088] Created VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 789.808195] env[61839]: DEBUG oslo_concurrency.lockutils [None req-85100d62-2957-4746-911c-e5a5695cb3f6 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 789.808314] env[61839]: DEBUG oslo_concurrency.lockutils [None req-85100d62-2957-4746-911c-e5a5695cb3f6 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 789.808585] env[61839]: DEBUG oslo_concurrency.lockutils [None req-85100d62-2957-4746-911c-e5a5695cb3f6 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 789.808863] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5b92f9c8-751c-47b5-a8c9-04d30dedee9a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.811980] env[61839]: DEBUG oslo_vmware.api [None req-ebcaba21-dc43-4473-b939-c569f2c1af99 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Waiting for the task: (returnval){ [ 789.811980] env[61839]: value = "task-1314336" [ 789.811980] env[61839]: _type = "Task" [ 789.811980] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 789.816682] env[61839]: DEBUG oslo_vmware.api [None req-85100d62-2957-4746-911c-e5a5695cb3f6 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Waiting for the task: (returnval){ [ 789.816682] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]529d7fb5-4ae9-af4d-a7bc-a6b09b8a505c" [ 789.816682] env[61839]: _type = "Task" [ 789.816682] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 789.825474] env[61839]: DEBUG oslo_vmware.api [None req-ebcaba21-dc43-4473-b939-c569f2c1af99 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Task: {'id': task-1314336, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 789.830469] env[61839]: DEBUG oslo_vmware.api [None req-85100d62-2957-4746-911c-e5a5695cb3f6 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]529d7fb5-4ae9-af4d-a7bc-a6b09b8a505c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.001217] env[61839]: DEBUG nova.network.neutron [req-1b516aee-072f-4bb2-9261-57c0caa43fd3 req-b161ad2b-b23d-432e-b32f-35bb45c7ed1e service nova] [instance: 5c29c188-a34b-4751-9f8b-166af7b15088] Updated VIF entry in instance network info cache for port 2d6e228a-75ff-4bff-bc8d-bdde3218cf40. {{(pid=61839) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 790.001619] env[61839]: DEBUG nova.network.neutron [req-1b516aee-072f-4bb2-9261-57c0caa43fd3 req-b161ad2b-b23d-432e-b32f-35bb45c7ed1e service nova] [instance: 5c29c188-a34b-4751-9f8b-166af7b15088] Updating instance_info_cache with network_info: [{"id": "2d6e228a-75ff-4bff-bc8d-bdde3218cf40", "address": "fa:16:3e:5a:64:4f", "network": {"id": "6f1d4bda-c333-4556-9530-df719c058a5b", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1248001983-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "223d94c193814f649b5d1f35e3756071", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2180b40f-2bb0-47da-ba80-c2fbe7f98af0", "external-id": "nsx-vlan-transportzone-970", "segmentation_id": 970, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2d6e228a-75", "ovs_interfaceid": "2d6e228a-75ff-4bff-bc8d-bdde3218cf40", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 790.010189] env[61839]: DEBUG nova.scheduler.client.report [None req-fdadb98b-f7ca-40ff-9eb5-8f15cbae89cd tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 790.013938] env[61839]: DEBUG oslo_concurrency.lockutils [None req-ba95565c-feba-4942-8392-448356c664a8 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] Lock "619ec15b-463a-4daa-bffe-7d7a6022b962" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 180.630s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 790.295305] env[61839]: DEBUG oslo_vmware.api [None req-130eacd9-cc23-48e6-bf9f-acda8817b91f tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] Task: {'id': task-1314335, 'name': ReconfigVM_Task, 'duration_secs': 0.478872} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 790.295799] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-130eacd9-cc23-48e6-bf9f-acda8817b91f tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] [instance: 687e6bf8-cf2a-4a9f-ad87-b42c4d0ced0a] Reconfigured VM instance instance-00000036 to attach disk [datastore1] 687e6bf8-cf2a-4a9f-ad87-b42c4d0ced0a/687e6bf8-cf2a-4a9f-ad87-b42c4d0ced0a.vmdk or device None with type sparse {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 790.296220] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9ab484cf-8523-4524-91a0-f2c79ef61a98 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.302090] env[61839]: DEBUG oslo_vmware.api [None req-130eacd9-cc23-48e6-bf9f-acda8817b91f tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] Waiting for the task: (returnval){ [ 790.302090] env[61839]: value = "task-1314337" [ 790.302090] env[61839]: _type = "Task" [ 790.302090] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 790.310498] env[61839]: INFO nova.compute.manager [None req-b5bdc0dc-c0c3-4680-812b-c7e0bf9d39a6 tempest-InstanceActionsNegativeTestJSON-917154709 tempest-InstanceActionsNegativeTestJSON-917154709-project-member] [instance: 821b784d-dc69-4c54-bccf-76693c34e19d] Took 1.04 seconds to deallocate network for instance. [ 790.312625] env[61839]: DEBUG oslo_vmware.api [None req-130eacd9-cc23-48e6-bf9f-acda8817b91f tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] Task: {'id': task-1314337, 'name': Rename_Task} progress is 5%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.320074] env[61839]: DEBUG oslo_vmware.api [None req-ebcaba21-dc43-4473-b939-c569f2c1af99 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Task: {'id': task-1314336, 'name': ExtendVirtualDisk_Task} progress is 50%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.328055] env[61839]: DEBUG oslo_vmware.api [None req-85100d62-2957-4746-911c-e5a5695cb3f6 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]529d7fb5-4ae9-af4d-a7bc-a6b09b8a505c, 'name': SearchDatastore_Task, 'duration_secs': 0.020219} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 790.328309] env[61839]: DEBUG oslo_concurrency.lockutils [None req-85100d62-2957-4746-911c-e5a5695cb3f6 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 790.328541] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-85100d62-2957-4746-911c-e5a5695cb3f6 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 5c29c188-a34b-4751-9f8b-166af7b15088] Processing image e497cc62-282a-4a70-9770-22d80d8a1013 {{(pid=61839) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 790.328808] env[61839]: DEBUG oslo_concurrency.lockutils [None req-85100d62-2957-4746-911c-e5a5695cb3f6 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 790.328911] env[61839]: DEBUG oslo_concurrency.lockutils [None req-85100d62-2957-4746-911c-e5a5695cb3f6 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 790.329096] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-85100d62-2957-4746-911c-e5a5695cb3f6 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 790.329357] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e80fed25-c8dc-456e-b8df-0203c92e29c5 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.336687] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-85100d62-2957-4746-911c-e5a5695cb3f6 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 790.336876] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-85100d62-2957-4746-911c-e5a5695cb3f6 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61839) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 790.337647] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-269e388d-255b-4814-ba68-c4e74d64b45b {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.342934] env[61839]: DEBUG oslo_vmware.api [None req-85100d62-2957-4746-911c-e5a5695cb3f6 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Waiting for the task: (returnval){ [ 790.342934] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52b1bdef-3aa0-8bb3-8ecf-204c0f6e9060" [ 790.342934] env[61839]: _type = "Task" [ 790.342934] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 790.350025] env[61839]: DEBUG oslo_vmware.api [None req-85100d62-2957-4746-911c-e5a5695cb3f6 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52b1bdef-3aa0-8bb3-8ecf-204c0f6e9060, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.504112] env[61839]: DEBUG oslo_concurrency.lockutils [req-1b516aee-072f-4bb2-9261-57c0caa43fd3 req-b161ad2b-b23d-432e-b32f-35bb45c7ed1e service nova] Releasing lock "refresh_cache-5c29c188-a34b-4751-9f8b-166af7b15088" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 790.515310] env[61839]: DEBUG oslo_concurrency.lockutils [None req-fdadb98b-f7ca-40ff-9eb5-8f15cbae89cd tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.401s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 790.515894] env[61839]: DEBUG nova.compute.manager [None req-fdadb98b-f7ca-40ff-9eb5-8f15cbae89cd tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 603191b6-a4b0-451b-b98b-f3dbfb684300] Start building networks asynchronously for instance. {{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 790.518382] env[61839]: DEBUG oslo_concurrency.lockutils [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 15.080s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 790.519799] env[61839]: DEBUG nova.compute.manager [None req-4445949b-c57d-48df-ad34-d533bc3c88d3 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4] Starting instance... {{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 790.812513] env[61839]: DEBUG oslo_vmware.api [None req-130eacd9-cc23-48e6-bf9f-acda8817b91f tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] Task: {'id': task-1314337, 'name': Rename_Task, 'duration_secs': 0.137946} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 790.812835] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-130eacd9-cc23-48e6-bf9f-acda8817b91f tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] [instance: 687e6bf8-cf2a-4a9f-ad87-b42c4d0ced0a] Powering on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 790.816741] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8f80f646-323c-4a95-a6e6-89e5e31dda6c {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.828357] env[61839]: DEBUG oslo_vmware.api [None req-ebcaba21-dc43-4473-b939-c569f2c1af99 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Task: {'id': task-1314336, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.512102} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 790.831599] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-ebcaba21-dc43-4473-b939-c569f2c1af99 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: e65da0fd-e877-4b25-a319-e4d65397056a] Extended root virtual disk {{(pid=61839) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 790.831599] env[61839]: DEBUG oslo_vmware.api [None req-130eacd9-cc23-48e6-bf9f-acda8817b91f tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] Waiting for the task: (returnval){ [ 790.831599] env[61839]: value = "task-1314338" [ 790.831599] env[61839]: _type = "Task" [ 790.831599] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 790.831599] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7392df53-7ed6-4ce0-850d-6620005fb714 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.844666] env[61839]: DEBUG oslo_vmware.api [None req-130eacd9-cc23-48e6-bf9f-acda8817b91f tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] Task: {'id': task-1314338, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.868485] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-ebcaba21-dc43-4473-b939-c569f2c1af99 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: e65da0fd-e877-4b25-a319-e4d65397056a] Reconfiguring VM instance instance-00000037 to attach disk [datastore1] e65da0fd-e877-4b25-a319-e4d65397056a/e65da0fd-e877-4b25-a319-e4d65397056a.vmdk or device None with type sparse {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 790.869561] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8896c591-3abf-42e5-9497-293bcd6ef338 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.890862] env[61839]: DEBUG oslo_vmware.api [None req-85100d62-2957-4746-911c-e5a5695cb3f6 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52b1bdef-3aa0-8bb3-8ecf-204c0f6e9060, 'name': SearchDatastore_Task, 'duration_secs': 0.009042} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 790.892718] env[61839]: DEBUG oslo_vmware.api [None req-ebcaba21-dc43-4473-b939-c569f2c1af99 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Waiting for the task: (returnval){ [ 790.892718] env[61839]: value = "task-1314339" [ 790.892718] env[61839]: _type = "Task" [ 790.892718] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 790.892995] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d28e6c92-17f8-4bc1-8076-c7aa361f49cd {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.904490] env[61839]: DEBUG oslo_vmware.api [None req-ebcaba21-dc43-4473-b939-c569f2c1af99 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Task: {'id': task-1314339, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.905135] env[61839]: DEBUG oslo_vmware.api [None req-85100d62-2957-4746-911c-e5a5695cb3f6 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Waiting for the task: (returnval){ [ 790.905135] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]524edaa6-80b8-765a-f6f3-14b9e92e9380" [ 790.905135] env[61839]: _type = "Task" [ 790.905135] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 790.914450] env[61839]: DEBUG oslo_vmware.api [None req-85100d62-2957-4746-911c-e5a5695cb3f6 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]524edaa6-80b8-765a-f6f3-14b9e92e9380, 'name': SearchDatastore_Task, 'duration_secs': 0.009084} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 790.914797] env[61839]: DEBUG oslo_concurrency.lockutils [None req-85100d62-2957-4746-911c-e5a5695cb3f6 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 790.915124] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-85100d62-2957-4746-911c-e5a5695cb3f6 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk to [datastore1] 5c29c188-a34b-4751-9f8b-166af7b15088/5c29c188-a34b-4751-9f8b-166af7b15088.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 790.915416] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6d0431cc-8552-4ff5-af90-57e7a2feb11e {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.920943] env[61839]: DEBUG oslo_vmware.api [None req-85100d62-2957-4746-911c-e5a5695cb3f6 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Waiting for the task: (returnval){ [ 790.920943] env[61839]: value = "task-1314340" [ 790.920943] env[61839]: _type = "Task" [ 790.920943] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 790.929303] env[61839]: DEBUG oslo_vmware.api [None req-85100d62-2957-4746-911c-e5a5695cb3f6 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Task: {'id': task-1314340, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 791.028546] env[61839]: DEBUG nova.compute.utils [None req-fdadb98b-f7ca-40ff-9eb5-8f15cbae89cd tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Using /dev/sd instead of None {{(pid=61839) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 791.036801] env[61839]: DEBUG nova.compute.manager [None req-fdadb98b-f7ca-40ff-9eb5-8f15cbae89cd tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 603191b6-a4b0-451b-b98b-f3dbfb684300] Allocating IP information in the background. 
{{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 791.037299] env[61839]: DEBUG nova.network.neutron [None req-fdadb98b-f7ca-40ff-9eb5-8f15cbae89cd tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 603191b6-a4b0-451b-b98b-f3dbfb684300] allocate_for_instance() {{(pid=61839) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 791.061940] env[61839]: DEBUG oslo_concurrency.lockutils [None req-4445949b-c57d-48df-ad34-d533bc3c88d3 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 791.084681] env[61839]: DEBUG nova.policy [None req-fdadb98b-f7ca-40ff-9eb5-8f15cbae89cd tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5e327fdc367e49559dc1ef4862ca1e2c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '223d94c193814f649b5d1f35e3756071', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61839) authorize /opt/stack/nova/nova/policy.py:201}} [ 791.345134] env[61839]: DEBUG oslo_vmware.api [None req-130eacd9-cc23-48e6-bf9f-acda8817b91f tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] Task: {'id': task-1314338, 'name': PowerOnVM_Task} progress is 89%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 791.354201] env[61839]: INFO nova.scheduler.client.report [None req-b5bdc0dc-c0c3-4680-812b-c7e0bf9d39a6 tempest-InstanceActionsNegativeTestJSON-917154709 tempest-InstanceActionsNegativeTestJSON-917154709-project-member] Deleted allocations for instance 821b784d-dc69-4c54-bccf-76693c34e19d [ 791.411843] env[61839]: DEBUG oslo_vmware.api [None req-ebcaba21-dc43-4473-b939-c569f2c1af99 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Task: {'id': task-1314339, 'name': ReconfigVM_Task, 'duration_secs': 0.425237} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 791.411843] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-ebcaba21-dc43-4473-b939-c569f2c1af99 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: e65da0fd-e877-4b25-a319-e4d65397056a] Reconfigured VM instance instance-00000037 to attach disk [datastore1] e65da0fd-e877-4b25-a319-e4d65397056a/e65da0fd-e877-4b25-a319-e4d65397056a.vmdk or device None with type sparse {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 791.413067] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-da6e6efc-06a3-4871-84e9-eb03b3b95fdb {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.424266] env[61839]: DEBUG oslo_vmware.api [None req-ebcaba21-dc43-4473-b939-c569f2c1af99 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Waiting for the task: (returnval){ [ 791.424266] env[61839]: value = "task-1314341" [ 791.424266] env[61839]: _type = "Task" [ 791.424266] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 791.438095] env[61839]: DEBUG oslo_vmware.api [None req-85100d62-2957-4746-911c-e5a5695cb3f6 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Task: {'id': task-1314340, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 791.443723] env[61839]: DEBUG oslo_vmware.api [None req-ebcaba21-dc43-4473-b939-c569f2c1af99 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Task: {'id': task-1314341, 'name': Rename_Task} progress is 10%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 791.466142] env[61839]: DEBUG nova.network.neutron [None req-fdadb98b-f7ca-40ff-9eb5-8f15cbae89cd tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 603191b6-a4b0-451b-b98b-f3dbfb684300] Successfully created port: fc601f8f-cb33-41b2-9f00-9476cd3cbf01 {{(pid=61839) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 791.537358] env[61839]: DEBUG nova.compute.manager [None req-fdadb98b-f7ca-40ff-9eb5-8f15cbae89cd tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 603191b6-a4b0-451b-b98b-f3dbfb684300] Start building block device mappings for instance. {{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 791.567250] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance e81bf730-9cf6-4728-aae4-4962115f8b6f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 791.567428] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance 697222e0-07e5-4a3d-adbe-d5d815cf4756 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 791.567581] env[61839]: WARNING nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance 0bc0eefd-8a56-4cd6-a0b5-818cc437d917 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 791.567708] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance 687e6bf8-cf2a-4a9f-ad87-b42c4d0ced0a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 791.567825] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance e65da0fd-e877-4b25-a319-e4d65397056a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 791.567965] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance 5c29c188-a34b-4751-9f8b-166af7b15088 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 791.568129] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance 603191b6-a4b0-451b-b98b-f3dbfb684300 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 791.847614] env[61839]: DEBUG oslo_vmware.api [None req-130eacd9-cc23-48e6-bf9f-acda8817b91f tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] Task: {'id': task-1314338, 'name': PowerOnVM_Task, 'duration_secs': 0.644195} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 791.847893] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-130eacd9-cc23-48e6-bf9f-acda8817b91f tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] [instance: 687e6bf8-cf2a-4a9f-ad87-b42c4d0ced0a] Powered on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 791.848644] env[61839]: INFO nova.compute.manager [None req-130eacd9-cc23-48e6-bf9f-acda8817b91f tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] [instance: 687e6bf8-cf2a-4a9f-ad87-b42c4d0ced0a] Took 9.38 seconds to spawn the instance on the hypervisor. 
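The vCenter task entries above (ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task) all share one shape: repeated "_poll_task ... progress is N%" records from api.py:434, then a completion record with a duration_secs from api.py:444. The following is a minimal, self-contained sketch of that poll-until-terminal pattern; get_task_info and its canned responses are hypothetical stand-ins for the TaskInfo reads that oslo.vmware's wait_for_task performs against vCenter, not the driver's actual code.

import itertools
import time

# Canned TaskInfo responses (hypothetical data, shaped like the log above:
# progress 0% -> 33% -> success). The real driver reads TaskInfo from
# vCenter via oslo.vmware; this stub only stands in for that call.
_FAKE_TASK_INFO = itertools.chain(
    [{'state': 'running', 'progress': 0},
     {'state': 'running', 'progress': 33}],
    itertools.repeat({'state': 'success', 'progress': 100}),
)

def get_task_info(task_id):
    # Hypothetical stand-in for the vSphere TaskInfo read.
    return next(_FAKE_TASK_INFO)

def wait_for_task(task_id, poll_interval=0.5):
    # Poll until the task reaches a terminal state, logging progress
    # in between, and report the total duration on success -- the same
    # sequence the _poll_task/wait_for_task entries above record.
    started = time.monotonic()
    while True:
        info = get_task_info(task_id)
        if info['state'] == 'success':
            duration = time.monotonic() - started
            print(f"Task {task_id} completed successfully in {duration:.6f}s")
            return info
        if info['state'] == 'error':
            raise RuntimeError(f"Task {task_id} failed")
        print(f"Task {task_id} progress is {info['progress']}%")
        time.sleep(poll_interval)

wait_for_task('task-1314338')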
[ 791.849089] env[61839]: DEBUG nova.compute.manager [None req-130eacd9-cc23-48e6-bf9f-acda8817b91f tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] [instance: 687e6bf8-cf2a-4a9f-ad87-b42c4d0ced0a] Checking state {{(pid=61839) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 791.850679] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3468ab2d-eaf8-4d4f-bb2a-c824933af104 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.865425] env[61839]: DEBUG oslo_concurrency.lockutils [None req-b5bdc0dc-c0c3-4680-812b-c7e0bf9d39a6 tempest-InstanceActionsNegativeTestJSON-917154709 tempest-InstanceActionsNegativeTestJSON-917154709-project-member] Lock "821b784d-dc69-4c54-bccf-76693c34e19d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 180.260s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 791.940458] env[61839]: DEBUG oslo_vmware.api [None req-85100d62-2957-4746-911c-e5a5695cb3f6 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Task: {'id': task-1314340, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.546055} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 791.943446] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-85100d62-2957-4746-911c-e5a5695cb3f6 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk to [datastore1] 5c29c188-a34b-4751-9f8b-166af7b15088/5c29c188-a34b-4751-9f8b-166af7b15088.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 791.943701] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-85100d62-2957-4746-911c-e5a5695cb3f6 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 5c29c188-a34b-4751-9f8b-166af7b15088] Extending root virtual disk to 1048576 {{(pid=61839) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 791.943965] env[61839]: DEBUG oslo_vmware.api [None req-ebcaba21-dc43-4473-b939-c569f2c1af99 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Task: {'id': task-1314341, 'name': Rename_Task, 'duration_secs': 0.258513} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 791.944170] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-581ec75d-9648-4adc-9b73-cfe3ce36015d {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.946079] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-ebcaba21-dc43-4473-b939-c569f2c1af99 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: e65da0fd-e877-4b25-a319-e4d65397056a] Powering on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 791.946334] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-da27d997-806f-4f22-a480-6bbfc9dee843 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.952676] env[61839]: DEBUG oslo_vmware.api [None req-85100d62-2957-4746-911c-e5a5695cb3f6 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Waiting for the task: (returnval){ [ 791.952676] env[61839]: value = "task-1314342" [ 791.952676] env[61839]: _type = "Task" [ 791.952676] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 791.957426] env[61839]: DEBUG oslo_vmware.api [None req-ebcaba21-dc43-4473-b939-c569f2c1af99 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Waiting for the task: (returnval){ [ 791.957426] env[61839]: value = "task-1314343" [ 791.957426] env[61839]: _type = "Task" [ 791.957426] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 791.963343] env[61839]: DEBUG oslo_vmware.api [None req-85100d62-2957-4746-911c-e5a5695cb3f6 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Task: {'id': task-1314342, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 791.968124] env[61839]: DEBUG oslo_vmware.api [None req-ebcaba21-dc43-4473-b939-c569f2c1af99 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Task: {'id': task-1314343, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 792.071748] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance 86525ea7-af75-4b10-85a1-c0fbab73ea5f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 792.371907] env[61839]: DEBUG nova.compute.manager [None req-fb2077f5-f93b-4202-be51-2a1ff1444651 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 6f43abec-51e2-40e4-8a0f-5a8617a9a9f8] Starting instance... 
{{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 792.381735] env[61839]: INFO nova.compute.manager [None req-130eacd9-cc23-48e6-bf9f-acda8817b91f tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] [instance: 687e6bf8-cf2a-4a9f-ad87-b42c4d0ced0a] Took 30.00 seconds to build instance. [ 792.466458] env[61839]: DEBUG oslo_vmware.api [None req-85100d62-2957-4746-911c-e5a5695cb3f6 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Task: {'id': task-1314342, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.083658} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 792.467245] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-85100d62-2957-4746-911c-e5a5695cb3f6 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 5c29c188-a34b-4751-9f8b-166af7b15088] Extended root virtual disk {{(pid=61839) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 792.468086] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64885b45-fac9-46bd-8642-c9b058706743 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.474464] env[61839]: DEBUG oslo_vmware.api [None req-ebcaba21-dc43-4473-b939-c569f2c1af99 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Task: {'id': task-1314343, 'name': PowerOnVM_Task} progress is 1%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 792.495170] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-85100d62-2957-4746-911c-e5a5695cb3f6 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 5c29c188-a34b-4751-9f8b-166af7b15088] Reconfiguring VM instance instance-00000038 to attach disk [datastore1] 5c29c188-a34b-4751-9f8b-166af7b15088/5c29c188-a34b-4751-9f8b-166af7b15088.vmdk or device None with type sparse {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 792.495783] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6f2ee41f-84ee-4933-988c-1f3057cf50a9 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.518262] env[61839]: DEBUG oslo_vmware.api [None req-85100d62-2957-4746-911c-e5a5695cb3f6 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Waiting for the task: (returnval){ [ 792.518262] env[61839]: value = "task-1314344" [ 792.518262] env[61839]: _type = "Task" [ 792.518262] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 792.524834] env[61839]: DEBUG oslo_vmware.api [None req-85100d62-2957-4746-911c-e5a5695cb3f6 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Task: {'id': task-1314344, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 792.549993] env[61839]: DEBUG nova.compute.manager [None req-fdadb98b-f7ca-40ff-9eb5-8f15cbae89cd tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 603191b6-a4b0-451b-b98b-f3dbfb684300] Start spawning the instance on the hypervisor. {{(pid=61839) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 792.574602] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance a661cc10-5c4e-421b-b70b-189f0a613e8a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 792.590281] env[61839]: DEBUG nova.virt.hardware [None req-fdadb98b-f7ca-40ff-9eb5-8f15cbae89cd tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-18T16:51:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-18T16:50:57Z,direct_url=,disk_format='vmdk',id=e497cc62-282a-4a70-9770-22d80d8a1013,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a98e034276e44534a9feace637762da7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-18T16:50:58Z,virtual_size=,visibility=), allow threads: False {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 792.590560] env[61839]: DEBUG nova.virt.hardware [None req-fdadb98b-f7ca-40ff-9eb5-8f15cbae89cd tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Flavor limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 792.590723] env[61839]: DEBUG nova.virt.hardware [None req-fdadb98b-f7ca-40ff-9eb5-8f15cbae89cd tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Image limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 792.590905] env[61839]: DEBUG nova.virt.hardware [None req-fdadb98b-f7ca-40ff-9eb5-8f15cbae89cd tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Flavor pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 792.591064] env[61839]: DEBUG nova.virt.hardware [None req-fdadb98b-f7ca-40ff-9eb5-8f15cbae89cd tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Image pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 792.591215] env[61839]: DEBUG nova.virt.hardware [None req-fdadb98b-f7ca-40ff-9eb5-8f15cbae89cd tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] 
Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 792.591423] env[61839]: DEBUG nova.virt.hardware [None req-fdadb98b-f7ca-40ff-9eb5-8f15cbae89cd tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 792.591645] env[61839]: DEBUG nova.virt.hardware [None req-fdadb98b-f7ca-40ff-9eb5-8f15cbae89cd tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 792.591827] env[61839]: DEBUG nova.virt.hardware [None req-fdadb98b-f7ca-40ff-9eb5-8f15cbae89cd tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Got 1 possible topologies {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 792.591993] env[61839]: DEBUG nova.virt.hardware [None req-fdadb98b-f7ca-40ff-9eb5-8f15cbae89cd tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 792.592181] env[61839]: DEBUG nova.virt.hardware [None req-fdadb98b-f7ca-40ff-9eb5-8f15cbae89cd tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 792.593675] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-297f860f-94d5-40f4-9800-1ef8fdc7250f {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.602606] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-326e9491-1cc6-42cb-bd42-51c6f993fc7b {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.883620] env[61839]: DEBUG oslo_concurrency.lockutils [None req-130eacd9-cc23-48e6-bf9f-acda8817b91f tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] Lock "687e6bf8-cf2a-4a9f-ad87-b42c4d0ced0a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 141.134s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 792.898261] env[61839]: DEBUG oslo_concurrency.lockutils [None req-fb2077f5-f93b-4202-be51-2a1ff1444651 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 792.968511] env[61839]: DEBUG oslo_vmware.api [None req-ebcaba21-dc43-4473-b939-c569f2c1af99 tempest-ImagesTestJSON-742018210 
tempest-ImagesTestJSON-742018210-project-member] Task: {'id': task-1314343, 'name': PowerOnVM_Task} progress is 1%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 793.027296] env[61839]: DEBUG oslo_vmware.api [None req-85100d62-2957-4746-911c-e5a5695cb3f6 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Task: {'id': task-1314344, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 793.063349] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0e959dc4-0080-49d1-93fb-cad2bd59d9d7 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] Acquiring lock "2cb53e37-8b0b-48b7-a973-061b91df46df" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 793.063349] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0e959dc4-0080-49d1-93fb-cad2bd59d9d7 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] Lock "2cb53e37-8b0b-48b7-a973-061b91df46df" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 793.079766] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 793.351755] env[61839]: DEBUG nova.compute.manager [req-90b3d112-97d3-4a00-bcd2-d5cc0775acb5 req-a141bc21-af0e-44f2-8b98-a3f3e140cf8a service nova] [instance: 603191b6-a4b0-451b-b98b-f3dbfb684300] Received event network-vif-plugged-fc601f8f-cb33-41b2-9f00-9476cd3cbf01 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 793.351977] env[61839]: DEBUG oslo_concurrency.lockutils [req-90b3d112-97d3-4a00-bcd2-d5cc0775acb5 req-a141bc21-af0e-44f2-8b98-a3f3e140cf8a service nova] Acquiring lock "603191b6-a4b0-451b-b98b-f3dbfb684300-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 793.352201] env[61839]: DEBUG oslo_concurrency.lockutils [req-90b3d112-97d3-4a00-bcd2-d5cc0775acb5 req-a141bc21-af0e-44f2-8b98-a3f3e140cf8a service nova] Lock "603191b6-a4b0-451b-b98b-f3dbfb684300-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 793.352373] env[61839]: DEBUG oslo_concurrency.lockutils [req-90b3d112-97d3-4a00-bcd2-d5cc0775acb5 req-a141bc21-af0e-44f2-8b98-a3f3e140cf8a service nova] Lock "603191b6-a4b0-451b-b98b-f3dbfb684300-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 793.352541] env[61839]: DEBUG nova.compute.manager [req-90b3d112-97d3-4a00-bcd2-d5cc0775acb5 req-a141bc21-af0e-44f2-8b98-a3f3e140cf8a service nova] [instance: 603191b6-a4b0-451b-b98b-f3dbfb684300] No waiting events found dispatching network-vif-plugged-fc601f8f-cb33-41b2-9f00-9476cd3cbf01 {{(pid=61839) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 793.352702] env[61839]: WARNING nova.compute.manager [req-90b3d112-97d3-4a00-bcd2-d5cc0775acb5 req-a141bc21-af0e-44f2-8b98-a3f3e140cf8a service nova] [instance: 603191b6-a4b0-451b-b98b-f3dbfb684300] Received unexpected event network-vif-plugged-fc601f8f-cb33-41b2-9f00-9476cd3cbf01 for instance with vm_state building and task_state spawning. [ 793.389203] env[61839]: DEBUG nova.compute.manager [None req-0ff6597c-ab3c-4cf9-b3ae-5fc8fc4a3084 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 047080fa-8781-47b1-89d8-2e4c8031b164] Starting instance... {{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 793.436845] env[61839]: DEBUG nova.network.neutron [None req-fdadb98b-f7ca-40ff-9eb5-8f15cbae89cd tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 603191b6-a4b0-451b-b98b-f3dbfb684300] Successfully updated port: fc601f8f-cb33-41b2-9f00-9476cd3cbf01 {{(pid=61839) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 793.469730] env[61839]: DEBUG oslo_vmware.api [None req-ebcaba21-dc43-4473-b939-c569f2c1af99 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Task: {'id': task-1314343, 'name': PowerOnVM_Task} progress is 1%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 793.526927] env[61839]: DEBUG oslo_vmware.api [None req-85100d62-2957-4746-911c-e5a5695cb3f6 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Task: {'id': task-1314344, 'name': ReconfigVM_Task, 'duration_secs': 0.739427} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 793.527240] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-85100d62-2957-4746-911c-e5a5695cb3f6 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 5c29c188-a34b-4751-9f8b-166af7b15088] Reconfigured VM instance instance-00000038 to attach disk [datastore1] 5c29c188-a34b-4751-9f8b-166af7b15088/5c29c188-a34b-4751-9f8b-166af7b15088.vmdk or device None with type sparse {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 793.527878] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-29f8dd43-df1f-4f0c-abe8-9244188cb45c {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.534035] env[61839]: DEBUG oslo_vmware.api [None req-85100d62-2957-4746-911c-e5a5695cb3f6 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Waiting for the task: (returnval){ [ 793.534035] env[61839]: value = "task-1314345" [ 793.534035] env[61839]: _type = "Task" [ 793.534035] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 793.541680] env[61839]: DEBUG oslo_vmware.api [None req-85100d62-2957-4746-911c-e5a5695cb3f6 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Task: {'id': task-1314345, 'name': Rename_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 793.581626] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance 6f43abec-51e2-40e4-8a0f-5a8617a9a9f8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 793.913159] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0ff6597c-ab3c-4cf9-b3ae-5fc8fc4a3084 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 793.943095] env[61839]: DEBUG oslo_concurrency.lockutils [None req-fdadb98b-f7ca-40ff-9eb5-8f15cbae89cd tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Acquiring lock "refresh_cache-603191b6-a4b0-451b-b98b-f3dbfb684300" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 793.943231] env[61839]: DEBUG oslo_concurrency.lockutils [None req-fdadb98b-f7ca-40ff-9eb5-8f15cbae89cd tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Acquired lock "refresh_cache-603191b6-a4b0-451b-b98b-f3dbfb684300" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 793.943856] env[61839]: DEBUG nova.network.neutron [None req-fdadb98b-f7ca-40ff-9eb5-8f15cbae89cd tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 603191b6-a4b0-451b-b98b-f3dbfb684300] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 793.970888] env[61839]: DEBUG oslo_vmware.api [None req-ebcaba21-dc43-4473-b939-c569f2c1af99 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Task: {'id': task-1314343, 'name': PowerOnVM_Task, 'duration_secs': 2.006901} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 793.971180] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-ebcaba21-dc43-4473-b939-c569f2c1af99 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: e65da0fd-e877-4b25-a319-e4d65397056a] Powered on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 793.971453] env[61839]: INFO nova.compute.manager [None req-ebcaba21-dc43-4473-b939-c569f2c1af99 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: e65da0fd-e877-4b25-a319-e4d65397056a] Took 10.12 seconds to spawn the instance on the hypervisor. 
[ 793.971663] env[61839]: DEBUG nova.compute.manager [None req-ebcaba21-dc43-4473-b939-c569f2c1af99 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: e65da0fd-e877-4b25-a319-e4d65397056a] Checking state {{(pid=61839) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 793.972452] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0a0b3c1-78d7-4159-b887-066ba619eca7 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.044833] env[61839]: DEBUG oslo_vmware.api [None req-85100d62-2957-4746-911c-e5a5695cb3f6 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Task: {'id': task-1314345, 'name': Rename_Task, 'duration_secs': 0.194848} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 794.045050] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-85100d62-2957-4746-911c-e5a5695cb3f6 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 5c29c188-a34b-4751-9f8b-166af7b15088] Powering on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 794.045303] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1fdedbe6-776f-4354-aca0-3e6b2935c38b {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.052138] env[61839]: DEBUG oslo_vmware.api [None req-85100d62-2957-4746-911c-e5a5695cb3f6 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Waiting for the task: (returnval){ [ 794.052138] env[61839]: value = "task-1314346" [ 794.052138] env[61839]: _type = "Task" [ 794.052138] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 794.063773] env[61839]: DEBUG oslo_vmware.api [None req-85100d62-2957-4746-911c-e5a5695cb3f6 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Task: {'id': task-1314346, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 794.085024] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance 047080fa-8781-47b1-89d8-2e4c8031b164 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 794.488972] env[61839]: DEBUG nova.network.neutron [None req-fdadb98b-f7ca-40ff-9eb5-8f15cbae89cd tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 603191b6-a4b0-451b-b98b-f3dbfb684300] Instance cache missing network info. 
{{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 794.490913] env[61839]: INFO nova.compute.manager [None req-ebcaba21-dc43-4473-b939-c569f2c1af99 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: e65da0fd-e877-4b25-a319-e4d65397056a] Took 30.52 seconds to build instance. [ 794.561987] env[61839]: DEBUG oslo_vmware.api [None req-85100d62-2957-4746-911c-e5a5695cb3f6 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Task: {'id': task-1314346, 'name': PowerOnVM_Task} progress is 64%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 794.589886] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance 406da948-71c7-4c28-9ee3-10af64b1ab51 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 794.685685] env[61839]: DEBUG nova.network.neutron [None req-fdadb98b-f7ca-40ff-9eb5-8f15cbae89cd tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 603191b6-a4b0-451b-b98b-f3dbfb684300] Updating instance_info_cache with network_info: [{"id": "fc601f8f-cb33-41b2-9f00-9476cd3cbf01", "address": "fa:16:3e:31:8e:b6", "network": {"id": "6f1d4bda-c333-4556-9530-df719c058a5b", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1248001983-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "223d94c193814f649b5d1f35e3756071", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2180b40f-2bb0-47da-ba80-c2fbe7f98af0", "external-id": "nsx-vlan-transportzone-970", "segmentation_id": 970, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfc601f8f-cb", "ovs_interfaceid": "fc601f8f-cb33-41b2-9f00-9476cd3cbf01", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 794.994052] env[61839]: DEBUG oslo_concurrency.lockutils [None req-ebcaba21-dc43-4473-b939-c569f2c1af99 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Lock "e65da0fd-e877-4b25-a319-e4d65397056a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 134.128s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 795.063091] env[61839]: DEBUG oslo_vmware.api [None req-85100d62-2957-4746-911c-e5a5695cb3f6 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Task: {'id': task-1314346, 'name': PowerOnVM_Task,
'duration_secs': 0.942723} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 795.063372] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-85100d62-2957-4746-911c-e5a5695cb3f6 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 5c29c188-a34b-4751-9f8b-166af7b15088] Powered on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 795.063604] env[61839]: INFO nova.compute.manager [None req-85100d62-2957-4746-911c-e5a5695cb3f6 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 5c29c188-a34b-4751-9f8b-166af7b15088] Took 8.77 seconds to spawn the instance on the hypervisor. [ 795.063818] env[61839]: DEBUG nova.compute.manager [None req-85100d62-2957-4746-911c-e5a5695cb3f6 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 5c29c188-a34b-4751-9f8b-166af7b15088] Checking state {{(pid=61839) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 795.064578] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-940b3a30-60ff-4af7-98e5-b8c97093bb36 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.095256] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance 694a5d4b-3673-406b-a24a-d37fad33e549 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 795.187370] env[61839]: DEBUG oslo_concurrency.lockutils [None req-fdadb98b-f7ca-40ff-9eb5-8f15cbae89cd tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Releasing lock "refresh_cache-603191b6-a4b0-451b-b98b-f3dbfb684300" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 795.187724] env[61839]: DEBUG nova.compute.manager [None req-fdadb98b-f7ca-40ff-9eb5-8f15cbae89cd tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 603191b6-a4b0-451b-b98b-f3dbfb684300] Instance network_info: |[{"id": "fc601f8f-cb33-41b2-9f00-9476cd3cbf01", "address": "fa:16:3e:31:8e:b6", "network": {"id": "6f1d4bda-c333-4556-9530-df719c058a5b", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1248001983-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "223d94c193814f649b5d1f35e3756071", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2180b40f-2bb0-47da-ba80-c2fbe7f98af0", "external-id": "nsx-vlan-transportzone-970", "segmentation_id": 970, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfc601f8f-cb", "ovs_interfaceid": "fc601f8f-cb33-41b2-9f00-9476cd3cbf01", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 795.188143] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-fdadb98b-f7ca-40ff-9eb5-8f15cbae89cd tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 603191b6-a4b0-451b-b98b-f3dbfb684300] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:31:8e:b6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2180b40f-2bb0-47da-ba80-c2fbe7f98af0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'fc601f8f-cb33-41b2-9f00-9476cd3cbf01', 'vif_model': 'vmxnet3'}] {{(pid=61839) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 795.195645] env[61839]: DEBUG oslo.service.loopingcall [None req-fdadb98b-f7ca-40ff-9eb5-8f15cbae89cd tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 795.195855] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 603191b6-a4b0-451b-b98b-f3dbfb684300] Creating VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 795.196113] env[61839]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-35ad7551-839e-4126-b5b0-fa8bb2ac096a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.215522] env[61839]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 795.215522] env[61839]: value = "task-1314347" [ 795.215522] env[61839]: _type = "Task" [ 795.215522] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 795.224340] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314347, 'name': CreateVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 795.382767] env[61839]: DEBUG nova.compute.manager [req-d4517824-d8d4-4f97-9fcb-a08c0310c149 req-aa594278-5622-44b5-8211-8eb571839f18 service nova] [instance: 603191b6-a4b0-451b-b98b-f3dbfb684300] Received event network-changed-fc601f8f-cb33-41b2-9f00-9476cd3cbf01 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 795.383015] env[61839]: DEBUG nova.compute.manager [req-d4517824-d8d4-4f97-9fcb-a08c0310c149 req-aa594278-5622-44b5-8211-8eb571839f18 service nova] [instance: 603191b6-a4b0-451b-b98b-f3dbfb684300] Refreshing instance network info cache due to event network-changed-fc601f8f-cb33-41b2-9f00-9476cd3cbf01. {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 795.383284] env[61839]: DEBUG oslo_concurrency.lockutils [req-d4517824-d8d4-4f97-9fcb-a08c0310c149 req-aa594278-5622-44b5-8211-8eb571839f18 service nova] Acquiring lock "refresh_cache-603191b6-a4b0-451b-b98b-f3dbfb684300" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 795.383466] env[61839]: DEBUG oslo_concurrency.lockutils [req-d4517824-d8d4-4f97-9fcb-a08c0310c149 req-aa594278-5622-44b5-8211-8eb571839f18 service nova] Acquired lock "refresh_cache-603191b6-a4b0-451b-b98b-f3dbfb684300" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 795.383682] env[61839]: DEBUG nova.network.neutron [req-d4517824-d8d4-4f97-9fcb-a08c0310c149 req-aa594278-5622-44b5-8211-8eb571839f18 service nova] [instance: 603191b6-a4b0-451b-b98b-f3dbfb684300] Refreshing network info cache for port fc601f8f-cb33-41b2-9f00-9476cd3cbf01 {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 795.458129] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cb1a0d7-b7e8-4d59-bf3d-c2a7cb9ad06f {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.465711] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-b22f1415-8152-49d0-841f-bb10b32c75f1 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: e65da0fd-e877-4b25-a319-e4d65397056a] Suspending the VM {{(pid=61839) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 795.465984] env[61839]: DEBUG oslo_vmware.service [-] Invoking 
VirtualMachine.SuspendVM_Task with opID=oslo.vmware-5cccc14c-9dd2-4598-8764-7a90cabb5aad {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.472666] env[61839]: DEBUG oslo_vmware.api [None req-b22f1415-8152-49d0-841f-bb10b32c75f1 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Waiting for the task: (returnval){ [ 795.472666] env[61839]: value = "task-1314348" [ 795.472666] env[61839]: _type = "Task" [ 795.472666] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 795.481725] env[61839]: DEBUG oslo_vmware.api [None req-b22f1415-8152-49d0-841f-bb10b32c75f1 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Task: {'id': task-1314348, 'name': SuspendVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 795.496576] env[61839]: DEBUG nova.compute.manager [None req-6c4e691d-a55a-4273-a9e4-fb954224d381 tempest-ServerMetadataNegativeTestJSON-1868427009 tempest-ServerMetadataNegativeTestJSON-1868427009-project-member] [instance: 406da948-71c7-4c28-9ee3-10af64b1ab51] Starting instance... {{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 795.584176] env[61839]: INFO nova.compute.manager [None req-85100d62-2957-4746-911c-e5a5695cb3f6 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 5c29c188-a34b-4751-9f8b-166af7b15088] Took 26.79 seconds to build instance. [ 795.598565] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance 39728872-2d30-48eb-90da-412f1e45971c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 795.727460] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314347, 'name': CreateVM_Task, 'duration_secs': 0.495337} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 795.727690] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 603191b6-a4b0-451b-b98b-f3dbfb684300] Created VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 795.729701] env[61839]: DEBUG oslo_concurrency.lockutils [None req-fdadb98b-f7ca-40ff-9eb5-8f15cbae89cd tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 795.729701] env[61839]: DEBUG oslo_concurrency.lockutils [None req-fdadb98b-f7ca-40ff-9eb5-8f15cbae89cd tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 795.729701] env[61839]: DEBUG oslo_concurrency.lockutils [None req-fdadb98b-f7ca-40ff-9eb5-8f15cbae89cd tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 795.729701] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cdf87d6a-24a3-4e2a-af14-80845f2b6134 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.737065] env[61839]: DEBUG oslo_vmware.api [None req-fdadb98b-f7ca-40ff-9eb5-8f15cbae89cd tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Waiting for the task: (returnval){ [ 795.737065] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]5265fce3-9693-d462-64c8-295bf3499c56" [ 795.737065] env[61839]: _type = "Task" [ 795.737065] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 795.747167] env[61839]: DEBUG oslo_vmware.api [None req-fdadb98b-f7ca-40ff-9eb5-8f15cbae89cd tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]5265fce3-9693-d462-64c8-295bf3499c56, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 795.983766] env[61839]: DEBUG oslo_vmware.api [None req-b22f1415-8152-49d0-841f-bb10b32c75f1 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Task: {'id': task-1314348, 'name': SuspendVM_Task} progress is 58%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.023283] env[61839]: DEBUG oslo_concurrency.lockutils [None req-6c4e691d-a55a-4273-a9e4-fb954224d381 tempest-ServerMetadataNegativeTestJSON-1868427009 tempest-ServerMetadataNegativeTestJSON-1868427009-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 796.042243] env[61839]: DEBUG oslo_concurrency.lockutils [None req-ae67ebcf-1bc3-48ee-a99f-0d94b4c40112 tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] Acquiring lock "687e6bf8-cf2a-4a9f-ad87-b42c4d0ced0a" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 796.042493] env[61839]: DEBUG oslo_concurrency.lockutils [None req-ae67ebcf-1bc3-48ee-a99f-0d94b4c40112 tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] Lock "687e6bf8-cf2a-4a9f-ad87-b42c4d0ced0a" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 796.042697] env[61839]: DEBUG oslo_concurrency.lockutils [None req-ae67ebcf-1bc3-48ee-a99f-0d94b4c40112 tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] Acquiring lock "687e6bf8-cf2a-4a9f-ad87-b42c4d0ced0a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 796.042879] env[61839]: DEBUG oslo_concurrency.lockutils [None req-ae67ebcf-1bc3-48ee-a99f-0d94b4c40112 tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] Lock "687e6bf8-cf2a-4a9f-ad87-b42c4d0ced0a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 796.043058] env[61839]: DEBUG oslo_concurrency.lockutils [None req-ae67ebcf-1bc3-48ee-a99f-0d94b4c40112 tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] Lock "687e6bf8-cf2a-4a9f-ad87-b42c4d0ced0a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 796.045411] env[61839]: INFO nova.compute.manager [None req-ae67ebcf-1bc3-48ee-a99f-0d94b4c40112 tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] [instance: 687e6bf8-cf2a-4a9f-ad87-b42c4d0ced0a] Terminating instance [ 796.047459] env[61839]: DEBUG nova.compute.manager [None req-ae67ebcf-1bc3-48ee-a99f-0d94b4c40112 tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] [instance: 687e6bf8-cf2a-4a9f-ad87-b42c4d0ced0a] Start destroying the instance on the hypervisor.
{{(pid=61839) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 796.047664] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-ae67ebcf-1bc3-48ee-a99f-0d94b4c40112 tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] [instance: 687e6bf8-cf2a-4a9f-ad87-b42c4d0ced0a] Destroying instance {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 796.048515] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5e53cc9-4e29-4683-ae7e-71fea33e6eb0 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.056655] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-ae67ebcf-1bc3-48ee-a99f-0d94b4c40112 tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] [instance: 687e6bf8-cf2a-4a9f-ad87-b42c4d0ced0a] Powering off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 796.056928] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e1cb049a-ed78-485b-8689-1356c89808f6 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.064462] env[61839]: DEBUG oslo_vmware.api [None req-ae67ebcf-1bc3-48ee-a99f-0d94b4c40112 tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] Waiting for the task: (returnval){ [ 796.064462] env[61839]: value = "task-1314349" [ 796.064462] env[61839]: _type = "Task" [ 796.064462] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 796.072826] env[61839]: DEBUG oslo_vmware.api [None req-ae67ebcf-1bc3-48ee-a99f-0d94b4c40112 tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] Task: {'id': task-1314349, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.086463] env[61839]: DEBUG oslo_concurrency.lockutils [None req-85100d62-2957-4746-911c-e5a5695cb3f6 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Lock "5c29c188-a34b-4751-9f8b-166af7b15088" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 133.948s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 796.102400] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance 49d4720b-83e3-47d9-b727-5bb255de2e7c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 796.219502] env[61839]: DEBUG nova.network.neutron [req-d4517824-d8d4-4f97-9fcb-a08c0310c149 req-aa594278-5622-44b5-8211-8eb571839f18 service nova] [instance: 603191b6-a4b0-451b-b98b-f3dbfb684300] Updated VIF entry in instance network info cache for port fc601f8f-cb33-41b2-9f00-9476cd3cbf01.
{{(pid=61839) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 796.219886] env[61839]: DEBUG nova.network.neutron [req-d4517824-d8d4-4f97-9fcb-a08c0310c149 req-aa594278-5622-44b5-8211-8eb571839f18 service nova] [instance: 603191b6-a4b0-451b-b98b-f3dbfb684300] Updating instance_info_cache with network_info: [{"id": "fc601f8f-cb33-41b2-9f00-9476cd3cbf01", "address": "fa:16:3e:31:8e:b6", "network": {"id": "6f1d4bda-c333-4556-9530-df719c058a5b", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1248001983-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "223d94c193814f649b5d1f35e3756071", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2180b40f-2bb0-47da-ba80-c2fbe7f98af0", "external-id": "nsx-vlan-transportzone-970", "segmentation_id": 970, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfc601f8f-cb", "ovs_interfaceid": "fc601f8f-cb33-41b2-9f00-9476cd3cbf01", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 796.248504] env[61839]: DEBUG oslo_vmware.api [None req-fdadb98b-f7ca-40ff-9eb5-8f15cbae89cd tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]5265fce3-9693-d462-64c8-295bf3499c56, 'name': SearchDatastore_Task, 'duration_secs': 0.037312} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 796.248812] env[61839]: DEBUG oslo_concurrency.lockutils [None req-fdadb98b-f7ca-40ff-9eb5-8f15cbae89cd tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 796.249064] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-fdadb98b-f7ca-40ff-9eb5-8f15cbae89cd tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 603191b6-a4b0-451b-b98b-f3dbfb684300] Processing image e497cc62-282a-4a70-9770-22d80d8a1013 {{(pid=61839) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 796.249306] env[61839]: DEBUG oslo_concurrency.lockutils [None req-fdadb98b-f7ca-40ff-9eb5-8f15cbae89cd tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 796.249456] env[61839]: DEBUG oslo_concurrency.lockutils [None req-fdadb98b-f7ca-40ff-9eb5-8f15cbae89cd tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 796.249637] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-fdadb98b-f7ca-40ff-9eb5-8f15cbae89cd tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 796.249914] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-52454e55-0dc5-41d3-b871-bd84c8249cb6 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.258961] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-fdadb98b-f7ca-40ff-9eb5-8f15cbae89cd tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 796.259178] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-fdadb98b-f7ca-40ff-9eb5-8f15cbae89cd tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61839) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 796.259937] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-84e71d7f-54d8-4f51-903e-d8500a74d4f7 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.266198] env[61839]: DEBUG oslo_vmware.api [None req-fdadb98b-f7ca-40ff-9eb5-8f15cbae89cd tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Waiting for the task: (returnval){ [ 796.266198] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52d21ede-77a3-5875-99a1-f850dc267ccd" [ 796.266198] env[61839]: _type = "Task" [ 796.266198] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 796.274837] env[61839]: DEBUG oslo_vmware.api [None req-fdadb98b-f7ca-40ff-9eb5-8f15cbae89cd tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52d21ede-77a3-5875-99a1-f850dc267ccd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.485922] env[61839]: DEBUG oslo_vmware.api [None req-b22f1415-8152-49d0-841f-bb10b32c75f1 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Task: {'id': task-1314348, 'name': SuspendVM_Task, 'duration_secs': 0.925917} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 796.486278] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-b22f1415-8152-49d0-841f-bb10b32c75f1 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: e65da0fd-e877-4b25-a319-e4d65397056a] Suspended the VM {{(pid=61839) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 796.486407] env[61839]: DEBUG nova.compute.manager [None req-b22f1415-8152-49d0-841f-bb10b32c75f1 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: e65da0fd-e877-4b25-a319-e4d65397056a] Checking state {{(pid=61839) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 796.487186] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c5ca36c-8c36-41b9-9df7-e45bb971c2ec {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.574470] env[61839]: DEBUG oslo_vmware.api [None req-ae67ebcf-1bc3-48ee-a99f-0d94b4c40112 tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] Task: {'id': task-1314349, 'name': PowerOffVM_Task, 'duration_secs': 0.393198} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 796.574774] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-ae67ebcf-1bc3-48ee-a99f-0d94b4c40112 tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] [instance: 687e6bf8-cf2a-4a9f-ad87-b42c4d0ced0a] Powered off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 796.574953] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-ae67ebcf-1bc3-48ee-a99f-0d94b4c40112 tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] [instance: 687e6bf8-cf2a-4a9f-ad87-b42c4d0ced0a] Unregistering the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 796.575228] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-eddf6435-8d7f-4c68-94a3-a465396f94bd {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.588921] env[61839]: DEBUG nova.compute.manager [None req-a242d3b0-5131-4888-830a-8c848380688b tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: 694a5d4b-3673-406b-a24a-d37fad33e549] Starting instance... {{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 796.607055] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance 62959833-5834-4c0a-bf4e-3ac1157b3b0c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 796.666924] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-ae67ebcf-1bc3-48ee-a99f-0d94b4c40112 tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] [instance: 687e6bf8-cf2a-4a9f-ad87-b42c4d0ced0a] Unregistered the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 796.667102] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-ae67ebcf-1bc3-48ee-a99f-0d94b4c40112 tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] [instance: 687e6bf8-cf2a-4a9f-ad87-b42c4d0ced0a] Deleting contents of the VM from datastore datastore1 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 796.667263] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-ae67ebcf-1bc3-48ee-a99f-0d94b4c40112 tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] Deleting the datastore file [datastore1] 687e6bf8-cf2a-4a9f-ad87-b42c4d0ced0a {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 796.667532] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2cd5683c-e0ee-49a9-ad1d-bedc8a728d4f {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.674840] env[61839]: DEBUG oslo_vmware.api [None req-ae67ebcf-1bc3-48ee-a99f-0d94b4c40112 tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] Waiting for the task: (returnval){ [ 796.674840] env[61839]: value = "task-1314351" [ 796.674840] env[61839]: _type = "Task" [ 796.674840] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 796.682909] env[61839]: DEBUG oslo_vmware.api [None req-ae67ebcf-1bc3-48ee-a99f-0d94b4c40112 tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] Task: {'id': task-1314351, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.723292] env[61839]: DEBUG oslo_concurrency.lockutils [req-d4517824-d8d4-4f97-9fcb-a08c0310c149 req-aa594278-5622-44b5-8211-8eb571839f18 service nova] Releasing lock "refresh_cache-603191b6-a4b0-451b-b98b-f3dbfb684300" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 796.777785] env[61839]: DEBUG oslo_vmware.api [None req-fdadb98b-f7ca-40ff-9eb5-8f15cbae89cd tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52d21ede-77a3-5875-99a1-f850dc267ccd, 'name': SearchDatastore_Task, 'duration_secs': 0.012663} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 796.778707] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5aae832d-6ba0-4061-9d1e-335c49b4e19b {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.784251] env[61839]: DEBUG oslo_vmware.api [None req-fdadb98b-f7ca-40ff-9eb5-8f15cbae89cd tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Waiting for the task: (returnval){ [ 796.784251] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]5243c848-5313-d824-56a9-095050c7a25d" [ 796.784251] env[61839]: _type = "Task" [ 796.784251] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 796.792342] env[61839]: DEBUG oslo_vmware.api [None req-fdadb98b-f7ca-40ff-9eb5-8f15cbae89cd tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]5243c848-5313-d824-56a9-095050c7a25d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 797.110889] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance 40c54d84-8e50-483a-b4e0-5f1cc72b0880 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 797.113211] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a242d3b0-5131-4888-830a-8c848380688b tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 797.186068] env[61839]: DEBUG oslo_vmware.api [None req-ae67ebcf-1bc3-48ee-a99f-0d94b4c40112 tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] Task: {'id': task-1314351, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.494613} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 797.186068] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-ae67ebcf-1bc3-48ee-a99f-0d94b4c40112 tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] Deleted the datastore file {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 797.186349] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-ae67ebcf-1bc3-48ee-a99f-0d94b4c40112 tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] [instance: 687e6bf8-cf2a-4a9f-ad87-b42c4d0ced0a] Deleted contents of the VM from datastore datastore1 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 797.186413] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-ae67ebcf-1bc3-48ee-a99f-0d94b4c40112 tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] [instance: 687e6bf8-cf2a-4a9f-ad87-b42c4d0ced0a] Instance destroyed {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 797.186593] env[61839]: INFO nova.compute.manager [None req-ae67ebcf-1bc3-48ee-a99f-0d94b4c40112 tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] [instance: 687e6bf8-cf2a-4a9f-ad87-b42c4d0ced0a] Took 1.14 seconds to destroy the instance on the hypervisor. [ 797.186839] env[61839]: DEBUG oslo.service.loopingcall [None req-ae67ebcf-1bc3-48ee-a99f-0d94b4c40112 tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 797.187040] env[61839]: DEBUG nova.compute.manager [-] [instance: 687e6bf8-cf2a-4a9f-ad87-b42c4d0ced0a] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 797.187143] env[61839]: DEBUG nova.network.neutron [-] [instance: 687e6bf8-cf2a-4a9f-ad87-b42c4d0ced0a] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 797.297017] env[61839]: DEBUG oslo_vmware.api [None req-fdadb98b-f7ca-40ff-9eb5-8f15cbae89cd tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]5243c848-5313-d824-56a9-095050c7a25d, 'name': SearchDatastore_Task, 'duration_secs': 0.010872} completed successfully.
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 797.297307] env[61839]: DEBUG oslo_concurrency.lockutils [None req-fdadb98b-f7ca-40ff-9eb5-8f15cbae89cd tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 797.297591] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-fdadb98b-f7ca-40ff-9eb5-8f15cbae89cd tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk to [datastore2] 603191b6-a4b0-451b-b98b-f3dbfb684300/603191b6-a4b0-451b-b98b-f3dbfb684300.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 797.297877] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3cee0bd4-68b3-4c09-8237-8695d6dae98c {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.304669] env[61839]: DEBUG oslo_vmware.api [None req-fdadb98b-f7ca-40ff-9eb5-8f15cbae89cd tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Waiting for the task: (returnval){ [ 797.304669] env[61839]: value = "task-1314352" [ 797.304669] env[61839]: _type = "Task" [ 797.304669] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 797.314538] env[61839]: DEBUG oslo_vmware.api [None req-fdadb98b-f7ca-40ff-9eb5-8f15cbae89cd tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Task: {'id': task-1314352, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 797.614710] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance 3f86a0d5-30fd-42cc-bd40-14bce9d0e56f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 797.615052] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Total usable vcpus: 48, total allocated vcpus: 6 {{(pid=61839) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 797.615213] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1664MB phys_disk=200GB used_disk=6GB total_vcpus=48 used_vcpus=6 pci_stats=[] {{(pid=61839) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 797.817108] env[61839]: DEBUG oslo_vmware.api [None req-fdadb98b-f7ca-40ff-9eb5-8f15cbae89cd tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Task: {'id': task-1314352, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 797.951637] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e667f7bb-f7d6-4b14-a3ea-2010fea6c424 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.959819] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3dad88c3-9ef5-4c43-aad8-ee4429e117e7 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.994436] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0379e8c-93f7-489b-9c70-f9438fb693d4 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.002112] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25959a65-e9fd-437c-b22e-86361d7b8442 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.020529] env[61839]: DEBUG nova.compute.provider_tree [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 798.213343] env[61839]: DEBUG nova.compute.manager [req-8db333bc-0ed7-45b4-a52c-771c9a1a500a req-5fcfcd40-08ee-496d-813e-de90b6f9f4fe service nova] [instance: 687e6bf8-cf2a-4a9f-ad87-b42c4d0ced0a] Received event network-vif-deleted-bcfc5607-df16-4357-b78d-33b0451f9bb6 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 798.213696] env[61839]: INFO nova.compute.manager [req-8db333bc-0ed7-45b4-a52c-771c9a1a500a req-5fcfcd40-08ee-496d-813e-de90b6f9f4fe service nova] [instance: 687e6bf8-cf2a-4a9f-ad87-b42c4d0ced0a] Neutron deleted interface bcfc5607-df16-4357-b78d-33b0451f9bb6; detaching it from the instance and deleting it from the info cache [ 798.213981] env[61839]: DEBUG nova.network.neutron [req-8db333bc-0ed7-45b4-a52c-771c9a1a500a req-5fcfcd40-08ee-496d-813e-de90b6f9f4fe service nova] [instance: 687e6bf8-cf2a-4a9f-ad87-b42c4d0ced0a] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 798.316642] env[61839]: DEBUG oslo_vmware.api [None req-fdadb98b-f7ca-40ff-9eb5-8f15cbae89cd tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Task: {'id': task-1314352, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.593519} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 798.316940] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-fdadb98b-f7ca-40ff-9eb5-8f15cbae89cd tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk to [datastore2] 603191b6-a4b0-451b-b98b-f3dbfb684300/603191b6-a4b0-451b-b98b-f3dbfb684300.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 798.317167] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-fdadb98b-f7ca-40ff-9eb5-8f15cbae89cd tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 603191b6-a4b0-451b-b98b-f3dbfb684300] Extending root virtual disk to 1048576 {{(pid=61839) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 798.317422] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b31eaa7f-5aa6-4fcd-b3c7-d4add9334fe0 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.324770] env[61839]: DEBUG oslo_vmware.api [None req-fdadb98b-f7ca-40ff-9eb5-8f15cbae89cd tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Waiting for the task: (returnval){ [ 798.324770] env[61839]: value = "task-1314353" [ 798.324770] env[61839]: _type = "Task" [ 798.324770] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 798.333125] env[61839]: DEBUG oslo_vmware.api [None req-fdadb98b-f7ca-40ff-9eb5-8f15cbae89cd tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Task: {'id': task-1314353, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 798.386616] env[61839]: DEBUG nova.network.neutron [-] [instance: 687e6bf8-cf2a-4a9f-ad87-b42c4d0ced0a] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 798.525365] env[61839]: DEBUG nova.scheduler.client.report [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 798.716708] env[61839]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d4f5d052-603b-4b21-8c45-86194f80069f {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.726098] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f428654-9b14-4408-85aa-e39a7b6932b8 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.751231] env[61839]: DEBUG nova.compute.manager [req-8db333bc-0ed7-45b4-a52c-771c9a1a500a req-5fcfcd40-08ee-496d-813e-de90b6f9f4fe service nova] [instance: 687e6bf8-cf2a-4a9f-ad87-b42c4d0ced0a] Detach interface failed, port_id=bcfc5607-df16-4357-b78d-33b0451f9bb6, reason: Instance 687e6bf8-cf2a-4a9f-ad87-b42c4d0ced0a could not be found. {{(pid=61839) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 798.837029] env[61839]: DEBUG oslo_vmware.api [None req-fdadb98b-f7ca-40ff-9eb5-8f15cbae89cd tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Task: {'id': task-1314353, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.116008} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 798.837029] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-fdadb98b-f7ca-40ff-9eb5-8f15cbae89cd tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 603191b6-a4b0-451b-b98b-f3dbfb684300] Extended root virtual disk {{(pid=61839) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 798.837367] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f87a0abc-6128-4eb6-b746-18e2a7833bc1 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.859012] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-fdadb98b-f7ca-40ff-9eb5-8f15cbae89cd tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 603191b6-a4b0-451b-b98b-f3dbfb684300] Reconfiguring VM instance instance-00000039 to attach disk [datastore2] 603191b6-a4b0-451b-b98b-f3dbfb684300/603191b6-a4b0-451b-b98b-f3dbfb684300.vmdk or device None with type sparse {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 798.859295] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ef189ca4-f93b-45dd-b659-ef24e4a338b2 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.879107] env[61839]: DEBUG oslo_vmware.api [None req-fdadb98b-f7ca-40ff-9eb5-8f15cbae89cd tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Waiting for the task: (returnval){ [ 798.879107] env[61839]: value = "task-1314354" [ 798.879107] env[61839]: _type = "Task" [ 798.879107] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 798.886841] env[61839]: DEBUG oslo_vmware.api [None req-fdadb98b-f7ca-40ff-9eb5-8f15cbae89cd tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Task: {'id': task-1314354, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 798.888329] env[61839]: INFO nova.compute.manager [-] [instance: 687e6bf8-cf2a-4a9f-ad87-b42c4d0ced0a] Took 1.70 seconds to deallocate network for instance. 
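The task chain above (CopyVirtualDisk_Task, then ExtendVirtualDisk_Task, then ReconfigVM_Task, i.e. tasks 1314352 through 1314354) is the vmwareapi root-disk path for a sparse image: copy the cached VMDK out of devstack-image-cache_base into the instance directory, grow it to the flavor's root size, then reconfigure the VM to attach it. Note the extend target is expressed in KB: 1048576 KB is exactly 1 GiB, matching the DISK_GB: 1 allocation recorded for this host earlier. Below is a minimal sketch of that control flow and of the polling behind the repeated "progress is N%" entries; the vc client and its method names are illustrative stand-ins, not the actual nova or oslo.vmware code.

import time

def wait_for_task(poll, task_id, interval=0.5):
    # Poll a vSphere task until it reaches a terminal state. `poll` stands in
    # for the PropertyCollector round-trip oslo.vmware performs (cf. the
    # wait_for_task / _poll_task entries in this log).
    while True:
        info = poll(task_id)        # e.g. {'state': 'running', 'progress': 51}
        if info['state'] == 'success':
            return info.get('result')
        if info['state'] == 'error':
            raise RuntimeError(info.get('error'))
        time.sleep(interval)

def spawn_root_disk(vc, cached_vmdk, instance_vmdk, flavor_root_gb):
    # Copy the cached image, grow it to the flavor size, attach it -- the
    # three tasks above, in order.
    wait_for_task(vc.poll, vc.copy_virtual_disk(cached_vmdk, instance_vmdk))
    root_kb = flavor_root_gb * 1024 * 1024   # 1 GiB flavor -> 1048576 KB
    wait_for_task(vc.poll, vc.extend_virtual_disk(instance_vmdk, root_kb))
    wait_for_task(vc.poll, vc.reconfig_vm_attach_disk(instance_vmdk))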
[ 799.030261] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61839) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 799.030520] env[61839]: DEBUG oslo_concurrency.lockutils [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 8.512s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 799.030798] env[61839]: DEBUG oslo_concurrency.lockutils [None req-d7ee033d-2803-45eb-9965-f6766e5fb5bc tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 22.984s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 799.033719] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 799.033877] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Cleaning up deleted instances {{(pid=61839) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11228}} [ 799.240131] env[61839]: DEBUG nova.compute.manager [None req-3a0aa598-9065-4c6e-8d67-a77a540fab55 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: e65da0fd-e877-4b25-a319-e4d65397056a] Checking state {{(pid=61839) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 799.241102] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d26a3986-c228-4007-80fa-3ad25b54c321 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.389529] env[61839]: DEBUG oslo_vmware.api [None req-fdadb98b-f7ca-40ff-9eb5-8f15cbae89cd tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Task: {'id': task-1314354, 'name': ReconfigVM_Task, 'duration_secs': 0.305313} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 799.389898] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-fdadb98b-f7ca-40ff-9eb5-8f15cbae89cd tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 603191b6-a4b0-451b-b98b-f3dbfb684300] Reconfigured VM instance instance-00000039 to attach disk [datastore2] 603191b6-a4b0-451b-b98b-f3dbfb684300/603191b6-a4b0-451b-b98b-f3dbfb684300.vmdk or device None with type sparse {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 799.390531] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6c1011b6-aed4-41ca-9875-e127125de03f {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.394271] env[61839]: DEBUG oslo_concurrency.lockutils [None req-ae67ebcf-1bc3-48ee-a99f-0d94b4c40112 tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 799.397248] env[61839]: DEBUG oslo_vmware.api [None req-fdadb98b-f7ca-40ff-9eb5-8f15cbae89cd tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Waiting for the task: (returnval){ [ 799.397248] env[61839]: value = "task-1314355" [ 799.397248] env[61839]: _type = "Task" [ 799.397248] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 799.405161] env[61839]: DEBUG oslo_vmware.api [None req-fdadb98b-f7ca-40ff-9eb5-8f15cbae89cd tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Task: {'id': task-1314355, 'name': Rename_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 799.541604] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] There are 4 instances to clean {{(pid=61839) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11237}} [ 799.541954] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] [instance: 81ba4888-4b21-410f-ab86-a3068995836f] Instance has had 0 of 5 cleanup attempts {{(pid=61839) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 799.751738] env[61839]: INFO nova.compute.manager [None req-3a0aa598-9065-4c6e-8d67-a77a540fab55 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: e65da0fd-e877-4b25-a319-e4d65397056a] instance snapshotting [ 799.752057] env[61839]: WARNING nova.compute.manager [None req-3a0aa598-9065-4c6e-8d67-a77a540fab55 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: e65da0fd-e877-4b25-a319-e4d65397056a] trying to snapshot a non-running instance: (state: 7 expected: 1) [ 799.755139] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2e8a27e-6dda-4669-898e-b504bd202911 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.781339] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d7fcc50-35d0-434b-86a6-0e2f36a0afc6 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.854641] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a48ebae-847f-4216-ad20-c9d2aee7d517 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.863449] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e048b1de-46bf-48ce-ba69-bfa2d6709d15 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.893143] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-568edaa1-bca5-42e8-8a7b-b6fa045455b9 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.902561] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-248ea366-cfe8-4f60-8d87-25481fd674fc {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.910182] env[61839]: DEBUG oslo_vmware.api [None req-fdadb98b-f7ca-40ff-9eb5-8f15cbae89cd tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Task: {'id': task-1314355, 'name': Rename_Task, 'duration_secs': 0.127334} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 799.917555] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-fdadb98b-f7ca-40ff-9eb5-8f15cbae89cd tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 603191b6-a4b0-451b-b98b-f3dbfb684300] Powering on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 799.917991] env[61839]: DEBUG nova.compute.provider_tree [None req-d7ee033d-2803-45eb-9965-f6766e5fb5bc tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 799.919198] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f041afa1-71f9-4adf-ae4d-b18004d30d4a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.925959] env[61839]: DEBUG oslo_vmware.api [None req-fdadb98b-f7ca-40ff-9eb5-8f15cbae89cd tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Waiting for the task: (returnval){ [ 799.925959] env[61839]: value = "task-1314356" [ 799.925959] env[61839]: _type = "Task" [ 799.925959] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 799.935784] env[61839]: DEBUG oslo_vmware.api [None req-fdadb98b-f7ca-40ff-9eb5-8f15cbae89cd tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Task: {'id': task-1314356, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 800.046381] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] [instance: d8b0b608-d8ca-45ce-a113-ee96b5ef1a9d] Instance has had 0 of 5 cleanup attempts {{(pid=61839) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 800.292502] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-3a0aa598-9065-4c6e-8d67-a77a540fab55 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: e65da0fd-e877-4b25-a319-e4d65397056a] Creating Snapshot of the VM instance {{(pid=61839) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 800.292898] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-9ee1391b-1cbc-4257-8f61-b27ff45b4862 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.302300] env[61839]: DEBUG oslo_vmware.api [None req-3a0aa598-9065-4c6e-8d67-a77a540fab55 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Waiting for the task: (returnval){ [ 800.302300] env[61839]: value = "task-1314357" [ 800.302300] env[61839]: _type = "Task" [ 800.302300] env[61839]: } to complete. 
{{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 800.310321] env[61839]: DEBUG oslo_vmware.api [None req-3a0aa598-9065-4c6e-8d67-a77a540fab55 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Task: {'id': task-1314357, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 800.422028] env[61839]: DEBUG nova.scheduler.client.report [None req-d7ee033d-2803-45eb-9965-f6766e5fb5bc tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 800.439265] env[61839]: DEBUG oslo_vmware.api [None req-fdadb98b-f7ca-40ff-9eb5-8f15cbae89cd tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Task: {'id': task-1314356, 'name': PowerOnVM_Task} progress is 64%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 800.550067] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] [instance: 41823a25-5ff2-4838-854d-5bada8e5daca] Instance has had 0 of 5 cleanup attempts {{(pid=61839) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 800.812259] env[61839]: DEBUG oslo_vmware.api [None req-3a0aa598-9065-4c6e-8d67-a77a540fab55 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Task: {'id': task-1314357, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 800.928488] env[61839]: DEBUG oslo_concurrency.lockutils [None req-d7ee033d-2803-45eb-9965-f6766e5fb5bc tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.898s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 800.929163] env[61839]: ERROR nova.compute.manager [None req-d7ee033d-2803-45eb-9965-f6766e5fb5bc tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] [instance: e81bf730-9cf6-4728-aae4-4962115f8b6f] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port c5255785-dcf2-4894-812d-0a92bec33b87, please check neutron logs for more information. 
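The Lock "compute_resources" acquire/release pairs threaded through this trace come from oslo.concurrency, which logs how long each caller waited and how long the lock was held: the abort_instance_claim that released it just above (held 1.898s) had earlier waited 22.984s behind an _update_available_resource pass that held it for 8.512s. A minimal usage sketch with the real lockutils API follows; the decorated function body is an illustrative placeholder. (The traceback for the build failure that claim-abort was cleaning up after comes next.)

from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def update_usage():
    # All resource-tracker mutations run under the same named lock, so the
    # claim, abort and periodic-update paths serialize against each other;
    # the waited/held timings in this log measure exactly that contention.
    pass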
[ 800.929163] env[61839]: ERROR nova.compute.manager [instance: e81bf730-9cf6-4728-aae4-4962115f8b6f] Traceback (most recent call last): [ 800.929163] env[61839]: ERROR nova.compute.manager [instance: e81bf730-9cf6-4728-aae4-4962115f8b6f] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 800.929163] env[61839]: ERROR nova.compute.manager [instance: e81bf730-9cf6-4728-aae4-4962115f8b6f] self.driver.spawn(context, instance, image_meta, [ 800.929163] env[61839]: ERROR nova.compute.manager [instance: e81bf730-9cf6-4728-aae4-4962115f8b6f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 800.929163] env[61839]: ERROR nova.compute.manager [instance: e81bf730-9cf6-4728-aae4-4962115f8b6f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 800.929163] env[61839]: ERROR nova.compute.manager [instance: e81bf730-9cf6-4728-aae4-4962115f8b6f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 800.929163] env[61839]: ERROR nova.compute.manager [instance: e81bf730-9cf6-4728-aae4-4962115f8b6f] vm_ref = self.build_virtual_machine(instance, [ 800.929163] env[61839]: ERROR nova.compute.manager [instance: e81bf730-9cf6-4728-aae4-4962115f8b6f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 800.929163] env[61839]: ERROR nova.compute.manager [instance: e81bf730-9cf6-4728-aae4-4962115f8b6f] vif_infos = vmwarevif.get_vif_info(self._session, [ 800.929163] env[61839]: ERROR nova.compute.manager [instance: e81bf730-9cf6-4728-aae4-4962115f8b6f] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 800.929460] env[61839]: ERROR nova.compute.manager [instance: e81bf730-9cf6-4728-aae4-4962115f8b6f] for vif in network_info: [ 800.929460] env[61839]: ERROR nova.compute.manager [instance: e81bf730-9cf6-4728-aae4-4962115f8b6f] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 800.929460] env[61839]: ERROR nova.compute.manager [instance: e81bf730-9cf6-4728-aae4-4962115f8b6f] return self._sync_wrapper(fn, *args, **kwargs) [ 800.929460] env[61839]: ERROR nova.compute.manager [instance: e81bf730-9cf6-4728-aae4-4962115f8b6f] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 800.929460] env[61839]: ERROR nova.compute.manager [instance: e81bf730-9cf6-4728-aae4-4962115f8b6f] self.wait() [ 800.929460] env[61839]: ERROR nova.compute.manager [instance: e81bf730-9cf6-4728-aae4-4962115f8b6f] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 800.929460] env[61839]: ERROR nova.compute.manager [instance: e81bf730-9cf6-4728-aae4-4962115f8b6f] self[:] = self._gt.wait() [ 800.929460] env[61839]: ERROR nova.compute.manager [instance: e81bf730-9cf6-4728-aae4-4962115f8b6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 800.929460] env[61839]: ERROR nova.compute.manager [instance: e81bf730-9cf6-4728-aae4-4962115f8b6f] return self._exit_event.wait() [ 800.929460] env[61839]: ERROR nova.compute.manager [instance: e81bf730-9cf6-4728-aae4-4962115f8b6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 800.929460] env[61839]: ERROR nova.compute.manager [instance: e81bf730-9cf6-4728-aae4-4962115f8b6f] result = hub.switch() [ 800.929460] env[61839]: ERROR nova.compute.manager [instance: e81bf730-9cf6-4728-aae4-4962115f8b6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
800.929460] env[61839]: ERROR nova.compute.manager [instance: e81bf730-9cf6-4728-aae4-4962115f8b6f] return self.greenlet.switch() [ 800.929748] env[61839]: ERROR nova.compute.manager [instance: e81bf730-9cf6-4728-aae4-4962115f8b6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 800.929748] env[61839]: ERROR nova.compute.manager [instance: e81bf730-9cf6-4728-aae4-4962115f8b6f] result = function(*args, **kwargs) [ 800.929748] env[61839]: ERROR nova.compute.manager [instance: e81bf730-9cf6-4728-aae4-4962115f8b6f] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 800.929748] env[61839]: ERROR nova.compute.manager [instance: e81bf730-9cf6-4728-aae4-4962115f8b6f] return func(*args, **kwargs) [ 800.929748] env[61839]: ERROR nova.compute.manager [instance: e81bf730-9cf6-4728-aae4-4962115f8b6f] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 800.929748] env[61839]: ERROR nova.compute.manager [instance: e81bf730-9cf6-4728-aae4-4962115f8b6f] raise e [ 800.929748] env[61839]: ERROR nova.compute.manager [instance: e81bf730-9cf6-4728-aae4-4962115f8b6f] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 800.929748] env[61839]: ERROR nova.compute.manager [instance: e81bf730-9cf6-4728-aae4-4962115f8b6f] nwinfo = self.network_api.allocate_for_instance( [ 800.929748] env[61839]: ERROR nova.compute.manager [instance: e81bf730-9cf6-4728-aae4-4962115f8b6f] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 800.929748] env[61839]: ERROR nova.compute.manager [instance: e81bf730-9cf6-4728-aae4-4962115f8b6f] created_port_ids = self._update_ports_for_instance( [ 800.929748] env[61839]: ERROR nova.compute.manager [instance: e81bf730-9cf6-4728-aae4-4962115f8b6f] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 800.929748] env[61839]: ERROR nova.compute.manager [instance: e81bf730-9cf6-4728-aae4-4962115f8b6f] with excutils.save_and_reraise_exception(): [ 800.929748] env[61839]: ERROR nova.compute.manager [instance: e81bf730-9cf6-4728-aae4-4962115f8b6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 800.930042] env[61839]: ERROR nova.compute.manager [instance: e81bf730-9cf6-4728-aae4-4962115f8b6f] self.force_reraise() [ 800.930042] env[61839]: ERROR nova.compute.manager [instance: e81bf730-9cf6-4728-aae4-4962115f8b6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 800.930042] env[61839]: ERROR nova.compute.manager [instance: e81bf730-9cf6-4728-aae4-4962115f8b6f] raise self.value [ 800.930042] env[61839]: ERROR nova.compute.manager [instance: e81bf730-9cf6-4728-aae4-4962115f8b6f] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 800.930042] env[61839]: ERROR nova.compute.manager [instance: e81bf730-9cf6-4728-aae4-4962115f8b6f] updated_port = self._update_port( [ 800.930042] env[61839]: ERROR nova.compute.manager [instance: e81bf730-9cf6-4728-aae4-4962115f8b6f] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 800.930042] env[61839]: ERROR nova.compute.manager [instance: e81bf730-9cf6-4728-aae4-4962115f8b6f] _ensure_no_port_binding_failure(port) [ 800.930042] env[61839]: ERROR nova.compute.manager [instance: e81bf730-9cf6-4728-aae4-4962115f8b6f] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 800.930042] env[61839]: ERROR nova.compute.manager [instance: e81bf730-9cf6-4728-aae4-4962115f8b6f] raise exception.PortBindingFailed(port_id=port['id']) [ 800.930042] env[61839]: ERROR nova.compute.manager [instance: e81bf730-9cf6-4728-aae4-4962115f8b6f] nova.exception.PortBindingFailed: Binding failed for port c5255785-dcf2-4894-812d-0a92bec33b87, please check neutron logs for more information. [ 800.930042] env[61839]: ERROR nova.compute.manager [instance: e81bf730-9cf6-4728-aae4-4962115f8b6f] [ 800.930308] env[61839]: DEBUG nova.compute.utils [None req-d7ee033d-2803-45eb-9965-f6766e5fb5bc tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] [instance: e81bf730-9cf6-4728-aae4-4962115f8b6f] Binding failed for port c5255785-dcf2-4894-812d-0a92bec33b87, please check neutron logs for more information. {{(pid=61839) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 800.931293] env[61839]: DEBUG oslo_concurrency.lockutils [None req-d7ee033d-2803-45eb-9965-f6766e5fb5bc tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 23.529s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 800.937485] env[61839]: DEBUG nova.compute.manager [None req-d7ee033d-2803-45eb-9965-f6766e5fb5bc tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] [instance: e81bf730-9cf6-4728-aae4-4962115f8b6f] Build of instance e81bf730-9cf6-4728-aae4-4962115f8b6f was re-scheduled: Binding failed for port c5255785-dcf2-4894-812d-0a92bec33b87, please check neutron logs for more information. 
{{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 800.937936] env[61839]: DEBUG nova.compute.manager [None req-d7ee033d-2803-45eb-9965-f6766e5fb5bc tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] [instance: e81bf730-9cf6-4728-aae4-4962115f8b6f] Unplugging VIFs for instance {{(pid=61839) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 800.938187] env[61839]: DEBUG oslo_concurrency.lockutils [None req-d7ee033d-2803-45eb-9965-f6766e5fb5bc tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Acquiring lock "refresh_cache-e81bf730-9cf6-4728-aae4-4962115f8b6f" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 800.938337] env[61839]: DEBUG oslo_concurrency.lockutils [None req-d7ee033d-2803-45eb-9965-f6766e5fb5bc tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Acquired lock "refresh_cache-e81bf730-9cf6-4728-aae4-4962115f8b6f" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 800.938501] env[61839]: DEBUG nova.network.neutron [None req-d7ee033d-2803-45eb-9965-f6766e5fb5bc tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] [instance: e81bf730-9cf6-4728-aae4-4962115f8b6f] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 800.944907] env[61839]: DEBUG oslo_vmware.api [None req-fdadb98b-f7ca-40ff-9eb5-8f15cbae89cd tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Task: {'id': task-1314356, 'name': PowerOnVM_Task, 'duration_secs': 0.842962} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 800.945207] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-fdadb98b-f7ca-40ff-9eb5-8f15cbae89cd tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 603191b6-a4b0-451b-b98b-f3dbfb684300] Powered on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 800.945420] env[61839]: INFO nova.compute.manager [None req-fdadb98b-f7ca-40ff-9eb5-8f15cbae89cd tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 603191b6-a4b0-451b-b98b-f3dbfb684300] Took 8.40 seconds to spawn the instance on the hypervisor. 
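The traceback above bottoms out in _ensure_no_port_binding_failure: Neutron left the port's binding:vif_type as binding_failed, and Nova converts that into PortBindingFailed, aborts the resource claim, and re-schedules the build (the "was re-scheduled" entry). Here is a self-contained paraphrase of that check, reconstructed from the traceback frames rather than copied from Nova:

class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__(f"Binding failed for port {port_id}, "
                         "please check neutron logs for more information.")

VIF_TYPE_BINDING_FAILED = 'binding_failed'

def ensure_no_port_binding_failure(port):
    # A port Neutron could not bind carries binding:vif_type='binding_failed';
    # anything else (e.g. 'ovs') means the binding succeeded.
    if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
        raise PortBindingFailed(port_id=port['id'])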
[ 800.945698] env[61839]: DEBUG nova.compute.manager [None req-fdadb98b-f7ca-40ff-9eb5-8f15cbae89cd tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 603191b6-a4b0-451b-b98b-f3dbfb684300] Checking state {{(pid=61839) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 800.946701] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0b16dd4-e7ac-4d5a-8a9a-4a393a507d32 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.053090] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] [instance: 211e8267-3c33-42c8-852f-1c20d7987453] Instance has had 0 of 5 cleanup attempts {{(pid=61839) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 801.312104] env[61839]: DEBUG oslo_vmware.api [None req-3a0aa598-9065-4c6e-8d67-a77a540fab55 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Task: {'id': task-1314357, 'name': CreateSnapshot_Task, 'duration_secs': 0.621519} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 801.312384] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-3a0aa598-9065-4c6e-8d67-a77a540fab55 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: e65da0fd-e877-4b25-a319-e4d65397056a] Created Snapshot of the VM instance {{(pid=61839) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 801.313134] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d8c7e9f-b9c8-454a-a982-6354020670a8 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.467458] env[61839]: DEBUG nova.network.neutron [None req-d7ee033d-2803-45eb-9965-f6766e5fb5bc tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] [instance: e81bf730-9cf6-4728-aae4-4962115f8b6f] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 801.469469] env[61839]: INFO nova.compute.manager [None req-fdadb98b-f7ca-40ff-9eb5-8f15cbae89cd tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 603191b6-a4b0-451b-b98b-f3dbfb684300] Took 28.44 seconds to build instance. 
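The "Inventory has not changed" entries repeat because the resource tracker re-reports the same provider inventory on every pass; Placement derives schedulable capacity from it as (total - reserved) * allocation_ratio, with max_unit capping any single allocation. A worked sketch using the exact values logged for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908:

# Inventory as logged above for this compute node's resource provider.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0, 'max_unit': 16},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0, 'max_unit': 65530},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0, 'max_unit': 135},
}

for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(f"{rc}: capacity {capacity:g}, per-allocation cap {inv['max_unit']}")
# VCPU: 192 schedulable units from 48 cores at the 4.0 overcommit ratio;
# MEMORY_MB: 196078; DISK_GB: 400, though max_unit stops any single
# instance from claiming more than 135 GB.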
[ 801.556028] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 801.556294] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Cleaning up deleted instances with incomplete migration {{(pid=61839) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11266}} [ 801.592318] env[61839]: DEBUG nova.network.neutron [None req-d7ee033d-2803-45eb-9965-f6766e5fb5bc tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] [instance: e81bf730-9cf6-4728-aae4-4962115f8b6f] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 801.674523] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0935334-a534-494d-adf8-75338431c0b1 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.682185] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9135a9ee-cb41-4834-99c4-1d03b34b310f {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.710578] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-245325bc-07e4-49e8-bc7a-ae67b0cf8a47 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.717433] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76b93ad1-64e1-46c1-9995-24236dc817f1 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.730279] env[61839]: DEBUG nova.compute.provider_tree [None req-d7ee033d-2803-45eb-9965-f6766e5fb5bc tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 801.744946] env[61839]: INFO nova.compute.manager [None req-ee9708ff-fbc6-428b-a53a-456e2b54cb6b tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 603191b6-a4b0-451b-b98b-f3dbfb684300] Rescuing [ 801.745213] env[61839]: DEBUG oslo_concurrency.lockutils [None req-ee9708ff-fbc6-428b-a53a-456e2b54cb6b tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Acquiring lock "refresh_cache-603191b6-a4b0-451b-b98b-f3dbfb684300" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 801.745387] env[61839]: DEBUG oslo_concurrency.lockutils [None req-ee9708ff-fbc6-428b-a53a-456e2b54cb6b tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Acquired lock "refresh_cache-603191b6-a4b0-451b-b98b-f3dbfb684300" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 801.745553] env[61839]: DEBUG nova.network.neutron [None 
req-ee9708ff-fbc6-428b-a53a-456e2b54cb6b tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 603191b6-a4b0-451b-b98b-f3dbfb684300] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 801.831402] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-3a0aa598-9065-4c6e-8d67-a77a540fab55 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: e65da0fd-e877-4b25-a319-e4d65397056a] Creating linked-clone VM from snapshot {{(pid=61839) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 801.833054] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-cb02c21f-357f-4262-a2af-38b7763e9337 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.839958] env[61839]: DEBUG oslo_vmware.api [None req-3a0aa598-9065-4c6e-8d67-a77a540fab55 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Waiting for the task: (returnval){ [ 801.839958] env[61839]: value = "task-1314358" [ 801.839958] env[61839]: _type = "Task" [ 801.839958] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 801.849661] env[61839]: DEBUG oslo_vmware.api [None req-3a0aa598-9065-4c6e-8d67-a77a540fab55 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Task: {'id': task-1314358, 'name': CloneVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.972178] env[61839]: DEBUG oslo_concurrency.lockutils [None req-fdadb98b-f7ca-40ff-9eb5-8f15cbae89cd tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Lock "603191b6-a4b0-451b-b98b-f3dbfb684300" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 138.023s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 802.060098] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 802.094959] env[61839]: DEBUG oslo_concurrency.lockutils [None req-d7ee033d-2803-45eb-9965-f6766e5fb5bc tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Releasing lock "refresh_cache-e81bf730-9cf6-4728-aae4-4962115f8b6f" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 802.095285] env[61839]: DEBUG nova.compute.manager [None req-d7ee033d-2803-45eb-9965-f6766e5fb5bc tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged.
{{(pid=61839) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 802.095484] env[61839]: DEBUG nova.compute.manager [None req-d7ee033d-2803-45eb-9965-f6766e5fb5bc tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] [instance: e81bf730-9cf6-4728-aae4-4962115f8b6f] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 802.095740] env[61839]: DEBUG nova.network.neutron [None req-d7ee033d-2803-45eb-9965-f6766e5fb5bc tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] [instance: e81bf730-9cf6-4728-aae4-4962115f8b6f] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 802.113014] env[61839]: DEBUG nova.network.neutron [None req-d7ee033d-2803-45eb-9965-f6766e5fb5bc tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] [instance: e81bf730-9cf6-4728-aae4-4962115f8b6f] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 802.233628] env[61839]: DEBUG nova.scheduler.client.report [None req-d7ee033d-2803-45eb-9965-f6766e5fb5bc tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 802.350323] env[61839]: DEBUG oslo_vmware.api [None req-3a0aa598-9065-4c6e-8d67-a77a540fab55 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Task: {'id': task-1314358, 'name': CloneVM_Task} progress is 94%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 802.477181] env[61839]: DEBUG nova.compute.manager [None req-c09e423b-508c-42ac-8e81-438f1973d598 tempest-ServersTestManualDisk-590910019 tempest-ServersTestManualDisk-590910019-project-member] [instance: 39728872-2d30-48eb-90da-412f1e45971c] Starting instance... 
{{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 802.597438] env[61839]: DEBUG nova.network.neutron [None req-ee9708ff-fbc6-428b-a53a-456e2b54cb6b tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 603191b6-a4b0-451b-b98b-f3dbfb684300] Updating instance_info_cache with network_info: [{"id": "fc601f8f-cb33-41b2-9f00-9476cd3cbf01", "address": "fa:16:3e:31:8e:b6", "network": {"id": "6f1d4bda-c333-4556-9530-df719c058a5b", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1248001983-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "223d94c193814f649b5d1f35e3756071", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2180b40f-2bb0-47da-ba80-c2fbe7f98af0", "external-id": "nsx-vlan-transportzone-970", "segmentation_id": 970, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfc601f8f-cb", "ovs_interfaceid": "fc601f8f-cb33-41b2-9f00-9476cd3cbf01", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 802.616800] env[61839]: DEBUG nova.network.neutron [None req-d7ee033d-2803-45eb-9965-f6766e5fb5bc tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] [instance: e81bf730-9cf6-4728-aae4-4962115f8b6f] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 802.740012] env[61839]: DEBUG oslo_concurrency.lockutils [None req-d7ee033d-2803-45eb-9965-f6766e5fb5bc tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.809s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 802.740692] env[61839]: ERROR nova.compute.manager [None req-d7ee033d-2803-45eb-9965-f6766e5fb5bc tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] [instance: 697222e0-07e5-4a3d-adbe-d5d815cf4756] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 962e76d2-2d5b-466c-9652-29ee2cfdd960, please check neutron logs for more information. 
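The rescue path's cache refresh earlier in this entry shows what a successfully bound port looks like in the instance_info_cache (vif_type ovs, an NSX logical switch, a fixed IP and DHCP metadata), in contrast to the empty network_info: [] being written for the failed builds. A short sketch that pulls the commonly needed fields out of that structure; the dict literal is abbreviated from the log above:

network_info = [{
    "id": "fc601f8f-cb33-41b2-9f00-9476cd3cbf01",
    "address": "fa:16:3e:31:8e:b6",
    "type": "ovs",
    "details": {"segmentation_id": 970,
                "nsx-logical-switch-id": "2180b40f-2bb0-47da-ba80-c2fbe7f98af0"},
    "network": {
        "label": "tempest-ServerRescueNegativeTestJSON-1248001983-network",
        "subnets": [{
            "cidr": "192.168.128.0/28",
            "gateway": {"address": "192.168.128.1"},
            "ips": [{"address": "192.168.128.4"}],
        }],
    },
}]

for vif in network_info:
    # Collect every fixed IP across the VIF's subnets.
    ips = [ip["address"]
           for subnet in vif["network"]["subnets"]
           for ip in subnet["ips"]]
    print(vif["id"], vif["address"], vif["type"], ips)
# -> fc601f8f-cb33-41b2-9f00-9476cd3cbf01 fa:16:3e:31:8e:b6 ovs ['192.168.128.4']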
[ 802.740692] env[61839]: ERROR nova.compute.manager [instance: 697222e0-07e5-4a3d-adbe-d5d815cf4756] Traceback (most recent call last): [ 802.740692] env[61839]: ERROR nova.compute.manager [instance: 697222e0-07e5-4a3d-adbe-d5d815cf4756] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 802.740692] env[61839]: ERROR nova.compute.manager [instance: 697222e0-07e5-4a3d-adbe-d5d815cf4756] self.driver.spawn(context, instance, image_meta, [ 802.740692] env[61839]: ERROR nova.compute.manager [instance: 697222e0-07e5-4a3d-adbe-d5d815cf4756] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 802.740692] env[61839]: ERROR nova.compute.manager [instance: 697222e0-07e5-4a3d-adbe-d5d815cf4756] self._vmops.spawn(context, instance, image_meta, injected_files, [ 802.740692] env[61839]: ERROR nova.compute.manager [instance: 697222e0-07e5-4a3d-adbe-d5d815cf4756] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 802.740692] env[61839]: ERROR nova.compute.manager [instance: 697222e0-07e5-4a3d-adbe-d5d815cf4756] vm_ref = self.build_virtual_machine(instance, [ 802.740692] env[61839]: ERROR nova.compute.manager [instance: 697222e0-07e5-4a3d-adbe-d5d815cf4756] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 802.740692] env[61839]: ERROR nova.compute.manager [instance: 697222e0-07e5-4a3d-adbe-d5d815cf4756] vif_infos = vmwarevif.get_vif_info(self._session, [ 802.740692] env[61839]: ERROR nova.compute.manager [instance: 697222e0-07e5-4a3d-adbe-d5d815cf4756] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 802.741074] env[61839]: ERROR nova.compute.manager [instance: 697222e0-07e5-4a3d-adbe-d5d815cf4756] for vif in network_info: [ 802.741074] env[61839]: ERROR nova.compute.manager [instance: 697222e0-07e5-4a3d-adbe-d5d815cf4756] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 802.741074] env[61839]: ERROR nova.compute.manager [instance: 697222e0-07e5-4a3d-adbe-d5d815cf4756] return self._sync_wrapper(fn, *args, **kwargs) [ 802.741074] env[61839]: ERROR nova.compute.manager [instance: 697222e0-07e5-4a3d-adbe-d5d815cf4756] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 802.741074] env[61839]: ERROR nova.compute.manager [instance: 697222e0-07e5-4a3d-adbe-d5d815cf4756] self.wait() [ 802.741074] env[61839]: ERROR nova.compute.manager [instance: 697222e0-07e5-4a3d-adbe-d5d815cf4756] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 802.741074] env[61839]: ERROR nova.compute.manager [instance: 697222e0-07e5-4a3d-adbe-d5d815cf4756] self[:] = self._gt.wait() [ 802.741074] env[61839]: ERROR nova.compute.manager [instance: 697222e0-07e5-4a3d-adbe-d5d815cf4756] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 802.741074] env[61839]: ERROR nova.compute.manager [instance: 697222e0-07e5-4a3d-adbe-d5d815cf4756] return self._exit_event.wait() [ 802.741074] env[61839]: ERROR nova.compute.manager [instance: 697222e0-07e5-4a3d-adbe-d5d815cf4756] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 802.741074] env[61839]: ERROR nova.compute.manager [instance: 697222e0-07e5-4a3d-adbe-d5d815cf4756] result = hub.switch() [ 802.741074] env[61839]: ERROR nova.compute.manager [instance: 697222e0-07e5-4a3d-adbe-d5d815cf4756] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
802.741074] env[61839]: ERROR nova.compute.manager [instance: 697222e0-07e5-4a3d-adbe-d5d815cf4756] return self.greenlet.switch() [ 802.741470] env[61839]: ERROR nova.compute.manager [instance: 697222e0-07e5-4a3d-adbe-d5d815cf4756] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 802.741470] env[61839]: ERROR nova.compute.manager [instance: 697222e0-07e5-4a3d-adbe-d5d815cf4756] result = function(*args, **kwargs) [ 802.741470] env[61839]: ERROR nova.compute.manager [instance: 697222e0-07e5-4a3d-adbe-d5d815cf4756] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 802.741470] env[61839]: ERROR nova.compute.manager [instance: 697222e0-07e5-4a3d-adbe-d5d815cf4756] return func(*args, **kwargs) [ 802.741470] env[61839]: ERROR nova.compute.manager [instance: 697222e0-07e5-4a3d-adbe-d5d815cf4756] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 802.741470] env[61839]: ERROR nova.compute.manager [instance: 697222e0-07e5-4a3d-adbe-d5d815cf4756] raise e [ 802.741470] env[61839]: ERROR nova.compute.manager [instance: 697222e0-07e5-4a3d-adbe-d5d815cf4756] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 802.741470] env[61839]: ERROR nova.compute.manager [instance: 697222e0-07e5-4a3d-adbe-d5d815cf4756] nwinfo = self.network_api.allocate_for_instance( [ 802.741470] env[61839]: ERROR nova.compute.manager [instance: 697222e0-07e5-4a3d-adbe-d5d815cf4756] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 802.741470] env[61839]: ERROR nova.compute.manager [instance: 697222e0-07e5-4a3d-adbe-d5d815cf4756] created_port_ids = self._update_ports_for_instance( [ 802.741470] env[61839]: ERROR nova.compute.manager [instance: 697222e0-07e5-4a3d-adbe-d5d815cf4756] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 802.741470] env[61839]: ERROR nova.compute.manager [instance: 697222e0-07e5-4a3d-adbe-d5d815cf4756] with excutils.save_and_reraise_exception(): [ 802.741470] env[61839]: ERROR nova.compute.manager [instance: 697222e0-07e5-4a3d-adbe-d5d815cf4756] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 802.741874] env[61839]: ERROR nova.compute.manager [instance: 697222e0-07e5-4a3d-adbe-d5d815cf4756] self.force_reraise() [ 802.741874] env[61839]: ERROR nova.compute.manager [instance: 697222e0-07e5-4a3d-adbe-d5d815cf4756] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 802.741874] env[61839]: ERROR nova.compute.manager [instance: 697222e0-07e5-4a3d-adbe-d5d815cf4756] raise self.value [ 802.741874] env[61839]: ERROR nova.compute.manager [instance: 697222e0-07e5-4a3d-adbe-d5d815cf4756] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 802.741874] env[61839]: ERROR nova.compute.manager [instance: 697222e0-07e5-4a3d-adbe-d5d815cf4756] updated_port = self._update_port( [ 802.741874] env[61839]: ERROR nova.compute.manager [instance: 697222e0-07e5-4a3d-adbe-d5d815cf4756] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 802.741874] env[61839]: ERROR nova.compute.manager [instance: 697222e0-07e5-4a3d-adbe-d5d815cf4756] _ensure_no_port_binding_failure(port) [ 802.741874] env[61839]: ERROR nova.compute.manager [instance: 697222e0-07e5-4a3d-adbe-d5d815cf4756] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 802.741874] env[61839]: ERROR nova.compute.manager [instance: 697222e0-07e5-4a3d-adbe-d5d815cf4756] raise exception.PortBindingFailed(port_id=port['id']) [ 802.741874] env[61839]: ERROR nova.compute.manager [instance: 697222e0-07e5-4a3d-adbe-d5d815cf4756] nova.exception.PortBindingFailed: Binding failed for port 962e76d2-2d5b-466c-9652-29ee2cfdd960, please check neutron logs for more information. [ 802.741874] env[61839]: ERROR nova.compute.manager [instance: 697222e0-07e5-4a3d-adbe-d5d815cf4756] [ 802.742220] env[61839]: DEBUG nova.compute.utils [None req-d7ee033d-2803-45eb-9965-f6766e5fb5bc tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] [instance: 697222e0-07e5-4a3d-adbe-d5d815cf4756] Binding failed for port 962e76d2-2d5b-466c-9652-29ee2cfdd960, please check neutron logs for more information. {{(pid=61839) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 802.742897] env[61839]: DEBUG oslo_concurrency.lockutils [None req-1a15c259-b1dd-44bd-bdfc-6fbb7bd80a7c tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.997s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 802.744450] env[61839]: INFO nova.compute.claims [None req-1a15c259-b1dd-44bd-bdfc-6fbb7bd80a7c tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] [instance: 86525ea7-af75-4b10-85a1-c0fbab73ea5f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 802.747102] env[61839]: DEBUG nova.compute.manager [None req-d7ee033d-2803-45eb-9965-f6766e5fb5bc tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] [instance: 697222e0-07e5-4a3d-adbe-d5d815cf4756] Build of instance 697222e0-07e5-4a3d-adbe-d5d815cf4756 was re-scheduled: Binding failed for port 962e76d2-2d5b-466c-9652-29ee2cfdd960, please check neutron logs for more information. 
{{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 802.747538] env[61839]: DEBUG nova.compute.manager [None req-d7ee033d-2803-45eb-9965-f6766e5fb5bc tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] [instance: 697222e0-07e5-4a3d-adbe-d5d815cf4756] Unplugging VIFs for instance {{(pid=61839) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 802.747757] env[61839]: DEBUG oslo_concurrency.lockutils [None req-d7ee033d-2803-45eb-9965-f6766e5fb5bc tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Acquiring lock "refresh_cache-697222e0-07e5-4a3d-adbe-d5d815cf4756" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 802.747903] env[61839]: DEBUG oslo_concurrency.lockutils [None req-d7ee033d-2803-45eb-9965-f6766e5fb5bc tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Acquired lock "refresh_cache-697222e0-07e5-4a3d-adbe-d5d815cf4756" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 802.748074] env[61839]: DEBUG nova.network.neutron [None req-d7ee033d-2803-45eb-9965-f6766e5fb5bc tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] [instance: 697222e0-07e5-4a3d-adbe-d5d815cf4756] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 802.852817] env[61839]: DEBUG oslo_vmware.api [None req-3a0aa598-9065-4c6e-8d67-a77a540fab55 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Task: {'id': task-1314358, 'name': CloneVM_Task} progress is 94%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 802.996849] env[61839]: DEBUG oslo_concurrency.lockutils [None req-c09e423b-508c-42ac-8e81-438f1973d598 tempest-ServersTestManualDisk-590910019 tempest-ServersTestManualDisk-590910019-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 803.100283] env[61839]: DEBUG oslo_concurrency.lockutils [None req-ee9708ff-fbc6-428b-a53a-456e2b54cb6b tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Releasing lock "refresh_cache-603191b6-a4b0-451b-b98b-f3dbfb684300" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 803.118876] env[61839]: INFO nova.compute.manager [None req-d7ee033d-2803-45eb-9965-f6766e5fb5bc tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] [instance: e81bf730-9cf6-4728-aae4-4962115f8b6f] Took 1.02 seconds to deallocate network for instance. [ 803.274159] env[61839]: DEBUG nova.network.neutron [None req-d7ee033d-2803-45eb-9965-f6766e5fb5bc tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] [instance: 697222e0-07e5-4a3d-adbe-d5d815cf4756] Instance cache missing network info. 
{{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 803.350838] env[61839]: DEBUG oslo_vmware.api [None req-3a0aa598-9065-4c6e-8d67-a77a540fab55 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Task: {'id': task-1314358, 'name': CloneVM_Task} progress is 94%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 803.366625] env[61839]: DEBUG nova.network.neutron [None req-d7ee033d-2803-45eb-9965-f6766e5fb5bc tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] [instance: 697222e0-07e5-4a3d-adbe-d5d815cf4756] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 803.630962] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee9708ff-fbc6-428b-a53a-456e2b54cb6b tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 603191b6-a4b0-451b-b98b-f3dbfb684300] Powering off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 803.630962] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d6001c5c-b32a-4a60-a405-c55de6d7b15a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.637724] env[61839]: DEBUG oslo_vmware.api [None req-ee9708ff-fbc6-428b-a53a-456e2b54cb6b tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Waiting for the task: (returnval){ [ 803.637724] env[61839]: value = "task-1314359" [ 803.637724] env[61839]: _type = "Task" [ 803.637724] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 803.647441] env[61839]: DEBUG oslo_vmware.api [None req-ee9708ff-fbc6-428b-a53a-456e2b54cb6b tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Task: {'id': task-1314359, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 803.851939] env[61839]: DEBUG oslo_vmware.api [None req-3a0aa598-9065-4c6e-8d67-a77a540fab55 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Task: {'id': task-1314358, 'name': CloneVM_Task} progress is 94%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 803.871048] env[61839]: DEBUG oslo_concurrency.lockutils [None req-d7ee033d-2803-45eb-9965-f6766e5fb5bc tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Releasing lock "refresh_cache-697222e0-07e5-4a3d-adbe-d5d815cf4756" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 803.871334] env[61839]: DEBUG nova.compute.manager [None req-d7ee033d-2803-45eb-9965-f6766e5fb5bc tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61839) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 803.871457] env[61839]: DEBUG nova.compute.manager [None req-d7ee033d-2803-45eb-9965-f6766e5fb5bc tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] [instance: 697222e0-07e5-4a3d-adbe-d5d815cf4756] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 803.871673] env[61839]: DEBUG nova.network.neutron [None req-d7ee033d-2803-45eb-9965-f6766e5fb5bc tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] [instance: 697222e0-07e5-4a3d-adbe-d5d815cf4756] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 803.887059] env[61839]: DEBUG nova.network.neutron [None req-d7ee033d-2803-45eb-9965-f6766e5fb5bc tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] [instance: 697222e0-07e5-4a3d-adbe-d5d815cf4756] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 803.983532] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffcc171d-c5cf-4408-846c-07158a1eedda {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.990865] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-763ecb5f-ec11-44db-ae80-b443ed817b18 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.020292] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ae2427e-2872-4dfd-a1d3-907561201ec2 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.027352] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a429a56c-83c0-45d3-b66f-d788ec0d0f4a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.039910] env[61839]: DEBUG nova.compute.provider_tree [None req-1a15c259-b1dd-44bd-bdfc-6fbb7bd80a7c tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 804.147812] env[61839]: DEBUG oslo_vmware.api [None req-ee9708ff-fbc6-428b-a53a-456e2b54cb6b tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Task: {'id': task-1314359, 'name': PowerOffVM_Task, 'duration_secs': 0.321775} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 804.148093] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee9708ff-fbc6-428b-a53a-456e2b54cb6b tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 603191b6-a4b0-451b-b98b-f3dbfb684300] Powered off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 804.148858] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0b2ff47-0cea-447d-83c0-0a74803321c7 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.151923] env[61839]: INFO nova.scheduler.client.report [None req-d7ee033d-2803-45eb-9965-f6766e5fb5bc tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Deleted allocations for instance e81bf730-9cf6-4728-aae4-4962115f8b6f [ 804.174696] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1580a0a0-a429-45e5-9681-4105715ff224 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.202186] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee9708ff-fbc6-428b-a53a-456e2b54cb6b tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 603191b6-a4b0-451b-b98b-f3dbfb684300] Powering off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 804.202486] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-384ba229-55a6-4297-b248-4dbb9aa9a665 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.209016] env[61839]: DEBUG oslo_vmware.api [None req-ee9708ff-fbc6-428b-a53a-456e2b54cb6b tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Waiting for the task: (returnval){ [ 804.209016] env[61839]: value = "task-1314360" [ 804.209016] env[61839]: _type = "Task" [ 804.209016] env[61839]: } to complete. 
{{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 804.217481] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee9708ff-fbc6-428b-a53a-456e2b54cb6b tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 603191b6-a4b0-451b-b98b-f3dbfb684300] VM already powered off {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 804.218039] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-ee9708ff-fbc6-428b-a53a-456e2b54cb6b tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 603191b6-a4b0-451b-b98b-f3dbfb684300] Processing image e497cc62-282a-4a70-9770-22d80d8a1013 {{(pid=61839) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 804.218039] env[61839]: DEBUG oslo_concurrency.lockutils [None req-ee9708ff-fbc6-428b-a53a-456e2b54cb6b tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 804.218171] env[61839]: DEBUG oslo_concurrency.lockutils [None req-ee9708ff-fbc6-428b-a53a-456e2b54cb6b tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 804.218329] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-ee9708ff-fbc6-428b-a53a-456e2b54cb6b tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 804.218574] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8511280a-9948-4173-9ff8-6887f63e401f {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.226092] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-ee9708ff-fbc6-428b-a53a-456e2b54cb6b tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 804.226285] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-ee9708ff-fbc6-428b-a53a-456e2b54cb6b tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61839) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 804.227045] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b2c1037f-a272-4139-ba28-8c67d68ab8bc {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.232344] env[61839]: DEBUG oslo_vmware.api [None req-ee9708ff-fbc6-428b-a53a-456e2b54cb6b tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Waiting for the task: (returnval){ [ 804.232344] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52c31c7b-2168-6820-103a-9e4d1fafe458" [ 804.232344] env[61839]: _type = "Task" [ 804.232344] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 804.240229] env[61839]: DEBUG oslo_vmware.api [None req-ee9708ff-fbc6-428b-a53a-456e2b54cb6b tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52c31c7b-2168-6820-103a-9e4d1fafe458, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 804.351248] env[61839]: DEBUG oslo_vmware.api [None req-3a0aa598-9065-4c6e-8d67-a77a540fab55 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Task: {'id': task-1314358, 'name': CloneVM_Task} progress is 100%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 804.390014] env[61839]: DEBUG nova.network.neutron [None req-d7ee033d-2803-45eb-9965-f6766e5fb5bc tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] [instance: 697222e0-07e5-4a3d-adbe-d5d815cf4756] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 804.543182] env[61839]: DEBUG nova.scheduler.client.report [None req-1a15c259-b1dd-44bd-bdfc-6fbb7bd80a7c tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 804.660078] env[61839]: DEBUG oslo_concurrency.lockutils [None req-d7ee033d-2803-45eb-9965-f6766e5fb5bc tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Lock "e81bf730-9cf6-4728-aae4-4962115f8b6f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 179.399s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 804.743968] env[61839]: DEBUG oslo_vmware.api [None req-ee9708ff-fbc6-428b-a53a-456e2b54cb6b tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Task: {'id': 
session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52c31c7b-2168-6820-103a-9e4d1fafe458, 'name': SearchDatastore_Task, 'duration_secs': 0.008293} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 804.744794] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2b389b82-ba84-4d5d-99cc-8105b964e053 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.749996] env[61839]: DEBUG oslo_vmware.api [None req-ee9708ff-fbc6-428b-a53a-456e2b54cb6b tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Waiting for the task: (returnval){ [ 804.749996] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52fd7bd3-095f-ab39-d40d-18a5b27525c5" [ 804.749996] env[61839]: _type = "Task" [ 804.749996] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 804.757733] env[61839]: DEBUG oslo_vmware.api [None req-ee9708ff-fbc6-428b-a53a-456e2b54cb6b tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52fd7bd3-095f-ab39-d40d-18a5b27525c5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 804.851607] env[61839]: DEBUG oslo_vmware.api [None req-3a0aa598-9065-4c6e-8d67-a77a540fab55 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Task: {'id': task-1314358, 'name': CloneVM_Task, 'duration_secs': 2.541647} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 804.851927] env[61839]: INFO nova.virt.vmwareapi.vmops [None req-3a0aa598-9065-4c6e-8d67-a77a540fab55 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: e65da0fd-e877-4b25-a319-e4d65397056a] Created linked-clone VM from snapshot [ 804.852659] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-037531c3-6d6c-4314-ae0a-e725dd272878 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.859580] env[61839]: DEBUG nova.virt.vmwareapi.images [None req-3a0aa598-9065-4c6e-8d67-a77a540fab55 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: e65da0fd-e877-4b25-a319-e4d65397056a] Uploading image bde63672-3220-4238-b8e0-0b8b258879c6 {{(pid=61839) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 804.883073] env[61839]: DEBUG oslo_vmware.rw_handles [None req-3a0aa598-9065-4c6e-8d67-a77a540fab55 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 804.883073] env[61839]: value = "vm-281323" [ 804.883073] env[61839]: _type = "VirtualMachine" [ 804.883073] env[61839]: }. 
{{(pid=61839) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 804.883464] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-ddf2da3a-4ea0-40c2-a906-9ff53b1c3bc5 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.889227] env[61839]: DEBUG oslo_vmware.rw_handles [None req-3a0aa598-9065-4c6e-8d67-a77a540fab55 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Lease: (returnval){ [ 804.889227] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]525b2704-573c-34ef-6583-51b247682c0a" [ 804.889227] env[61839]: _type = "HttpNfcLease" [ 804.889227] env[61839]: } obtained for exporting VM: (result){ [ 804.889227] env[61839]: value = "vm-281323" [ 804.889227] env[61839]: _type = "VirtualMachine" [ 804.889227] env[61839]: }. {{(pid=61839) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 804.889485] env[61839]: DEBUG oslo_vmware.api [None req-3a0aa598-9065-4c6e-8d67-a77a540fab55 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Waiting for the lease: (returnval){ [ 804.889485] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]525b2704-573c-34ef-6583-51b247682c0a" [ 804.889485] env[61839]: _type = "HttpNfcLease" [ 804.889485] env[61839]: } to be ready. {{(pid=61839) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 804.892734] env[61839]: INFO nova.compute.manager [None req-d7ee033d-2803-45eb-9965-f6766e5fb5bc tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] [instance: 697222e0-07e5-4a3d-adbe-d5d815cf4756] Took 1.02 seconds to deallocate network for instance. [ 804.898203] env[61839]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 804.898203] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]525b2704-573c-34ef-6583-51b247682c0a" [ 804.898203] env[61839]: _type = "HttpNfcLease" [ 804.898203] env[61839]: } is initializing. {{(pid=61839) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 805.048053] env[61839]: DEBUG oslo_concurrency.lockutils [None req-1a15c259-b1dd-44bd-bdfc-6fbb7bd80a7c tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.305s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 805.048627] env[61839]: DEBUG nova.compute.manager [None req-1a15c259-b1dd-44bd-bdfc-6fbb7bd80a7c tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] [instance: 86525ea7-af75-4b10-85a1-c0fbab73ea5f] Start building networks asynchronously for instance. 
{{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 805.051239] env[61839]: DEBUG oslo_concurrency.lockutils [None req-67f27bd5-5058-4162-a21b-0d02d096f4b9 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.088s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 805.052960] env[61839]: INFO nova.compute.claims [None req-67f27bd5-5058-4162-a21b-0d02d096f4b9 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: a661cc10-5c4e-421b-b70b-189f0a613e8a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 805.162924] env[61839]: DEBUG nova.compute.manager [None req-3b56f212-0cbc-4766-86f9-61b8fddca264 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] [instance: 49d4720b-83e3-47d9-b727-5bb255de2e7c] Starting instance... {{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 805.264951] env[61839]: DEBUG oslo_vmware.api [None req-ee9708ff-fbc6-428b-a53a-456e2b54cb6b tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52fd7bd3-095f-ab39-d40d-18a5b27525c5, 'name': SearchDatastore_Task, 'duration_secs': 0.009299} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 805.265372] env[61839]: DEBUG oslo_concurrency.lockutils [None req-ee9708ff-fbc6-428b-a53a-456e2b54cb6b tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 805.265760] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-ee9708ff-fbc6-428b-a53a-456e2b54cb6b tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Copying virtual disk from [datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk to [datastore2] 603191b6-a4b0-451b-b98b-f3dbfb684300/e497cc62-282a-4a70-9770-22d80d8a1013-rescue.vmdk. {{(pid=61839) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 805.266156] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-eade81a5-9fa5-4fb7-b116-fb905d2662f7 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.273516] env[61839]: DEBUG oslo_vmware.api [None req-ee9708ff-fbc6-428b-a53a-456e2b54cb6b tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Waiting for the task: (returnval){ [ 805.273516] env[61839]: value = "task-1314362" [ 805.273516] env[61839]: _type = "Task" [ 805.273516] env[61839]: } to complete. 
{{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 805.283943] env[61839]: DEBUG oslo_vmware.api [None req-ee9708ff-fbc6-428b-a53a-456e2b54cb6b tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Task: {'id': task-1314362, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 805.401044] env[61839]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 805.401044] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]525b2704-573c-34ef-6583-51b247682c0a" [ 805.401044] env[61839]: _type = "HttpNfcLease" [ 805.401044] env[61839]: } is ready. {{(pid=61839) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 805.401421] env[61839]: DEBUG oslo_vmware.rw_handles [None req-3a0aa598-9065-4c6e-8d67-a77a540fab55 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 805.401421] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]525b2704-573c-34ef-6583-51b247682c0a" [ 805.401421] env[61839]: _type = "HttpNfcLease" [ 805.401421] env[61839]: }. {{(pid=61839) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 805.402197] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d718635-5344-4cfe-8936-a544363c6e28 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.410027] env[61839]: DEBUG oslo_vmware.rw_handles [None req-3a0aa598-9065-4c6e-8d67-a77a540fab55 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52a1e5fc-89ee-9735-7bc5-925d451f781b/disk-0.vmdk from lease info. {{(pid=61839) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 805.410119] env[61839]: DEBUG oslo_vmware.rw_handles [None req-3a0aa598-9065-4c6e-8d67-a77a540fab55 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52a1e5fc-89ee-9735-7bc5-925d451f781b/disk-0.vmdk for reading. {{(pid=61839) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 805.538986] env[61839]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-079e5f43-374c-4cbe-bbbc-17d766eebb8b {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.557319] env[61839]: DEBUG nova.compute.utils [None req-1a15c259-b1dd-44bd-bdfc-6fbb7bd80a7c tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Using /dev/sd instead of None {{(pid=61839) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 805.567246] env[61839]: DEBUG nova.compute.manager [None req-1a15c259-b1dd-44bd-bdfc-6fbb7bd80a7c tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] [instance: 86525ea7-af75-4b10-85a1-c0fbab73ea5f] Allocating IP information in the background. 
{{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 805.567789] env[61839]: DEBUG nova.network.neutron [None req-1a15c259-b1dd-44bd-bdfc-6fbb7bd80a7c tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] [instance: 86525ea7-af75-4b10-85a1-c0fbab73ea5f] allocate_for_instance() {{(pid=61839) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 805.650110] env[61839]: DEBUG nova.policy [None req-1a15c259-b1dd-44bd-bdfc-6fbb7bd80a7c tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '72a11321692d4692af854eabe0aca25a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c7a300fe2748456bb4a522a4d7c0d0f4', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61839) authorize /opt/stack/nova/nova/policy.py:201}} [ 805.685358] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3b56f212-0cbc-4766-86f9-61b8fddca264 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 805.783389] env[61839]: DEBUG oslo_vmware.api [None req-ee9708ff-fbc6-428b-a53a-456e2b54cb6b tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Task: {'id': task-1314362, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.470548} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 805.783740] env[61839]: INFO nova.virt.vmwareapi.ds_util [None req-ee9708ff-fbc6-428b-a53a-456e2b54cb6b tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Copied virtual disk from [datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk to [datastore2] 603191b6-a4b0-451b-b98b-f3dbfb684300/e497cc62-282a-4a70-9770-22d80d8a1013-rescue.vmdk. 
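The CopyVirtualDisk_Task record above closes out the same request/poll/complete pattern that recurs throughout this log: an oslo.vmware service call ("Invoking ...") returns a vCenter task, and the caller blocks in wait_for_task while _poll_task logs "progress is N%" until the task reaches a terminal state (the same shape is visible for CloneVM_Task, PowerOffVM_Task, SearchDatastore_Task, and ReconfigVM_Task). The following is a minimal, self-contained Python sketch of that poll loop, for illustration only; it is not the oslo.vmware implementation, and FakeTask and POLL_INTERVAL are invented stand-ins for the real Task moref and polling cadence.

    import time

    POLL_INTERVAL = 0.5  # seconds between polls; a hypothetical value, not oslo.vmware's


    class FakeTask:
        """Invented stand-in for a vCenter Task moref; progress advances per poll."""

        def __init__(self, name):
            self.name = name
            self.progress = 0
            self.state = "running"

        def poll(self):
            # Pretend the backend moves the task forward 25% each time we ask.
            self.progress = min(self.progress + 25, 100)
            if self.progress == 100:
                self.state = "success"
            return self.state, self.progress


    def wait_for_task(task):
        """Block until the task is terminal, logging progress as it goes."""
        while True:
            state, progress = task.poll()
            print(f"Task: {{'name': {task.name}}} progress is {progress}%.")
            if state == "success":
                return
            if state == "error":
                raise RuntimeError(f"Task {task.name} failed")
            time.sleep(POLL_INTERVAL)


    wait_for_task(FakeTask("CopyVirtualDisk_Task"))

Running the sketch prints one progress line per poll, mirroring the "Task: {'id': task-1314362, 'name': CopyVirtualDisk_Task} progress is N%." records in this section; the real loop additionally reports the duration_secs seen in the "completed successfully" records.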
[ 805.784532] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c78efd4f-5294-41e7-94f4-b912e4e84b88 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.809071] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-ee9708ff-fbc6-428b-a53a-456e2b54cb6b tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 603191b6-a4b0-451b-b98b-f3dbfb684300] Reconfiguring VM instance instance-00000039 to attach disk [datastore2] 603191b6-a4b0-451b-b98b-f3dbfb684300/e497cc62-282a-4a70-9770-22d80d8a1013-rescue.vmdk or device None with type thin {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 805.811040] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fc5843f6-3b44-410c-8293-a3ffbb2b54d4 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.826693] env[61839]: DEBUG oslo_vmware.api [None req-ee9708ff-fbc6-428b-a53a-456e2b54cb6b tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Waiting for the task: (returnval){ [ 805.826693] env[61839]: value = "task-1314363" [ 805.826693] env[61839]: _type = "Task" [ 805.826693] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 805.834662] env[61839]: DEBUG oslo_vmware.api [None req-ee9708ff-fbc6-428b-a53a-456e2b54cb6b tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Task: {'id': task-1314363, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 805.930761] env[61839]: INFO nova.scheduler.client.report [None req-d7ee033d-2803-45eb-9965-f6766e5fb5bc tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Deleted allocations for instance 697222e0-07e5-4a3d-adbe-d5d815cf4756 [ 806.068193] env[61839]: DEBUG nova.compute.manager [None req-1a15c259-b1dd-44bd-bdfc-6fbb7bd80a7c tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] [instance: 86525ea7-af75-4b10-85a1-c0fbab73ea5f] Start building block device mappings for instance. {{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 806.076957] env[61839]: DEBUG nova.network.neutron [None req-1a15c259-b1dd-44bd-bdfc-6fbb7bd80a7c tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] [instance: 86525ea7-af75-4b10-85a1-c0fbab73ea5f] Successfully created port: ef8176cf-7494-44f4-a600-7dedff162419 {{(pid=61839) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 806.340079] env[61839]: DEBUG oslo_vmware.api [None req-ee9708ff-fbc6-428b-a53a-456e2b54cb6b tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Task: {'id': task-1314363, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 806.421707] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2802c6b-7752-41b8-ab15-eae38da34f85 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.429873] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f89c80a1-46ab-49be-9500-28dc780c6130 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.464625] env[61839]: DEBUG oslo_concurrency.lockutils [None req-d7ee033d-2803-45eb-9965-f6766e5fb5bc tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Lock "697222e0-07e5-4a3d-adbe-d5d815cf4756" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 181.174s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 806.467086] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2edac0e7-c9d6-49c7-9a74-d668cdcd2f15 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.475022] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0e9278e-6719-4e66-84a8-704c9da24d7a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.490160] env[61839]: DEBUG nova.compute.provider_tree [None req-67f27bd5-5058-4162-a21b-0d02d096f4b9 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Updating inventory in ProviderTree for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 806.839259] env[61839]: DEBUG oslo_vmware.api [None req-ee9708ff-fbc6-428b-a53a-456e2b54cb6b tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Task: {'id': task-1314363, 'name': ReconfigVM_Task, 'duration_secs': 0.632535} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 806.839573] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-ee9708ff-fbc6-428b-a53a-456e2b54cb6b tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 603191b6-a4b0-451b-b98b-f3dbfb684300] Reconfigured VM instance instance-00000039 to attach disk [datastore2] 603191b6-a4b0-451b-b98b-f3dbfb684300/e497cc62-282a-4a70-9770-22d80d8a1013-rescue.vmdk or device None with type thin {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 806.840686] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1255d69-4924-4a78-bd2c-2a55c8d26783 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.868590] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f3b6037c-0a3c-4fac-aaef-6cf2923d8fa9 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.884455] env[61839]: DEBUG oslo_vmware.api [None req-ee9708ff-fbc6-428b-a53a-456e2b54cb6b tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Waiting for the task: (returnval){ [ 806.884455] env[61839]: value = "task-1314364" [ 806.884455] env[61839]: _type = "Task" [ 806.884455] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 806.893939] env[61839]: DEBUG oslo_vmware.api [None req-ee9708ff-fbc6-428b-a53a-456e2b54cb6b tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Task: {'id': task-1314364, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 806.970816] env[61839]: DEBUG nova.compute.manager [None req-3697bd91-57ab-4763-82ae-06bd5d1d7501 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] [instance: 62959833-5834-4c0a-bf4e-3ac1157b3b0c] Starting instance... {{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 807.015846] env[61839]: ERROR nova.scheduler.client.report [None req-67f27bd5-5058-4162-a21b-0d02d096f4b9 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [req-c81311e6-a9fc-41a6-bb1f-03acb18a12c7] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID cef329e6-1ccd-42a8-bbc4-109a06d1c908. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-c81311e6-a9fc-41a6-bb1f-03acb18a12c7"}]} [ 807.032100] env[61839]: DEBUG nova.scheduler.client.report [None req-67f27bd5-5058-4162-a21b-0d02d096f4b9 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Refreshing inventories for resource provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 807.049236] env[61839]: DEBUG nova.scheduler.client.report [None req-67f27bd5-5058-4162-a21b-0d02d096f4b9 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Updating ProviderTree inventory for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 807.049482] env[61839]: DEBUG nova.compute.provider_tree [None req-67f27bd5-5058-4162-a21b-0d02d096f4b9 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Updating inventory in ProviderTree for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 807.060902] env[61839]: DEBUG nova.scheduler.client.report [None req-67f27bd5-5058-4162-a21b-0d02d096f4b9 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Refreshing aggregate associations for resource provider cef329e6-1ccd-42a8-bbc4-109a06d1c908, aggregates: None {{(pid=61839) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 807.078922] env[61839]: DEBUG nova.compute.manager [None req-1a15c259-b1dd-44bd-bdfc-6fbb7bd80a7c tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] [instance: 86525ea7-af75-4b10-85a1-c0fbab73ea5f] Start spawning the instance on the hypervisor. 
{{(pid=61839) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 807.083110] env[61839]: DEBUG nova.scheduler.client.report [None req-67f27bd5-5058-4162-a21b-0d02d096f4b9 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Refreshing trait associations for resource provider cef329e6-1ccd-42a8-bbc4-109a06d1c908, traits: COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=61839) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 807.106537] env[61839]: DEBUG nova.virt.hardware [None req-1a15c259-b1dd-44bd-bdfc-6fbb7bd80a7c tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-18T16:51:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-18T16:50:57Z,direct_url=,disk_format='vmdk',id=e497cc62-282a-4a70-9770-22d80d8a1013,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a98e034276e44534a9feace637762da7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-18T16:50:58Z,virtual_size=,visibility=), allow threads: False {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 807.107199] env[61839]: DEBUG nova.virt.hardware [None req-1a15c259-b1dd-44bd-bdfc-6fbb7bd80a7c tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Flavor limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 807.107199] env[61839]: DEBUG nova.virt.hardware [None req-1a15c259-b1dd-44bd-bdfc-6fbb7bd80a7c tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Image limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 807.107670] env[61839]: DEBUG nova.virt.hardware [None req-1a15c259-b1dd-44bd-bdfc-6fbb7bd80a7c tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Flavor pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 807.107670] env[61839]: DEBUG nova.virt.hardware [None req-1a15c259-b1dd-44bd-bdfc-6fbb7bd80a7c tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Image pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 807.107798] env[61839]: DEBUG nova.virt.hardware [None req-1a15c259-b1dd-44bd-bdfc-6fbb7bd80a7c tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 807.107985] env[61839]: DEBUG nova.virt.hardware [None req-1a15c259-b1dd-44bd-bdfc-6fbb7bd80a7c tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), 
maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 807.108169] env[61839]: DEBUG nova.virt.hardware [None req-1a15c259-b1dd-44bd-bdfc-6fbb7bd80a7c tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 807.108342] env[61839]: DEBUG nova.virt.hardware [None req-1a15c259-b1dd-44bd-bdfc-6fbb7bd80a7c tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Got 1 possible topologies {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 807.108513] env[61839]: DEBUG nova.virt.hardware [None req-1a15c259-b1dd-44bd-bdfc-6fbb7bd80a7c tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 807.108695] env[61839]: DEBUG nova.virt.hardware [None req-1a15c259-b1dd-44bd-bdfc-6fbb7bd80a7c tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 807.109605] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7128f51d-7621-4c7b-ae75-5ba5efa247ce {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.119524] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c6a144c-fafa-4108-9732-5b9860df7947 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.318745] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2c7b4d4-d554-4c10-8c8c-079a4d52f620 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.326626] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60af8d91-e610-4238-bcb5-af9eaf7ee53f {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.356509] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74b9f937-8d04-412e-b4b5-91fb143abe16 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.364120] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e90c05bd-345f-442a-9cc0-0dd4c8f530ff {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.378777] env[61839]: DEBUG nova.compute.provider_tree [None req-67f27bd5-5058-4162-a21b-0d02d096f4b9 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Updating inventory in ProviderTree for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 
'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 807.393061] env[61839]: DEBUG oslo_vmware.api [None req-ee9708ff-fbc6-428b-a53a-456e2b54cb6b tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Task: {'id': task-1314364, 'name': ReconfigVM_Task, 'duration_secs': 0.174165} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 807.393996] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee9708ff-fbc6-428b-a53a-456e2b54cb6b tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 603191b6-a4b0-451b-b98b-f3dbfb684300] Powering on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 807.394294] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-efae16d8-5c87-4b32-9384-e1905c2dc61e {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.400920] env[61839]: DEBUG oslo_vmware.api [None req-ee9708ff-fbc6-428b-a53a-456e2b54cb6b tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Waiting for the task: (returnval){ [ 807.400920] env[61839]: value = "task-1314365" [ 807.400920] env[61839]: _type = "Task" [ 807.400920] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 807.408769] env[61839]: DEBUG oslo_vmware.api [None req-ee9708ff-fbc6-428b-a53a-456e2b54cb6b tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Task: {'id': task-1314365, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 807.492676] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3697bd91-57ab-4763-82ae-06bd5d1d7501 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 807.690527] env[61839]: DEBUG nova.compute.manager [req-aafc8f3b-9e77-451e-9d64-0a75bd91638e req-820ce652-f60e-4467-bd8b-011d0afd2d2d service nova] [instance: 86525ea7-af75-4b10-85a1-c0fbab73ea5f] Received event network-vif-plugged-ef8176cf-7494-44f4-a600-7dedff162419 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 807.690723] env[61839]: DEBUG oslo_concurrency.lockutils [req-aafc8f3b-9e77-451e-9d64-0a75bd91638e req-820ce652-f60e-4467-bd8b-011d0afd2d2d service nova] Acquiring lock "86525ea7-af75-4b10-85a1-c0fbab73ea5f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 807.690931] env[61839]: DEBUG oslo_concurrency.lockutils [req-aafc8f3b-9e77-451e-9d64-0a75bd91638e req-820ce652-f60e-4467-bd8b-011d0afd2d2d service nova] Lock "86525ea7-af75-4b10-85a1-c0fbab73ea5f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 807.691126] env[61839]: DEBUG oslo_concurrency.lockutils [req-aafc8f3b-9e77-451e-9d64-0a75bd91638e req-820ce652-f60e-4467-bd8b-011d0afd2d2d service nova] Lock "86525ea7-af75-4b10-85a1-c0fbab73ea5f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 807.691285] env[61839]: DEBUG nova.compute.manager [req-aafc8f3b-9e77-451e-9d64-0a75bd91638e req-820ce652-f60e-4467-bd8b-011d0afd2d2d service nova] [instance: 86525ea7-af75-4b10-85a1-c0fbab73ea5f] No waiting events found dispatching network-vif-plugged-ef8176cf-7494-44f4-a600-7dedff162419 {{(pid=61839) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 807.691451] env[61839]: WARNING nova.compute.manager [req-aafc8f3b-9e77-451e-9d64-0a75bd91638e req-820ce652-f60e-4467-bd8b-011d0afd2d2d service nova] [instance: 86525ea7-af75-4b10-85a1-c0fbab73ea5f] Received unexpected event network-vif-plugged-ef8176cf-7494-44f4-a600-7dedff162419 for instance with vm_state building and task_state spawning. [ 807.855085] env[61839]: DEBUG nova.network.neutron [None req-1a15c259-b1dd-44bd-bdfc-6fbb7bd80a7c tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] [instance: 86525ea7-af75-4b10-85a1-c0fbab73ea5f] Successfully updated port: ef8176cf-7494-44f4-a600-7dedff162419 {{(pid=61839) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 807.912391] env[61839]: DEBUG oslo_vmware.api [None req-ee9708ff-fbc6-428b-a53a-456e2b54cb6b tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Task: {'id': task-1314365, 'name': PowerOnVM_Task, 'duration_secs': 0.476742} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 807.913040] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee9708ff-fbc6-428b-a53a-456e2b54cb6b tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 603191b6-a4b0-451b-b98b-f3dbfb684300] Powered on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 807.916272] env[61839]: DEBUG nova.compute.manager [None req-ee9708ff-fbc6-428b-a53a-456e2b54cb6b tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 603191b6-a4b0-451b-b98b-f3dbfb684300] Checking state {{(pid=61839) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 807.917240] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e641f89-c87b-4230-9088-8db7ab5a309d {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.926773] env[61839]: DEBUG nova.scheduler.client.report [None req-67f27bd5-5058-4162-a21b-0d02d096f4b9 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Updated inventory for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 with generation 76 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 807.929017] env[61839]: DEBUG nova.compute.provider_tree [None req-67f27bd5-5058-4162-a21b-0d02d096f4b9 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Updating resource provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 generation from 76 to 77 during operation: update_inventory {{(pid=61839) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 807.929017] env[61839]: DEBUG nova.compute.provider_tree [None req-67f27bd5-5058-4162-a21b-0d02d096f4b9 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Updating inventory in ProviderTree for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 808.357835] env[61839]: DEBUG oslo_concurrency.lockutils [None req-1a15c259-b1dd-44bd-bdfc-6fbb7bd80a7c tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Acquiring lock "refresh_cache-86525ea7-af75-4b10-85a1-c0fbab73ea5f" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 808.358173] env[61839]: DEBUG oslo_concurrency.lockutils [None req-1a15c259-b1dd-44bd-bdfc-6fbb7bd80a7c 
tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Acquired lock "refresh_cache-86525ea7-af75-4b10-85a1-c0fbab73ea5f" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 808.358245] env[61839]: DEBUG nova.network.neutron [None req-1a15c259-b1dd-44bd-bdfc-6fbb7bd80a7c tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] [instance: 86525ea7-af75-4b10-85a1-c0fbab73ea5f] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 808.434802] env[61839]: DEBUG oslo_concurrency.lockutils [None req-67f27bd5-5058-4162-a21b-0d02d096f4b9 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.383s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 808.436034] env[61839]: DEBUG nova.compute.manager [None req-67f27bd5-5058-4162-a21b-0d02d096f4b9 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: a661cc10-5c4e-421b-b70b-189f0a613e8a] Start building networks asynchronously for instance. {{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 808.439627] env[61839]: DEBUG oslo_concurrency.lockutils [None req-78cb2bb4-fa32-4f8b-ac66-631fd240ea4c tempest-ServersAaction247Test-1570847378 tempest-ServersAaction247Test-1570847378-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 18.941s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 808.439955] env[61839]: DEBUG oslo_concurrency.lockutils [None req-78cb2bb4-fa32-4f8b-ac66-631fd240ea4c tempest-ServersAaction247Test-1570847378 tempest-ServersAaction247Test-1570847378-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 808.444165] env[61839]: DEBUG oslo_concurrency.lockutils [None req-4445949b-c57d-48df-ad34-d533bc3c88d3 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.381s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 808.445536] env[61839]: INFO nova.compute.claims [None req-4445949b-c57d-48df-ad34-d533bc3c88d3 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 808.465879] env[61839]: INFO nova.scheduler.client.report [None req-78cb2bb4-fa32-4f8b-ac66-631fd240ea4c tempest-ServersAaction247Test-1570847378 tempest-ServersAaction247Test-1570847378-project-member] Deleted allocations for instance 0bc0eefd-8a56-4cd6-a0b5-818cc437d917 [ 808.586379] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Acquiring lock 
"ce59c937-fc0b-464f-baaa-461c6f6c2d57" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 808.586379] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Lock "ce59c937-fc0b-464f-baaa-461c6f6c2d57" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 808.614644] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Acquiring lock "c180cc04-79da-4529-a905-1985a85b7cf5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 808.614644] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Lock "c180cc04-79da-4529-a905-1985a85b7cf5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 808.906702] env[61839]: DEBUG nova.network.neutron [None req-1a15c259-b1dd-44bd-bdfc-6fbb7bd80a7c tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] [instance: 86525ea7-af75-4b10-85a1-c0fbab73ea5f] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 808.951268] env[61839]: DEBUG nova.compute.utils [None req-67f27bd5-5058-4162-a21b-0d02d096f4b9 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Using /dev/sd instead of None {{(pid=61839) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 808.957187] env[61839]: DEBUG nova.compute.manager [None req-67f27bd5-5058-4162-a21b-0d02d096f4b9 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: a661cc10-5c4e-421b-b70b-189f0a613e8a] Allocating IP information in the background. 
{{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 808.957365] env[61839]: DEBUG nova.network.neutron [None req-67f27bd5-5058-4162-a21b-0d02d096f4b9 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: a661cc10-5c4e-421b-b70b-189f0a613e8a] allocate_for_instance() {{(pid=61839) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 808.974990] env[61839]: DEBUG oslo_concurrency.lockutils [None req-78cb2bb4-fa32-4f8b-ac66-631fd240ea4c tempest-ServersAaction247Test-1570847378 tempest-ServersAaction247Test-1570847378-project-member] Lock "0bc0eefd-8a56-4cd6-a0b5-818cc437d917" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 23.196s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 809.034520] env[61839]: DEBUG nova.policy [None req-67f27bd5-5058-4162-a21b-0d02d096f4b9 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '430b14eaa0e94ef39fb0f95269448ade', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '25686a503d044467a1d641f14e14c65c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61839) authorize /opt/stack/nova/nova/policy.py:201}} [ 809.380545] env[61839]: DEBUG nova.network.neutron [None req-1a15c259-b1dd-44bd-bdfc-6fbb7bd80a7c tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] [instance: 86525ea7-af75-4b10-85a1-c0fbab73ea5f] Updating instance_info_cache with network_info: [{"id": "ef8176cf-7494-44f4-a600-7dedff162419", "address": "fa:16:3e:c1:30:b3", "network": {"id": "8bda8ac4-b34c-4577-ae5e-07845e9e7428", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-74282814-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c7a300fe2748456bb4a522a4d7c0d0f4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d1e1e320-ec56-4fcc-b6e9-30aa210d3b36", "external-id": "nsx-vlan-transportzone-447", "segmentation_id": 447, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapef8176cf-74", "ovs_interfaceid": "ef8176cf-7494-44f4-a600-7dedff162419", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 809.458050] env[61839]: DEBUG nova.compute.manager [None req-67f27bd5-5058-4162-a21b-0d02d096f4b9 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: a661cc10-5c4e-421b-b70b-189f0a613e8a] Start building block device mappings for instance. 
{{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 809.648431] env[61839]: DEBUG nova.network.neutron [None req-67f27bd5-5058-4162-a21b-0d02d096f4b9 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: a661cc10-5c4e-421b-b70b-189f0a613e8a] Successfully created port: 2adc8fda-8606-45a2-95cb-f775d70870e8 {{(pid=61839) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 809.742928] env[61839]: DEBUG nova.compute.manager [req-50df4b9e-9d6c-4ac6-844e-355897184908 req-f4c6ba10-dac7-4a49-9769-3c908f98f311 service nova] [instance: 86525ea7-af75-4b10-85a1-c0fbab73ea5f] Received event network-changed-ef8176cf-7494-44f4-a600-7dedff162419 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 809.742928] env[61839]: DEBUG nova.compute.manager [req-50df4b9e-9d6c-4ac6-844e-355897184908 req-f4c6ba10-dac7-4a49-9769-3c908f98f311 service nova] [instance: 86525ea7-af75-4b10-85a1-c0fbab73ea5f] Refreshing instance network info cache due to event network-changed-ef8176cf-7494-44f4-a600-7dedff162419. {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 809.742928] env[61839]: DEBUG oslo_concurrency.lockutils [req-50df4b9e-9d6c-4ac6-844e-355897184908 req-f4c6ba10-dac7-4a49-9769-3c908f98f311 service nova] Acquiring lock "refresh_cache-86525ea7-af75-4b10-85a1-c0fbab73ea5f" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 809.776213] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44dc1468-01bb-4ea2-bd2c-9a8ff90cf09b {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.783441] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91195655-392c-4009-be76-af6d61e34b1a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.819230] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34848df0-2674-47ea-8757-91728073e277 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.826982] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99b9bfab-279c-4397-b2eb-5441b817e0ee {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.843130] env[61839]: DEBUG nova.compute.provider_tree [None req-4445949b-c57d-48df-ad34-d533bc3c88d3 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 809.883618] env[61839]: DEBUG oslo_concurrency.lockutils [None req-1a15c259-b1dd-44bd-bdfc-6fbb7bd80a7c tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Releasing lock "refresh_cache-86525ea7-af75-4b10-85a1-c0fbab73ea5f" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 809.884135] env[61839]: DEBUG nova.compute.manager [None req-1a15c259-b1dd-44bd-bdfc-6fbb7bd80a7c tempest-ServersNegativeTestJSON-610681102 
tempest-ServersNegativeTestJSON-610681102-project-member] [instance: 86525ea7-af75-4b10-85a1-c0fbab73ea5f] Instance network_info: |[{"id": "ef8176cf-7494-44f4-a600-7dedff162419", "address": "fa:16:3e:c1:30:b3", "network": {"id": "8bda8ac4-b34c-4577-ae5e-07845e9e7428", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-74282814-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c7a300fe2748456bb4a522a4d7c0d0f4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d1e1e320-ec56-4fcc-b6e9-30aa210d3b36", "external-id": "nsx-vlan-transportzone-447", "segmentation_id": 447, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapef8176cf-74", "ovs_interfaceid": "ef8176cf-7494-44f4-a600-7dedff162419", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 809.885039] env[61839]: DEBUG oslo_concurrency.lockutils [req-50df4b9e-9d6c-4ac6-844e-355897184908 req-f4c6ba10-dac7-4a49-9769-3c908f98f311 service nova] Acquired lock "refresh_cache-86525ea7-af75-4b10-85a1-c0fbab73ea5f" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 809.885039] env[61839]: DEBUG nova.network.neutron [req-50df4b9e-9d6c-4ac6-844e-355897184908 req-f4c6ba10-dac7-4a49-9769-3c908f98f311 service nova] [instance: 86525ea7-af75-4b10-85a1-c0fbab73ea5f] Refreshing network info cache for port ef8176cf-7494-44f4-a600-7dedff162419 {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 809.886184] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-1a15c259-b1dd-44bd-bdfc-6fbb7bd80a7c tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] [instance: 86525ea7-af75-4b10-85a1-c0fbab73ea5f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c1:30:b3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd1e1e320-ec56-4fcc-b6e9-30aa210d3b36', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ef8176cf-7494-44f4-a600-7dedff162419', 'vif_model': 'vmxnet3'}] {{(pid=61839) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 809.893886] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a15c259-b1dd-44bd-bdfc-6fbb7bd80a7c tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Creating folder: Project (c7a300fe2748456bb4a522a4d7c0d0f4). Parent ref: group-v281288. 
{{(pid=61839) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 809.895262] env[61839]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d8aaa1eb-050a-4117-8a87-9372f917b1bc {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.907712] env[61839]: INFO nova.virt.vmwareapi.vm_util [None req-1a15c259-b1dd-44bd-bdfc-6fbb7bd80a7c tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Created folder: Project (c7a300fe2748456bb4a522a4d7c0d0f4) in parent group-v281288. [ 809.907978] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a15c259-b1dd-44bd-bdfc-6fbb7bd80a7c tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Creating folder: Instances. Parent ref: group-v281324. {{(pid=61839) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 809.908257] env[61839]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0e4444e6-2bdf-4532-9733-e676111d42ca {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.918296] env[61839]: INFO nova.virt.vmwareapi.vm_util [None req-1a15c259-b1dd-44bd-bdfc-6fbb7bd80a7c tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Created folder: Instances in parent group-v281324. [ 809.918631] env[61839]: DEBUG oslo.service.loopingcall [None req-1a15c259-b1dd-44bd-bdfc-6fbb7bd80a7c tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 809.918922] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 86525ea7-af75-4b10-85a1-c0fbab73ea5f] Creating VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 809.919182] env[61839]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9dd71e92-ce72-45a1-a0ea-e28dd2b87e33 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.945554] env[61839]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 809.945554] env[61839]: value = "task-1314368" [ 809.945554] env[61839]: _type = "Task" [ 809.945554] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 809.956133] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314368, 'name': CreateVM_Task} progress is 5%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 810.141803] env[61839]: INFO nova.compute.manager [None req-a3c3e5d4-86a4-4921-beca-8a309bae8dfa tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 5c29c188-a34b-4751-9f8b-166af7b15088] Rescuing [ 810.142187] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a3c3e5d4-86a4-4921-beca-8a309bae8dfa tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Acquiring lock "refresh_cache-5c29c188-a34b-4751-9f8b-166af7b15088" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 810.143322] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a3c3e5d4-86a4-4921-beca-8a309bae8dfa tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Acquired lock "refresh_cache-5c29c188-a34b-4751-9f8b-166af7b15088" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 810.143322] env[61839]: DEBUG nova.network.neutron [None req-a3c3e5d4-86a4-4921-beca-8a309bae8dfa tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 5c29c188-a34b-4751-9f8b-166af7b15088] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 810.347459] env[61839]: DEBUG nova.scheduler.client.report [None req-4445949b-c57d-48df-ad34-d533bc3c88d3 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 810.457676] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314368, 'name': CreateVM_Task, 'duration_secs': 0.338416} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 810.460736] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 86525ea7-af75-4b10-85a1-c0fbab73ea5f] Created VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 810.460736] env[61839]: DEBUG oslo_concurrency.lockutils [None req-1a15c259-b1dd-44bd-bdfc-6fbb7bd80a7c tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 810.460736] env[61839]: DEBUG oslo_concurrency.lockutils [None req-1a15c259-b1dd-44bd-bdfc-6fbb7bd80a7c tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 810.460736] env[61839]: DEBUG oslo_concurrency.lockutils [None req-1a15c259-b1dd-44bd-bdfc-6fbb7bd80a7c tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 810.460736] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-09f91d3b-46ce-4a78-929f-d0df919f2e04 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.468157] env[61839]: DEBUG oslo_vmware.api [None req-1a15c259-b1dd-44bd-bdfc-6fbb7bd80a7c tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Waiting for the task: (returnval){ [ 810.468157] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]524fd98e-1a30-4fb4-04c0-593b16d31331" [ 810.468157] env[61839]: _type = "Task" [ 810.468157] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 810.472133] env[61839]: DEBUG nova.compute.manager [None req-67f27bd5-5058-4162-a21b-0d02d096f4b9 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: a661cc10-5c4e-421b-b70b-189f0a613e8a] Start spawning the instance on the hypervisor. {{(pid=61839) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 810.481707] env[61839]: DEBUG oslo_vmware.api [None req-1a15c259-b1dd-44bd-bdfc-6fbb7bd80a7c tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]524fd98e-1a30-4fb4-04c0-593b16d31331, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 810.502031] env[61839]: DEBUG nova.virt.hardware [None req-67f27bd5-5058-4162-a21b-0d02d096f4b9 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-18T16:51:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-18T16:50:57Z,direct_url=,disk_format='vmdk',id=e497cc62-282a-4a70-9770-22d80d8a1013,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a98e034276e44534a9feace637762da7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-18T16:50:58Z,virtual_size=,visibility=), allow threads: False {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 810.502031] env[61839]: DEBUG nova.virt.hardware [None req-67f27bd5-5058-4162-a21b-0d02d096f4b9 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Flavor limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 810.502463] env[61839]: DEBUG nova.virt.hardware [None req-67f27bd5-5058-4162-a21b-0d02d096f4b9 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Image limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 810.502791] env[61839]: DEBUG nova.virt.hardware [None req-67f27bd5-5058-4162-a21b-0d02d096f4b9 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Flavor pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 810.503107] env[61839]: DEBUG nova.virt.hardware [None req-67f27bd5-5058-4162-a21b-0d02d096f4b9 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Image pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 810.503393] env[61839]: DEBUG nova.virt.hardware [None req-67f27bd5-5058-4162-a21b-0d02d096f4b9 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 810.503819] env[61839]: DEBUG nova.virt.hardware [None req-67f27bd5-5058-4162-a21b-0d02d096f4b9 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 810.504135] env[61839]: DEBUG nova.virt.hardware [None req-67f27bd5-5058-4162-a21b-0d02d096f4b9 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 810.504431] env[61839]: DEBUG nova.virt.hardware [None 
req-67f27bd5-5058-4162-a21b-0d02d096f4b9 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Got 1 possible topologies {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 810.504711] env[61839]: DEBUG nova.virt.hardware [None req-67f27bd5-5058-4162-a21b-0d02d096f4b9 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 810.504999] env[61839]: DEBUG nova.virt.hardware [None req-67f27bd5-5058-4162-a21b-0d02d096f4b9 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 810.506836] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3a78ca4-645e-495c-81c4-14b4bf0bb0c5 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.515076] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b5be1c9-7bfe-4fbb-8291-70ab7cd8c11b {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.810046] env[61839]: DEBUG nova.network.neutron [req-50df4b9e-9d6c-4ac6-844e-355897184908 req-f4c6ba10-dac7-4a49-9769-3c908f98f311 service nova] [instance: 86525ea7-af75-4b10-85a1-c0fbab73ea5f] Updated VIF entry in instance network info cache for port ef8176cf-7494-44f4-a600-7dedff162419. 
{{(pid=61839) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 810.810491] env[61839]: DEBUG nova.network.neutron [req-50df4b9e-9d6c-4ac6-844e-355897184908 req-f4c6ba10-dac7-4a49-9769-3c908f98f311 service nova] [instance: 86525ea7-af75-4b10-85a1-c0fbab73ea5f] Updating instance_info_cache with network_info: [{"id": "ef8176cf-7494-44f4-a600-7dedff162419", "address": "fa:16:3e:c1:30:b3", "network": {"id": "8bda8ac4-b34c-4577-ae5e-07845e9e7428", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-74282814-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c7a300fe2748456bb4a522a4d7c0d0f4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d1e1e320-ec56-4fcc-b6e9-30aa210d3b36", "external-id": "nsx-vlan-transportzone-447", "segmentation_id": 447, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapef8176cf-74", "ovs_interfaceid": "ef8176cf-7494-44f4-a600-7dedff162419", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 810.853880] env[61839]: DEBUG oslo_concurrency.lockutils [None req-4445949b-c57d-48df-ad34-d533bc3c88d3 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.410s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 810.854410] env[61839]: DEBUG nova.compute.manager [None req-4445949b-c57d-48df-ad34-d533bc3c88d3 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4] Start building networks asynchronously for instance. {{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 810.857725] env[61839]: DEBUG oslo_concurrency.lockutils [None req-fb2077f5-f93b-4202-be51-2a1ff1444651 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.960s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 810.859113] env[61839]: INFO nova.compute.claims [None req-fb2077f5-f93b-4202-be51-2a1ff1444651 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 6f43abec-51e2-40e4-8a0f-5a8617a9a9f8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 810.976857] env[61839]: DEBUG oslo_vmware.api [None req-1a15c259-b1dd-44bd-bdfc-6fbb7bd80a7c tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]524fd98e-1a30-4fb4-04c0-593b16d31331, 'name': SearchDatastore_Task, 'duration_secs': 0.013809} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 810.977185] env[61839]: DEBUG oslo_concurrency.lockutils [None req-1a15c259-b1dd-44bd-bdfc-6fbb7bd80a7c tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 810.977423] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-1a15c259-b1dd-44bd-bdfc-6fbb7bd80a7c tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] [instance: 86525ea7-af75-4b10-85a1-c0fbab73ea5f] Processing image e497cc62-282a-4a70-9770-22d80d8a1013 {{(pid=61839) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 810.977655] env[61839]: DEBUG oslo_concurrency.lockutils [None req-1a15c259-b1dd-44bd-bdfc-6fbb7bd80a7c tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 810.977832] env[61839]: DEBUG oslo_concurrency.lockutils [None req-1a15c259-b1dd-44bd-bdfc-6fbb7bd80a7c tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 810.978077] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-1a15c259-b1dd-44bd-bdfc-6fbb7bd80a7c tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 810.978366] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d76d7f2d-ba2d-405a-89d0-5e8dfe64ecd1 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.988060] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-1a15c259-b1dd-44bd-bdfc-6fbb7bd80a7c tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 810.988182] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-1a15c259-b1dd-44bd-bdfc-6fbb7bd80a7c tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61839) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 810.988871] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ea4c58fd-91ac-473d-b443-29762ae1b8da {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.993615] env[61839]: DEBUG oslo_vmware.api [None req-1a15c259-b1dd-44bd-bdfc-6fbb7bd80a7c tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Waiting for the task: (returnval){ [ 810.993615] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52172004-3afa-22f9-7190-4de1bea68ac9" [ 810.993615] env[61839]: _type = "Task" [ 810.993615] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 811.001059] env[61839]: DEBUG oslo_vmware.api [None req-1a15c259-b1dd-44bd-bdfc-6fbb7bd80a7c tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52172004-3afa-22f9-7190-4de1bea68ac9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 811.014576] env[61839]: DEBUG nova.network.neutron [None req-a3c3e5d4-86a4-4921-beca-8a309bae8dfa tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 5c29c188-a34b-4751-9f8b-166af7b15088] Updating instance_info_cache with network_info: [{"id": "2d6e228a-75ff-4bff-bc8d-bdde3218cf40", "address": "fa:16:3e:5a:64:4f", "network": {"id": "6f1d4bda-c333-4556-9530-df719c058a5b", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1248001983-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "223d94c193814f649b5d1f35e3756071", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2180b40f-2bb0-47da-ba80-c2fbe7f98af0", "external-id": "nsx-vlan-transportzone-970", "segmentation_id": 970, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2d6e228a-75", "ovs_interfaceid": "2d6e228a-75ff-4bff-bc8d-bdde3218cf40", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 811.314848] env[61839]: DEBUG oslo_concurrency.lockutils [req-50df4b9e-9d6c-4ac6-844e-355897184908 req-f4c6ba10-dac7-4a49-9769-3c908f98f311 service nova] Releasing lock "refresh_cache-86525ea7-af75-4b10-85a1-c0fbab73ea5f" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 811.363701] env[61839]: DEBUG nova.compute.utils [None req-4445949b-c57d-48df-ad34-d533bc3c88d3 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Using /dev/sd instead of None {{(pid=61839) get_next_device_name 
/opt/stack/nova/nova/compute/utils.py:238}} [ 811.367317] env[61839]: DEBUG nova.compute.manager [None req-4445949b-c57d-48df-ad34-d533bc3c88d3 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4] Allocating IP information in the background. {{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 811.367492] env[61839]: DEBUG nova.network.neutron [None req-4445949b-c57d-48df-ad34-d533bc3c88d3 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4] allocate_for_instance() {{(pid=61839) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 811.377129] env[61839]: DEBUG nova.network.neutron [None req-67f27bd5-5058-4162-a21b-0d02d096f4b9 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: a661cc10-5c4e-421b-b70b-189f0a613e8a] Successfully updated port: 2adc8fda-8606-45a2-95cb-f775d70870e8 {{(pid=61839) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 811.417698] env[61839]: DEBUG nova.policy [None req-4445949b-c57d-48df-ad34-d533bc3c88d3 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8abcff2ffe534da3983ec78c3671110d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5f789f3900a347b59c491e9d141fb9e7', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61839) authorize /opt/stack/nova/nova/policy.py:201}} [ 811.504450] env[61839]: DEBUG oslo_vmware.api [None req-1a15c259-b1dd-44bd-bdfc-6fbb7bd80a7c tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52172004-3afa-22f9-7190-4de1bea68ac9, 'name': SearchDatastore_Task, 'duration_secs': 0.037106} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 811.505255] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7aff39ab-0903-4891-a917-22bbf9b0dd14 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.510164] env[61839]: DEBUG oslo_vmware.api [None req-1a15c259-b1dd-44bd-bdfc-6fbb7bd80a7c tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Waiting for the task: (returnval){ [ 811.510164] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]5233cd17-db45-1533-ee42-b7b9fc2af8c9" [ 811.510164] env[61839]: _type = "Task" [ 811.510164] env[61839]: } to complete. 
{{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 811.518747] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a3c3e5d4-86a4-4921-beca-8a309bae8dfa tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Releasing lock "refresh_cache-5c29c188-a34b-4751-9f8b-166af7b15088" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 811.520885] env[61839]: DEBUG oslo_vmware.api [None req-1a15c259-b1dd-44bd-bdfc-6fbb7bd80a7c tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]5233cd17-db45-1533-ee42-b7b9fc2af8c9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 811.821317] env[61839]: DEBUG nova.network.neutron [None req-4445949b-c57d-48df-ad34-d533bc3c88d3 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4] Successfully created port: bc315481-8651-4be3-bdd5-269b569b2817 {{(pid=61839) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 811.827324] env[61839]: DEBUG nova.compute.manager [req-9c7cd2cf-8463-48b9-ace7-5a04050eda14 req-e71fd326-7711-4ac0-a4f9-3c609b836d37 service nova] [instance: a661cc10-5c4e-421b-b70b-189f0a613e8a] Received event network-vif-plugged-2adc8fda-8606-45a2-95cb-f775d70870e8 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 811.827661] env[61839]: DEBUG oslo_concurrency.lockutils [req-9c7cd2cf-8463-48b9-ace7-5a04050eda14 req-e71fd326-7711-4ac0-a4f9-3c609b836d37 service nova] Acquiring lock "a661cc10-5c4e-421b-b70b-189f0a613e8a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 811.827958] env[61839]: DEBUG oslo_concurrency.lockutils [req-9c7cd2cf-8463-48b9-ace7-5a04050eda14 req-e71fd326-7711-4ac0-a4f9-3c609b836d37 service nova] Lock "a661cc10-5c4e-421b-b70b-189f0a613e8a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 811.828232] env[61839]: DEBUG oslo_concurrency.lockutils [req-9c7cd2cf-8463-48b9-ace7-5a04050eda14 req-e71fd326-7711-4ac0-a4f9-3c609b836d37 service nova] Lock "a661cc10-5c4e-421b-b70b-189f0a613e8a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 811.828487] env[61839]: DEBUG nova.compute.manager [req-9c7cd2cf-8463-48b9-ace7-5a04050eda14 req-e71fd326-7711-4ac0-a4f9-3c609b836d37 service nova] [instance: a661cc10-5c4e-421b-b70b-189f0a613e8a] No waiting events found dispatching network-vif-plugged-2adc8fda-8606-45a2-95cb-f775d70870e8 {{(pid=61839) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 811.828732] env[61839]: WARNING nova.compute.manager [req-9c7cd2cf-8463-48b9-ace7-5a04050eda14 req-e71fd326-7711-4ac0-a4f9-3c609b836d37 service nova] [instance: a661cc10-5c4e-421b-b70b-189f0a613e8a] Received unexpected event network-vif-plugged-2adc8fda-8606-45a2-95cb-f775d70870e8 for 
instance with vm_state building and task_state spawning. [ 811.829136] env[61839]: DEBUG nova.compute.manager [req-9c7cd2cf-8463-48b9-ace7-5a04050eda14 req-e71fd326-7711-4ac0-a4f9-3c609b836d37 service nova] [instance: a661cc10-5c4e-421b-b70b-189f0a613e8a] Received event network-changed-2adc8fda-8606-45a2-95cb-f775d70870e8 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 811.829343] env[61839]: DEBUG nova.compute.manager [req-9c7cd2cf-8463-48b9-ace7-5a04050eda14 req-e71fd326-7711-4ac0-a4f9-3c609b836d37 service nova] [instance: a661cc10-5c4e-421b-b70b-189f0a613e8a] Refreshing instance network info cache due to event network-changed-2adc8fda-8606-45a2-95cb-f775d70870e8. {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 811.829578] env[61839]: DEBUG oslo_concurrency.lockutils [req-9c7cd2cf-8463-48b9-ace7-5a04050eda14 req-e71fd326-7711-4ac0-a4f9-3c609b836d37 service nova] Acquiring lock "refresh_cache-a661cc10-5c4e-421b-b70b-189f0a613e8a" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 811.829757] env[61839]: DEBUG oslo_concurrency.lockutils [req-9c7cd2cf-8463-48b9-ace7-5a04050eda14 req-e71fd326-7711-4ac0-a4f9-3c609b836d37 service nova] Acquired lock "refresh_cache-a661cc10-5c4e-421b-b70b-189f0a613e8a" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 811.829958] env[61839]: DEBUG nova.network.neutron [req-9c7cd2cf-8463-48b9-ace7-5a04050eda14 req-e71fd326-7711-4ac0-a4f9-3c609b836d37 service nova] [instance: a661cc10-5c4e-421b-b70b-189f0a613e8a] Refreshing network info cache for port 2adc8fda-8606-45a2-95cb-f775d70870e8 {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 811.868367] env[61839]: DEBUG nova.compute.manager [None req-4445949b-c57d-48df-ad34-d533bc3c88d3 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4] Start building block device mappings for instance. {{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 811.881989] env[61839]: DEBUG oslo_concurrency.lockutils [None req-67f27bd5-5058-4162-a21b-0d02d096f4b9 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Acquiring lock "refresh_cache-a661cc10-5c4e-421b-b70b-189f0a613e8a" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 812.021893] env[61839]: DEBUG oslo_vmware.api [None req-1a15c259-b1dd-44bd-bdfc-6fbb7bd80a7c tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]5233cd17-db45-1533-ee42-b7b9fc2af8c9, 'name': SearchDatastore_Task, 'duration_secs': 0.040867} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 812.024550] env[61839]: DEBUG oslo_concurrency.lockutils [None req-1a15c259-b1dd-44bd-bdfc-6fbb7bd80a7c tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 812.024831] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a15c259-b1dd-44bd-bdfc-6fbb7bd80a7c tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk to [datastore2] 86525ea7-af75-4b10-85a1-c0fbab73ea5f/86525ea7-af75-4b10-85a1-c0fbab73ea5f.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}}
[ 812.025322] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3da86680-5062-4d28-b59c-c63b69c90266 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 812.032623] env[61839]: DEBUG oslo_vmware.api [None req-1a15c259-b1dd-44bd-bdfc-6fbb7bd80a7c tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Waiting for the task: (returnval){
[ 812.032623] env[61839]: value = "task-1314369"
[ 812.032623] env[61839]: _type = "Task"
[ 812.032623] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 812.045630] env[61839]: DEBUG oslo_vmware.api [None req-1a15c259-b1dd-44bd-bdfc-6fbb7bd80a7c tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Task: {'id': task-1314369, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 812.046137] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-a3c3e5d4-86a4-4921-beca-8a309bae8dfa tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 5c29c188-a34b-4751-9f8b-166af7b15088] Powering off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}}
[ 812.046390] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0b633c75-f3f9-49e0-b283-0207f4d44acf {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 812.052777] env[61839]: DEBUG oslo_vmware.api [None req-a3c3e5d4-86a4-4921-beca-8a309bae8dfa tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Waiting for the task: (returnval){
[ 812.052777] env[61839]: value = "task-1314370"
[ 812.052777] env[61839]: _type = "Task"
[ 812.052777] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 812.063331] env[61839]: DEBUG oslo_vmware.api [None req-a3c3e5d4-86a4-4921-beca-8a309bae8dfa tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Task: {'id': task-1314370, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 812.150020] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4debdc5-6d0a-4a21-b2fa-0d173b257721 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 812.157683] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e607d401-e51f-4d5e-a37e-9f331cd1039a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 812.192482] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cffbd3fe-cefb-4448-8690-a530f67769be {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 812.199058] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae799723-abea-4d87-85f1-8512463ab021 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 812.214670] env[61839]: DEBUG nova.compute.provider_tree [None req-fb2077f5-f93b-4202-be51-2a1ff1444651 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 812.384195] env[61839]: DEBUG nova.network.neutron [req-9c7cd2cf-8463-48b9-ace7-5a04050eda14 req-e71fd326-7711-4ac0-a4f9-3c609b836d37 service nova] [instance: a661cc10-5c4e-421b-b70b-189f0a613e8a] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 812.482497] env[61839]: DEBUG nova.network.neutron [req-9c7cd2cf-8463-48b9-ace7-5a04050eda14 req-e71fd326-7711-4ac0-a4f9-3c609b836d37 service nova] [instance: a661cc10-5c4e-421b-b70b-189f0a613e8a] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 812.543095] env[61839]: DEBUG oslo_vmware.api [None req-1a15c259-b1dd-44bd-bdfc-6fbb7bd80a7c tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Task: {'id': task-1314369, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 812.564478] env[61839]: DEBUG oslo_vmware.api [None req-a3c3e5d4-86a4-4921-beca-8a309bae8dfa tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Task: {'id': task-1314370, 'name': PowerOffVM_Task, 'duration_secs': 0.185276} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 812.564478] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-a3c3e5d4-86a4-4921-beca-8a309bae8dfa tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 5c29c188-a34b-4751-9f8b-166af7b15088] Powered off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}}
[ 812.564478] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed2553cb-6b96-46c5-a082-dc6cfd05b9df {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 812.584445] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48e794aa-7fa6-4da0-aef8-d9f9ed71a41a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 812.611641] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-a3c3e5d4-86a4-4921-beca-8a309bae8dfa tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 5c29c188-a34b-4751-9f8b-166af7b15088] Powering off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}}
[ 812.611987] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-444ecc58-1c47-49de-9172-feeb4400f651 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 812.618599] env[61839]: DEBUG oslo_vmware.api [None req-a3c3e5d4-86a4-4921-beca-8a309bae8dfa tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Waiting for the task: (returnval){
[ 812.618599] env[61839]: value = "task-1314371"
[ 812.618599] env[61839]: _type = "Task"
[ 812.618599] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 812.627159] env[61839]: DEBUG oslo_vmware.api [None req-a3c3e5d4-86a4-4921-beca-8a309bae8dfa tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Task: {'id': task-1314371, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 812.718712] env[61839]: DEBUG nova.scheduler.client.report [None req-fb2077f5-f93b-4202-be51-2a1ff1444651 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 812.881399] env[61839]: DEBUG nova.compute.manager [None req-4445949b-c57d-48df-ad34-d533bc3c88d3 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4] Start spawning the instance on the hypervisor. {{(pid=61839) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}}
[ 812.901652] env[61839]: DEBUG nova.virt.hardware [None req-4445949b-c57d-48df-ad34-d533bc3c88d3 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-18T16:51:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-18T16:50:57Z,direct_url=<?>,disk_format='vmdk',id=e497cc62-282a-4a70-9770-22d80d8a1013,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a98e034276e44534a9feace637762da7',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-10-18T16:50:58Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}}
[ 812.901964] env[61839]: DEBUG nova.virt.hardware [None req-4445949b-c57d-48df-ad34-d533bc3c88d3 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Flavor limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}}
[ 812.902136] env[61839]: DEBUG nova.virt.hardware [None req-4445949b-c57d-48df-ad34-d533bc3c88d3 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Image limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 812.902655] env[61839]: DEBUG nova.virt.hardware [None req-4445949b-c57d-48df-ad34-d533bc3c88d3 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Flavor pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}}
[ 812.902861] env[61839]: DEBUG nova.virt.hardware [None req-4445949b-c57d-48df-ad34-d533bc3c88d3 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Image pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 812.903076] env[61839]: DEBUG nova.virt.hardware [None req-4445949b-c57d-48df-ad34-d533bc3c88d3 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}}
[ 812.903310] env[61839]: DEBUG nova.virt.hardware [None req-4445949b-c57d-48df-ad34-d533bc3c88d3 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}}
[ 812.903482] env[61839]: DEBUG nova.virt.hardware [None req-4445949b-c57d-48df-ad34-d533bc3c88d3 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}}
[ 812.903757] env[61839]: DEBUG nova.virt.hardware [None req-4445949b-c57d-48df-ad34-d533bc3c88d3 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Got 1 possible topologies {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}}
[ 812.904017] env[61839]: DEBUG nova.virt.hardware [None req-4445949b-c57d-48df-ad34-d533bc3c88d3 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}}
[ 812.904214] env[61839]: DEBUG nova.virt.hardware [None req-4445949b-c57d-48df-ad34-d533bc3c88d3 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
[ 812.905256] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac77bb39-f32c-4603-b53b-7570e48e5e30 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 812.913762] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f9cca5a-9cf5-4d60-b344-6e2dd1625c6e {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 812.985111] env[61839]: DEBUG oslo_concurrency.lockutils [req-9c7cd2cf-8463-48b9-ace7-5a04050eda14 req-e71fd326-7711-4ac0-a4f9-3c609b836d37 service nova] Releasing lock "refresh_cache-a661cc10-5c4e-421b-b70b-189f0a613e8a" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 812.985524] env[61839]: DEBUG oslo_concurrency.lockutils [None req-67f27bd5-5058-4162-a21b-0d02d096f4b9 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Acquired lock "refresh_cache-a661cc10-5c4e-421b-b70b-189f0a613e8a" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 812.985688] env[61839]: DEBUG nova.network.neutron [None req-67f27bd5-5058-4162-a21b-0d02d096f4b9 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: a661cc10-5c4e-421b-b70b-189f0a613e8a] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}}
[ 813.043303] env[61839]: DEBUG oslo_vmware.api [None req-1a15c259-b1dd-44bd-bdfc-6fbb7bd80a7c tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Task: {'id': task-1314369, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.533041} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 813.043601] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a15c259-b1dd-44bd-bdfc-6fbb7bd80a7c tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk to [datastore2] 86525ea7-af75-4b10-85a1-c0fbab73ea5f/86525ea7-af75-4b10-85a1-c0fbab73ea5f.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}}
[ 813.043794] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-1a15c259-b1dd-44bd-bdfc-6fbb7bd80a7c tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] [instance: 86525ea7-af75-4b10-85a1-c0fbab73ea5f] Extending root virtual disk to 1048576 {{(pid=61839) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}}
[ 813.044071] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5ffa8c57-2238-4283-a592-9192b07686f0 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 813.050688] env[61839]: DEBUG oslo_vmware.api [None req-1a15c259-b1dd-44bd-bdfc-6fbb7bd80a7c tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Waiting for the task: (returnval){
[ 813.050688] env[61839]: value = "task-1314372"
[ 813.050688] env[61839]: _type = "Task"
[ 813.050688] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 813.059720] env[61839]: DEBUG oslo_vmware.api [None req-1a15c259-b1dd-44bd-bdfc-6fbb7bd80a7c tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Task: {'id': task-1314372, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 813.128909] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-a3c3e5d4-86a4-4921-beca-8a309bae8dfa tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 5c29c188-a34b-4751-9f8b-166af7b15088] VM already powered off {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}}
[ 813.129195] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-a3c3e5d4-86a4-4921-beca-8a309bae8dfa tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 5c29c188-a34b-4751-9f8b-166af7b15088] Processing image e497cc62-282a-4a70-9770-22d80d8a1013 {{(pid=61839) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}}
[ 813.129452] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a3c3e5d4-86a4-4921-beca-8a309bae8dfa tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 813.129960] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a3c3e5d4-86a4-4921-beca-8a309bae8dfa tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 813.129960] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-a3c3e5d4-86a4-4921-beca-8a309bae8dfa tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 813.130184] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5fdd2138-1238-407d-80e7-07f5e0d073f0 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 813.139220] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-a3c3e5d4-86a4-4921-beca-8a309bae8dfa tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 813.139466] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-a3c3e5d4-86a4-4921-beca-8a309bae8dfa tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61839) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}}
[ 813.140254] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fd7d2ba3-f048-401f-9eed-78bc81621d07 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 813.145715] env[61839]: DEBUG oslo_vmware.api [None req-a3c3e5d4-86a4-4921-beca-8a309bae8dfa tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Waiting for the task: (returnval){
[ 813.145715] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]522fd715-b9af-3dca-958c-cf47befcc990"
[ 813.145715] env[61839]: _type = "Task"
[ 813.145715] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 813.153718] env[61839]: DEBUG oslo_vmware.api [None req-a3c3e5d4-86a4-4921-beca-8a309bae8dfa tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]522fd715-b9af-3dca-958c-cf47befcc990, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 813.223923] env[61839]: DEBUG oslo_concurrency.lockutils [None req-fb2077f5-f93b-4202-be51-2a1ff1444651 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.366s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 813.224568] env[61839]: DEBUG nova.compute.manager [None req-fb2077f5-f93b-4202-be51-2a1ff1444651 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 6f43abec-51e2-40e4-8a0f-5a8617a9a9f8] Start building networks asynchronously for instance. {{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}}
[ 813.228113] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0ff6597c-ab3c-4cf9-b3ae-5fc8fc4a3084 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.315s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 813.230413] env[61839]: INFO nova.compute.claims [None req-0ff6597c-ab3c-4cf9-b3ae-5fc8fc4a3084 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 047080fa-8781-47b1-89d8-2e4c8031b164] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 813.543918] env[61839]: DEBUG nova.network.neutron [None req-67f27bd5-5058-4162-a21b-0d02d096f4b9 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: a661cc10-5c4e-421b-b70b-189f0a613e8a] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 813.566152] env[61839]: DEBUG oslo_vmware.api [None req-1a15c259-b1dd-44bd-bdfc-6fbb7bd80a7c tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Task: {'id': task-1314372, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.135685} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 813.568448] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-1a15c259-b1dd-44bd-bdfc-6fbb7bd80a7c tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] [instance: 86525ea7-af75-4b10-85a1-c0fbab73ea5f] Extended root virtual disk {{(pid=61839) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}}
[ 813.569252] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdf44d38-4915-46e4-8a3c-ca1da39f533b {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 813.599379] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-1a15c259-b1dd-44bd-bdfc-6fbb7bd80a7c tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] [instance: 86525ea7-af75-4b10-85a1-c0fbab73ea5f] Reconfiguring VM instance instance-0000003a to attach disk [datastore2] 86525ea7-af75-4b10-85a1-c0fbab73ea5f/86525ea7-af75-4b10-85a1-c0fbab73ea5f.vmdk or device None with type sparse {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}}
[ 813.599649] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1e014891-d95a-4268-8f2a-f9dfb1182fa3 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 813.621252] env[61839]: DEBUG oslo_vmware.api [None req-1a15c259-b1dd-44bd-bdfc-6fbb7bd80a7c tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Waiting for the task: (returnval){
[ 813.621252] env[61839]: value = "task-1314373"
[ 813.621252] env[61839]: _type = "Task"
[ 813.621252] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 813.633084] env[61839]: DEBUG oslo_vmware.api [None req-1a15c259-b1dd-44bd-bdfc-6fbb7bd80a7c tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Task: {'id': task-1314373, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 813.657387] env[61839]: DEBUG oslo_vmware.api [None req-a3c3e5d4-86a4-4921-beca-8a309bae8dfa tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]522fd715-b9af-3dca-958c-cf47befcc990, 'name': SearchDatastore_Task, 'duration_secs': 0.021076} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 813.658307] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d69d56e3-9033-4d6b-b921-b25006310d5f {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 813.663775] env[61839]: DEBUG oslo_vmware.api [None req-a3c3e5d4-86a4-4921-beca-8a309bae8dfa tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Waiting for the task: (returnval){
[ 813.663775] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]523e01dc-01c0-c9e8-fd60-d878be6621e4"
[ 813.663775] env[61839]: _type = "Task"
[ 813.663775] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 813.669756] env[61839]: DEBUG nova.network.neutron [None req-4445949b-c57d-48df-ad34-d533bc3c88d3 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4] Successfully updated port: bc315481-8651-4be3-bdd5-269b569b2817 {{(pid=61839) _update_port /opt/stack/nova/nova/network/neutron.py:586}}
[ 813.679343] env[61839]: DEBUG oslo_vmware.api [None req-a3c3e5d4-86a4-4921-beca-8a309bae8dfa tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]523e01dc-01c0-c9e8-fd60-d878be6621e4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 813.729294] env[61839]: DEBUG nova.compute.utils [None req-fb2077f5-f93b-4202-be51-2a1ff1444651 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Using /dev/sd instead of None {{(pid=61839) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}}
[ 813.734027] env[61839]: DEBUG nova.compute.manager [None req-fb2077f5-f93b-4202-be51-2a1ff1444651 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 6f43abec-51e2-40e4-8a0f-5a8617a9a9f8] Allocating IP information in the background. {{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}}
[ 813.734027] env[61839]: DEBUG nova.network.neutron [None req-fb2077f5-f93b-4202-be51-2a1ff1444651 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 6f43abec-51e2-40e4-8a0f-5a8617a9a9f8] allocate_for_instance() {{(pid=61839) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}}
[ 813.788147] env[61839]: DEBUG nova.network.neutron [None req-67f27bd5-5058-4162-a21b-0d02d096f4b9 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: a661cc10-5c4e-421b-b70b-189f0a613e8a] Updating instance_info_cache with network_info: [{"id": "2adc8fda-8606-45a2-95cb-f775d70870e8", "address": "fa:16:3e:35:ff:ed", "network": {"id": "47b25b31-1262-485a-a847-2918f7fce488", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-914746239-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "25686a503d044467a1d641f14e14c65c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8c58d99d-ec12-4fc3-ab39-042b3f8cbb89", "external-id": "nsx-vlan-transportzone-44", "segmentation_id": 44, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2adc8fda-86", "ovs_interfaceid": "2adc8fda-8606-45a2-95cb-f775d70870e8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 813.811452] env[61839]: DEBUG nova.policy [None req-fb2077f5-f93b-4202-be51-2a1ff1444651 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7b9ca67c278b4cb9a83ec3c6ce42af5d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5aba1e066cb4400dbbacc92f393962e6', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61839) authorize /opt/stack/nova/nova/policy.py:201}}
[ 813.854806] env[61839]: DEBUG nova.compute.manager [req-edd33b90-b82f-42de-95c8-30ecffc5cc86 req-7f2737de-0414-4417-a996-6f1fb707799f service nova] [instance: fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4] Received event network-vif-plugged-bc315481-8651-4be3-bdd5-269b569b2817 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}}
[ 813.857429] env[61839]: DEBUG oslo_concurrency.lockutils [req-edd33b90-b82f-42de-95c8-30ecffc5cc86 req-7f2737de-0414-4417-a996-6f1fb707799f service nova] Acquiring lock "fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 813.857429] env[61839]: DEBUG oslo_concurrency.lockutils [req-edd33b90-b82f-42de-95c8-30ecffc5cc86 req-7f2737de-0414-4417-a996-6f1fb707799f service nova] Lock "fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.001s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 813.857429] env[61839]: DEBUG oslo_concurrency.lockutils [req-edd33b90-b82f-42de-95c8-30ecffc5cc86 req-7f2737de-0414-4417-a996-6f1fb707799f service nova] Lock "fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 813.857429] env[61839]: DEBUG nova.compute.manager [req-edd33b90-b82f-42de-95c8-30ecffc5cc86 req-7f2737de-0414-4417-a996-6f1fb707799f service nova] [instance: fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4] No waiting events found dispatching network-vif-plugged-bc315481-8651-4be3-bdd5-269b569b2817 {{(pid=61839) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}}
[ 813.857429] env[61839]: WARNING nova.compute.manager [req-edd33b90-b82f-42de-95c8-30ecffc5cc86 req-7f2737de-0414-4417-a996-6f1fb707799f service nova] [instance: fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4] Received unexpected event network-vif-plugged-bc315481-8651-4be3-bdd5-269b569b2817 for instance with vm_state building and task_state spawning.
[ 813.857740] env[61839]: DEBUG nova.compute.manager [req-edd33b90-b82f-42de-95c8-30ecffc5cc86 req-7f2737de-0414-4417-a996-6f1fb707799f service nova] [instance: fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4] Received event network-changed-bc315481-8651-4be3-bdd5-269b569b2817 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}}
[ 813.857740] env[61839]: DEBUG nova.compute.manager [req-edd33b90-b82f-42de-95c8-30ecffc5cc86 req-7f2737de-0414-4417-a996-6f1fb707799f service nova] [instance: fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4] Refreshing instance network info cache due to event network-changed-bc315481-8651-4be3-bdd5-269b569b2817. {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}}
[ 813.857740] env[61839]: DEBUG oslo_concurrency.lockutils [req-edd33b90-b82f-42de-95c8-30ecffc5cc86 req-7f2737de-0414-4417-a996-6f1fb707799f service nova] Acquiring lock "refresh_cache-fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 813.857740] env[61839]: DEBUG oslo_concurrency.lockutils [req-edd33b90-b82f-42de-95c8-30ecffc5cc86 req-7f2737de-0414-4417-a996-6f1fb707799f service nova] Acquired lock "refresh_cache-fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 813.857740] env[61839]: DEBUG nova.network.neutron [req-edd33b90-b82f-42de-95c8-30ecffc5cc86 req-7f2737de-0414-4417-a996-6f1fb707799f service nova] [instance: fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4] Refreshing network info cache for port bc315481-8651-4be3-bdd5-269b569b2817 {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}}
[ 814.131534] env[61839]: DEBUG oslo_vmware.api [None req-1a15c259-b1dd-44bd-bdfc-6fbb7bd80a7c tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Task: {'id': task-1314373, 'name': ReconfigVM_Task, 'duration_secs': 0.398646} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 814.131925] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-1a15c259-b1dd-44bd-bdfc-6fbb7bd80a7c tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] [instance: 86525ea7-af75-4b10-85a1-c0fbab73ea5f] Reconfigured VM instance instance-0000003a to attach disk [datastore2] 86525ea7-af75-4b10-85a1-c0fbab73ea5f/86525ea7-af75-4b10-85a1-c0fbab73ea5f.vmdk or device None with type sparse {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}}
[ 814.132584] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a4ddcd31-1549-435e-bf4d-660d2ddc2e01 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 814.139246] env[61839]: DEBUG oslo_vmware.api [None req-1a15c259-b1dd-44bd-bdfc-6fbb7bd80a7c tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Waiting for the task: (returnval){
[ 814.139246] env[61839]: value = "task-1314374"
[ 814.139246] env[61839]: _type = "Task"
[ 814.139246] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 814.147224] env[61839]: DEBUG oslo_vmware.api [None req-1a15c259-b1dd-44bd-bdfc-6fbb7bd80a7c tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Task: {'id': task-1314374, 'name': Rename_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 814.176949] env[61839]: DEBUG oslo_vmware.api [None req-a3c3e5d4-86a4-4921-beca-8a309bae8dfa tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]523e01dc-01c0-c9e8-fd60-d878be6621e4, 'name': SearchDatastore_Task, 'duration_secs': 0.021267} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 814.176949] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a3c3e5d4-86a4-4921-beca-8a309bae8dfa tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 814.177091] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-a3c3e5d4-86a4-4921-beca-8a309bae8dfa tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk to [datastore1] 5c29c188-a34b-4751-9f8b-166af7b15088/e497cc62-282a-4a70-9770-22d80d8a1013-rescue.vmdk. {{(pid=61839) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}}
[ 814.177392] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6884247d-bd64-4efa-a99c-1a7b4718cb9f {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 814.184678] env[61839]: DEBUG oslo_concurrency.lockutils [None req-4445949b-c57d-48df-ad34-d533bc3c88d3 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Acquiring lock "refresh_cache-fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 814.185149] env[61839]: DEBUG oslo_vmware.api [None req-a3c3e5d4-86a4-4921-beca-8a309bae8dfa tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Waiting for the task: (returnval){
[ 814.185149] env[61839]: value = "task-1314375"
[ 814.185149] env[61839]: _type = "Task"
[ 814.185149] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 814.193012] env[61839]: DEBUG oslo_vmware.api [None req-a3c3e5d4-86a4-4921-beca-8a309bae8dfa tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Task: {'id': task-1314375, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 814.201386] env[61839]: DEBUG nova.network.neutron [None req-fb2077f5-f93b-4202-be51-2a1ff1444651 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 6f43abec-51e2-40e4-8a0f-5a8617a9a9f8] Successfully created port: a65466f1-75e9-41dd-8045-71a68e9b31d2 {{(pid=61839) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}}
[ 814.235283] env[61839]: DEBUG nova.compute.manager [None req-fb2077f5-f93b-4202-be51-2a1ff1444651 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 6f43abec-51e2-40e4-8a0f-5a8617a9a9f8] Start building block device mappings for instance. {{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}}
[ 814.292494] env[61839]: DEBUG oslo_concurrency.lockutils [None req-67f27bd5-5058-4162-a21b-0d02d096f4b9 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Releasing lock "refresh_cache-a661cc10-5c4e-421b-b70b-189f0a613e8a" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 814.292824] env[61839]: DEBUG nova.compute.manager [None req-67f27bd5-5058-4162-a21b-0d02d096f4b9 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: a661cc10-5c4e-421b-b70b-189f0a613e8a] Instance network_info: |[{"id": "2adc8fda-8606-45a2-95cb-f775d70870e8", "address": "fa:16:3e:35:ff:ed", "network": {"id": "47b25b31-1262-485a-a847-2918f7fce488", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-914746239-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "25686a503d044467a1d641f14e14c65c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8c58d99d-ec12-4fc3-ab39-042b3f8cbb89", "external-id": "nsx-vlan-transportzone-44", "segmentation_id": 44, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2adc8fda-86", "ovs_interfaceid": "2adc8fda-8606-45a2-95cb-f775d70870e8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}}
[ 814.294199] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-67f27bd5-5058-4162-a21b-0d02d096f4b9 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: a661cc10-5c4e-421b-b70b-189f0a613e8a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:35:ff:ed', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8c58d99d-ec12-4fc3-ab39-042b3f8cbb89', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2adc8fda-8606-45a2-95cb-f775d70870e8', 'vif_model': 'vmxnet3'}] {{(pid=61839) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}}
[ 814.301829] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-67f27bd5-5058-4162-a21b-0d02d096f4b9 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Creating folder: Project (25686a503d044467a1d641f14e14c65c). Parent ref: group-v281288. {{(pid=61839) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}}
[ 814.302826] env[61839]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4f2932bd-3f0b-4057-886e-436225e4094a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 814.313070] env[61839]: INFO nova.virt.vmwareapi.vm_util [None req-67f27bd5-5058-4162-a21b-0d02d096f4b9 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Created folder: Project (25686a503d044467a1d641f14e14c65c) in parent group-v281288.
[ 814.313285] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-67f27bd5-5058-4162-a21b-0d02d096f4b9 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Creating folder: Instances. Parent ref: group-v281327. {{(pid=61839) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}}
[ 814.315934] env[61839]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ddf52324-ce6a-4676-9b39-34bf9e162d36 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 814.331101] env[61839]: INFO nova.virt.vmwareapi.vm_util [None req-67f27bd5-5058-4162-a21b-0d02d096f4b9 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Created folder: Instances in parent group-v281327.
[ 814.331368] env[61839]: DEBUG oslo.service.loopingcall [None req-67f27bd5-5058-4162-a21b-0d02d096f4b9 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 814.331576] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a661cc10-5c4e-421b-b70b-189f0a613e8a] Creating VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}}
[ 814.331800] env[61839]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1c9c34b2-a080-4597-bf19-eb5696bbc622 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 814.355374] env[61839]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){
[ 814.355374] env[61839]: value = "task-1314378"
[ 814.355374] env[61839]: _type = "Task"
[ 814.355374] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 814.369919] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314378, 'name': CreateVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 814.427259] env[61839]: DEBUG nova.network.neutron [req-edd33b90-b82f-42de-95c8-30ecffc5cc86 req-7f2737de-0414-4417-a996-6f1fb707799f service nova] [instance: fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 814.557044] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c86233e-56b7-4d7b-8ada-d04a139bac0f {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 814.566970] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-770509bb-86a2-4982-82c8-4f4d52aa536c {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 814.600079] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac455696-42fc-444c-a9e6-db10acb0beee {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 814.608254] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b602e465-0a04-4605-80aa-f1e976289c2a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 814.623669] env[61839]: DEBUG nova.compute.provider_tree [None req-0ff6597c-ab3c-4cf9-b3ae-5fc8fc4a3084 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 814.648194] env[61839]: DEBUG oslo_vmware.api [None req-1a15c259-b1dd-44bd-bdfc-6fbb7bd80a7c tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Task: {'id': task-1314374, 'name': Rename_Task, 'duration_secs': 0.152544} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 814.648573] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a15c259-b1dd-44bd-bdfc-6fbb7bd80a7c tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] [instance: 86525ea7-af75-4b10-85a1-c0fbab73ea5f] Powering on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}}
[ 814.648838] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-785f6266-b441-443f-9134-87c0c3046e7b {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 814.657600] env[61839]: DEBUG oslo_vmware.api [None req-1a15c259-b1dd-44bd-bdfc-6fbb7bd80a7c tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Waiting for the task: (returnval){
[ 814.657600] env[61839]: value = "task-1314379"
[ 814.657600] env[61839]: _type = "Task"
[ 814.657600] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 814.665950] env[61839]: DEBUG oslo_vmware.api [None req-1a15c259-b1dd-44bd-bdfc-6fbb7bd80a7c tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Task: {'id': task-1314379, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 814.697396] env[61839]: DEBUG oslo_vmware.api [None req-a3c3e5d4-86a4-4921-beca-8a309bae8dfa tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Task: {'id': task-1314375, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 814.830400] env[61839]: DEBUG nova.network.neutron [req-edd33b90-b82f-42de-95c8-30ecffc5cc86 req-7f2737de-0414-4417-a996-6f1fb707799f service nova] [instance: fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 814.868213] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314378, 'name': CreateVM_Task, 'duration_secs': 0.336592} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 814.868413] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a661cc10-5c4e-421b-b70b-189f0a613e8a] Created VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}}
[ 814.869250] env[61839]: DEBUG oslo_concurrency.lockutils [None req-67f27bd5-5058-4162-a21b-0d02d096f4b9 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 814.871571] env[61839]: DEBUG oslo_concurrency.lockutils [None req-67f27bd5-5058-4162-a21b-0d02d096f4b9 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 814.871571] env[61839]: DEBUG oslo_concurrency.lockutils [None req-67f27bd5-5058-4162-a21b-0d02d096f4b9 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 814.871571] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b4d363b1-cd71-43e2-87b0-db2812e0e1bc {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 814.874892] env[61839]: DEBUG oslo_vmware.api [None req-67f27bd5-5058-4162-a21b-0d02d096f4b9 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Waiting for the task: (returnval){
[ 814.874892] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]528dd7fa-6222-d7a1-07e5-5ec0946d5bc9"
[ 814.874892] env[61839]: _type = "Task"
[ 814.874892] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 814.883224] env[61839]: DEBUG oslo_vmware.api [None req-67f27bd5-5058-4162-a21b-0d02d096f4b9 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]528dd7fa-6222-d7a1-07e5-5ec0946d5bc9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 815.127556] env[61839]: DEBUG nova.scheduler.client.report [None req-0ff6597c-ab3c-4cf9-b3ae-5fc8fc4a3084 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 815.168165] env[61839]: DEBUG oslo_vmware.api [None req-1a15c259-b1dd-44bd-bdfc-6fbb7bd80a7c tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Task: {'id': task-1314379, 'name': PowerOnVM_Task} progress is 64%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 815.196071] env[61839]: DEBUG oslo_vmware.api [None req-a3c3e5d4-86a4-4921-beca-8a309bae8dfa tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Task: {'id': task-1314375, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.607885} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 815.198073] env[61839]: INFO nova.virt.vmwareapi.ds_util [None req-a3c3e5d4-86a4-4921-beca-8a309bae8dfa tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk to [datastore1] 5c29c188-a34b-4751-9f8b-166af7b15088/e497cc62-282a-4a70-9770-22d80d8a1013-rescue.vmdk.
[ 815.198073] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f437a1a-972f-4e02-8d06-ab3cd1250f63 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 815.224029] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-a3c3e5d4-86a4-4921-beca-8a309bae8dfa tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 5c29c188-a34b-4751-9f8b-166af7b15088] Reconfiguring VM instance instance-00000038 to attach disk [datastore1] 5c29c188-a34b-4751-9f8b-166af7b15088/e497cc62-282a-4a70-9770-22d80d8a1013-rescue.vmdk or device None with type thin {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}}
[ 815.224355] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f36f44f0-bd92-426d-a2b1-020bf5c6a6bc {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 815.242629] env[61839]: DEBUG oslo_vmware.api [None req-a3c3e5d4-86a4-4921-beca-8a309bae8dfa tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Waiting for the task: (returnval){
[ 815.242629] env[61839]: value = "task-1314380"
[ 815.242629] env[61839]: _type = "Task"
[ 815.242629] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 815.247463] env[61839]: DEBUG nova.compute.manager [None req-fb2077f5-f93b-4202-be51-2a1ff1444651 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 6f43abec-51e2-40e4-8a0f-5a8617a9a9f8] Start spawning the instance on the hypervisor. {{(pid=61839) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}}
[ 815.255451] env[61839]: DEBUG oslo_vmware.api [None req-a3c3e5d4-86a4-4921-beca-8a309bae8dfa tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Task: {'id': task-1314380, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 815.270619] env[61839]: DEBUG nova.virt.hardware [None req-fb2077f5-f93b-4202-be51-2a1ff1444651 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-18T16:51:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-18T16:50:57Z,direct_url=<?>,disk_format='vmdk',id=e497cc62-282a-4a70-9770-22d80d8a1013,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a98e034276e44534a9feace637762da7',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-10-18T16:50:58Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}}
[ 815.270897] env[61839]: DEBUG nova.virt.hardware [None req-fb2077f5-f93b-4202-be51-2a1ff1444651 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Flavor limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}}
[ 815.271092] env[61839]: DEBUG nova.virt.hardware [None req-fb2077f5-f93b-4202-be51-2a1ff1444651 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Image limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 815.271387] env[61839]: DEBUG nova.virt.hardware [None req-fb2077f5-f93b-4202-be51-2a1ff1444651 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Flavor pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}}
[ 815.271602] env[61839]: DEBUG nova.virt.hardware [None req-fb2077f5-f93b-4202-be51-2a1ff1444651 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Image pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 815.271750] env[61839]: DEBUG nova.virt.hardware [None req-fb2077f5-f93b-4202-be51-2a1ff1444651 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}}
[ 815.272031] env[61839]: DEBUG nova.virt.hardware [None req-fb2077f5-f93b-4202-be51-2a1ff1444651 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}}
[ 815.272203] env[61839]: DEBUG nova.virt.hardware [None req-fb2077f5-f93b-4202-be51-2a1ff1444651 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}}
[ 815.272397] env[61839]: DEBUG nova.virt.hardware [None req-fb2077f5-f93b-4202-be51-2a1ff1444651 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Got 1 possible topologies {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}}
[ 815.272570] env[61839]: DEBUG nova.virt.hardware [None req-fb2077f5-f93b-4202-be51-2a1ff1444651 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}}
[ 815.272766] env[61839]: DEBUG nova.virt.hardware [None req-fb2077f5-f93b-4202-be51-2a1ff1444651 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
[ 815.273691] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d95fb8c6-f3ad-44b7-9ad2-65445c716084 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 815.281889] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf37b1e4-17c8-43f4-bebb-b65a0311062d {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 815.334676] env[61839]: DEBUG oslo_concurrency.lockutils [req-edd33b90-b82f-42de-95c8-30ecffc5cc86 req-7f2737de-0414-4417-a996-6f1fb707799f service nova] Releasing lock "refresh_cache-fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 815.335185] env[61839]: DEBUG oslo_concurrency.lockutils [None req-4445949b-c57d-48df-ad34-d533bc3c88d3 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Acquired lock "refresh_cache-fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 815.335422] env[61839]: DEBUG nova.network.neutron [None req-4445949b-c57d-48df-ad34-d533bc3c88d3 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}}
[ 815.385805] env[61839]: DEBUG oslo_vmware.api [None req-67f27bd5-5058-4162-a21b-0d02d096f4b9 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]528dd7fa-6222-d7a1-07e5-5ec0946d5bc9, 'name': SearchDatastore_Task, 'duration_secs': 0.01817} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 815.386170] env[61839]: DEBUG oslo_concurrency.lockutils [None req-67f27bd5-5058-4162-a21b-0d02d096f4b9 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 815.386419] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-67f27bd5-5058-4162-a21b-0d02d096f4b9 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: a661cc10-5c4e-421b-b70b-189f0a613e8a] Processing image e497cc62-282a-4a70-9770-22d80d8a1013 {{(pid=61839) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}}
[ 815.386683] env[61839]: DEBUG oslo_concurrency.lockutils [None req-67f27bd5-5058-4162-a21b-0d02d096f4b9 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 815.386817] env[61839]: DEBUG oslo_concurrency.lockutils [None req-67f27bd5-5058-4162-a21b-0d02d096f4b9 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 815.387009] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-67f27bd5-5058-4162-a21b-0d02d096f4b9 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 815.387303] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0362d9dd-810e-4bee-a52a-11abe563285c {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 815.398269] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-67f27bd5-5058-4162-a21b-0d02d096f4b9 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 815.398464] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-67f27bd5-5058-4162-a21b-0d02d096f4b9 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Folder [datastore2] devstack-image-cache_base created.
{{(pid=61839) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 815.399215] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-22cfe4a1-c228-4914-a31f-ab97186f1fbf {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.404336] env[61839]: DEBUG oslo_vmware.api [None req-67f27bd5-5058-4162-a21b-0d02d096f4b9 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Waiting for the task: (returnval){ [ 815.404336] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]523b42c6-b830-36eb-637a-5ebc7c33e6c8" [ 815.404336] env[61839]: _type = "Task" [ 815.404336] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 815.412336] env[61839]: DEBUG oslo_vmware.api [None req-67f27bd5-5058-4162-a21b-0d02d096f4b9 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]523b42c6-b830-36eb-637a-5ebc7c33e6c8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 815.631859] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0ff6597c-ab3c-4cf9-b3ae-5fc8fc4a3084 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.404s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 815.632535] env[61839]: DEBUG nova.compute.manager [None req-0ff6597c-ab3c-4cf9-b3ae-5fc8fc4a3084 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 047080fa-8781-47b1-89d8-2e4c8031b164] Start building networks asynchronously for instance. {{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 815.635264] env[61839]: DEBUG oslo_concurrency.lockutils [None req-6c4e691d-a55a-4273-a9e4-fb954224d381 tempest-ServerMetadataNegativeTestJSON-1868427009 tempest-ServerMetadataNegativeTestJSON-1868427009-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.612s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 815.636840] env[61839]: INFO nova.compute.claims [None req-6c4e691d-a55a-4273-a9e4-fb954224d381 tempest-ServerMetadataNegativeTestJSON-1868427009 tempest-ServerMetadataNegativeTestJSON-1868427009-project-member] [instance: 406da948-71c7-4c28-9ee3-10af64b1ab51] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 815.669420] env[61839]: DEBUG oslo_vmware.api [None req-1a15c259-b1dd-44bd-bdfc-6fbb7bd80a7c tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Task: {'id': task-1314379, 'name': PowerOnVM_Task, 'duration_secs': 1.009854} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 815.669711] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a15c259-b1dd-44bd-bdfc-6fbb7bd80a7c tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] [instance: 86525ea7-af75-4b10-85a1-c0fbab73ea5f] Powered on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 815.670062] env[61839]: INFO nova.compute.manager [None req-1a15c259-b1dd-44bd-bdfc-6fbb7bd80a7c tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] [instance: 86525ea7-af75-4b10-85a1-c0fbab73ea5f] Took 8.59 seconds to spawn the instance on the hypervisor. [ 815.670128] env[61839]: DEBUG nova.compute.manager [None req-1a15c259-b1dd-44bd-bdfc-6fbb7bd80a7c tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] [instance: 86525ea7-af75-4b10-85a1-c0fbab73ea5f] Checking state {{(pid=61839) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 815.670957] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f94df91d-4fe6-4a9f-bb83-a93a66fecd7f {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.752332] env[61839]: DEBUG oslo_vmware.api [None req-a3c3e5d4-86a4-4921-beca-8a309bae8dfa tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Task: {'id': task-1314380, 'name': ReconfigVM_Task, 'duration_secs': 0.358494} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 815.752620] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-a3c3e5d4-86a4-4921-beca-8a309bae8dfa tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 5c29c188-a34b-4751-9f8b-166af7b15088] Reconfigured VM instance instance-00000038 to attach disk [datastore1] 5c29c188-a34b-4751-9f8b-166af7b15088/e497cc62-282a-4a70-9770-22d80d8a1013-rescue.vmdk or device None with type thin {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 815.753469] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64e0c1b2-b2c9-45e4-aea3-9fcb555479e3 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.781110] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d77bcb19-5284-4641-a226-af71428b2cb7 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.794583] env[61839]: DEBUG oslo_vmware.api [None req-a3c3e5d4-86a4-4921-beca-8a309bae8dfa tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Waiting for the task: (returnval){ [ 815.794583] env[61839]: value = "task-1314381" [ 815.794583] env[61839]: _type = "Task" [ 815.794583] env[61839]: } to complete. 
{{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 815.802879] env[61839]: DEBUG oslo_vmware.api [None req-a3c3e5d4-86a4-4921-beca-8a309bae8dfa tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Task: {'id': task-1314381, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 815.878379] env[61839]: DEBUG oslo_vmware.rw_handles [None req-3a0aa598-9065-4c6e-8d67-a77a540fab55 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52a1e5fc-89ee-9735-7bc5-925d451f781b/disk-0.vmdk. {{(pid=61839) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 815.879371] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21f7d139-d845-4120-88c7-057797beda47 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.885665] env[61839]: DEBUG oslo_vmware.rw_handles [None req-3a0aa598-9065-4c6e-8d67-a77a540fab55 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52a1e5fc-89ee-9735-7bc5-925d451f781b/disk-0.vmdk is in state: ready. {{(pid=61839) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 815.885898] env[61839]: ERROR oslo_vmware.rw_handles [None req-3a0aa598-9065-4c6e-8d67-a77a540fab55 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52a1e5fc-89ee-9735-7bc5-925d451f781b/disk-0.vmdk due to incomplete transfer. [ 815.886161] env[61839]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-5c361b16-1df0-42c7-896e-afa75b29612e {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.892971] env[61839]: DEBUG oslo_vmware.rw_handles [None req-3a0aa598-9065-4c6e-8d67-a77a540fab55 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52a1e5fc-89ee-9735-7bc5-925d451f781b/disk-0.vmdk. 
{{(pid=61839) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 815.892971] env[61839]: DEBUG nova.virt.vmwareapi.images [None req-3a0aa598-9065-4c6e-8d67-a77a540fab55 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: e65da0fd-e877-4b25-a319-e4d65397056a] Uploaded image bde63672-3220-4238-b8e0-0b8b258879c6 to the Glance image server {{(pid=61839) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 815.895107] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-3a0aa598-9065-4c6e-8d67-a77a540fab55 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: e65da0fd-e877-4b25-a319-e4d65397056a] Destroying the VM {{(pid=61839) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 815.895306] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-63324752-3d63-4103-b468-7ea3e5baeb8e {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.901313] env[61839]: DEBUG oslo_vmware.api [None req-3a0aa598-9065-4c6e-8d67-a77a540fab55 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Waiting for the task: (returnval){ [ 815.901313] env[61839]: value = "task-1314382" [ 815.901313] env[61839]: _type = "Task" [ 815.901313] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 815.912477] env[61839]: DEBUG oslo_vmware.api [None req-3a0aa598-9065-4c6e-8d67-a77a540fab55 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Task: {'id': task-1314382, 'name': Destroy_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 815.916266] env[61839]: DEBUG oslo_vmware.api [None req-67f27bd5-5058-4162-a21b-0d02d096f4b9 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]523b42c6-b830-36eb-637a-5ebc7c33e6c8, 'name': SearchDatastore_Task, 'duration_secs': 0.018944} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 815.916990] env[61839]: DEBUG nova.network.neutron [None req-4445949b-c57d-48df-ad34-d533bc3c88d3 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 815.919256] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2b6aa9ca-f77b-4800-9802-11803fb18ba9 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.924442] env[61839]: DEBUG oslo_vmware.api [None req-67f27bd5-5058-4162-a21b-0d02d096f4b9 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Waiting for the task: (returnval){ [ 815.924442] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]5243e400-e153-164a-b6c4-65adfa2d89a6" [ 815.924442] env[61839]: _type = "Task" [ 815.924442] env[61839]: } to complete. 
{{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 815.933087] env[61839]: DEBUG oslo_vmware.api [None req-67f27bd5-5058-4162-a21b-0d02d096f4b9 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]5243e400-e153-164a-b6c4-65adfa2d89a6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 816.141420] env[61839]: DEBUG nova.compute.utils [None req-0ff6597c-ab3c-4cf9-b3ae-5fc8fc4a3084 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Using /dev/sd instead of None {{(pid=61839) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 816.144879] env[61839]: DEBUG nova.compute.manager [None req-0ff6597c-ab3c-4cf9-b3ae-5fc8fc4a3084 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 047080fa-8781-47b1-89d8-2e4c8031b164] Allocating IP information in the background. {{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 816.145063] env[61839]: DEBUG nova.network.neutron [None req-0ff6597c-ab3c-4cf9-b3ae-5fc8fc4a3084 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 047080fa-8781-47b1-89d8-2e4c8031b164] allocate_for_instance() {{(pid=61839) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 816.189418] env[61839]: INFO nova.compute.manager [None req-1a15c259-b1dd-44bd-bdfc-6fbb7bd80a7c tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] [instance: 86525ea7-af75-4b10-85a1-c0fbab73ea5f] Took 33.46 seconds to build instance. [ 816.277212] env[61839]: DEBUG nova.policy [None req-0ff6597c-ab3c-4cf9-b3ae-5fc8fc4a3084 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8677a31386a54087b2328734c2eadeb3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8935bcc7ee644cb7a2a33557a708189c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61839) authorize /opt/stack/nova/nova/policy.py:201}} [ 816.304935] env[61839]: DEBUG oslo_vmware.api [None req-a3c3e5d4-86a4-4921-beca-8a309bae8dfa tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Task: {'id': task-1314381, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 816.311597] env[61839]: DEBUG nova.network.neutron [None req-4445949b-c57d-48df-ad34-d533bc3c88d3 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4] Updating instance_info_cache with network_info: [{"id": "bc315481-8651-4be3-bdd5-269b569b2817", "address": "fa:16:3e:ce:61:f0", "network": {"id": "54f10e74-ee7b-499c-a6b6-a27d337719cd", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-79581761-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5f789f3900a347b59c491e9d141fb9e7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe20ef0e-0991-44d7-887d-08dddac0b56b", "external-id": "nsx-vlan-transportzone-991", "segmentation_id": 991, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbc315481-86", "ovs_interfaceid": "bc315481-8651-4be3-bdd5-269b569b2817", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 816.411890] env[61839]: DEBUG oslo_vmware.api [None req-3a0aa598-9065-4c6e-8d67-a77a540fab55 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Task: {'id': task-1314382, 'name': Destroy_Task, 'duration_secs': 0.351857} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 816.412236] env[61839]: INFO nova.virt.vmwareapi.vm_util [None req-3a0aa598-9065-4c6e-8d67-a77a540fab55 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: e65da0fd-e877-4b25-a319-e4d65397056a] Destroyed the VM [ 816.412625] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-3a0aa598-9065-4c6e-8d67-a77a540fab55 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: e65da0fd-e877-4b25-a319-e4d65397056a] Deleting Snapshot of the VM instance {{(pid=61839) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 816.412915] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-682c285a-9175-4026-a74a-8c6c9e2cf3ef {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.418806] env[61839]: DEBUG oslo_vmware.api [None req-3a0aa598-9065-4c6e-8d67-a77a540fab55 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Waiting for the task: (returnval){ [ 816.418806] env[61839]: value = "task-1314383" [ 816.418806] env[61839]: _type = "Task" [ 816.418806] env[61839]: } to complete. 
{{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 816.431702] env[61839]: DEBUG oslo_vmware.api [None req-3a0aa598-9065-4c6e-8d67-a77a540fab55 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Task: {'id': task-1314383, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 816.440035] env[61839]: DEBUG oslo_vmware.api [None req-67f27bd5-5058-4162-a21b-0d02d096f4b9 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]5243e400-e153-164a-b6c4-65adfa2d89a6, 'name': SearchDatastore_Task, 'duration_secs': 0.011164} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 816.440035] env[61839]: DEBUG oslo_concurrency.lockutils [None req-67f27bd5-5058-4162-a21b-0d02d096f4b9 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 816.440035] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-67f27bd5-5058-4162-a21b-0d02d096f4b9 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk to [datastore2] a661cc10-5c4e-421b-b70b-189f0a613e8a/a661cc10-5c4e-421b-b70b-189f0a613e8a.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 816.440035] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0329dbbc-18ee-4108-8395-66d3b39646fa {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.443949] env[61839]: DEBUG oslo_vmware.api [None req-67f27bd5-5058-4162-a21b-0d02d096f4b9 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Waiting for the task: (returnval){ [ 816.443949] env[61839]: value = "task-1314384" [ 816.443949] env[61839]: _type = "Task" [ 816.443949] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 816.452053] env[61839]: DEBUG oslo_vmware.api [None req-67f27bd5-5058-4162-a21b-0d02d096f4b9 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': task-1314384, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 816.564692] env[61839]: DEBUG nova.compute.manager [req-9223d3c0-6a9b-4c6f-819e-7f6bb8dcf4fb req-5dc8be34-5542-4270-8fd6-b4411e7f7237 service nova] [instance: 6f43abec-51e2-40e4-8a0f-5a8617a9a9f8] Received event network-vif-plugged-a65466f1-75e9-41dd-8045-71a68e9b31d2 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 816.564692] env[61839]: DEBUG oslo_concurrency.lockutils [req-9223d3c0-6a9b-4c6f-819e-7f6bb8dcf4fb req-5dc8be34-5542-4270-8fd6-b4411e7f7237 service nova] Acquiring lock "6f43abec-51e2-40e4-8a0f-5a8617a9a9f8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 816.564692] env[61839]: DEBUG oslo_concurrency.lockutils [req-9223d3c0-6a9b-4c6f-819e-7f6bb8dcf4fb req-5dc8be34-5542-4270-8fd6-b4411e7f7237 service nova] Lock "6f43abec-51e2-40e4-8a0f-5a8617a9a9f8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 816.564692] env[61839]: DEBUG oslo_concurrency.lockutils [req-9223d3c0-6a9b-4c6f-819e-7f6bb8dcf4fb req-5dc8be34-5542-4270-8fd6-b4411e7f7237 service nova] Lock "6f43abec-51e2-40e4-8a0f-5a8617a9a9f8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 816.564692] env[61839]: DEBUG nova.compute.manager [req-9223d3c0-6a9b-4c6f-819e-7f6bb8dcf4fb req-5dc8be34-5542-4270-8fd6-b4411e7f7237 service nova] [instance: 6f43abec-51e2-40e4-8a0f-5a8617a9a9f8] No waiting events found dispatching network-vif-plugged-a65466f1-75e9-41dd-8045-71a68e9b31d2 {{(pid=61839) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 816.565129] env[61839]: WARNING nova.compute.manager [req-9223d3c0-6a9b-4c6f-819e-7f6bb8dcf4fb req-5dc8be34-5542-4270-8fd6-b4411e7f7237 service nova] [instance: 6f43abec-51e2-40e4-8a0f-5a8617a9a9f8] Received unexpected event network-vif-plugged-a65466f1-75e9-41dd-8045-71a68e9b31d2 for instance with vm_state building and task_state spawning. [ 816.645801] env[61839]: DEBUG nova.compute.manager [None req-0ff6597c-ab3c-4cf9-b3ae-5fc8fc4a3084 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 047080fa-8781-47b1-89d8-2e4c8031b164] Start building block device mappings for instance. {{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 816.690757] env[61839]: DEBUG oslo_concurrency.lockutils [None req-1a15c259-b1dd-44bd-bdfc-6fbb7bd80a7c tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Lock "86525ea7-af75-4b10-85a1-c0fbab73ea5f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 145.860s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 816.812149] env[61839]: DEBUG oslo_vmware.api [None req-a3c3e5d4-86a4-4921-beca-8a309bae8dfa tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Task: {'id': task-1314381, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 816.813958] env[61839]: DEBUG oslo_concurrency.lockutils [None req-4445949b-c57d-48df-ad34-d533bc3c88d3 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Releasing lock "refresh_cache-fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 816.814250] env[61839]: DEBUG nova.compute.manager [None req-4445949b-c57d-48df-ad34-d533bc3c88d3 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4] Instance network_info: |[{"id": "bc315481-8651-4be3-bdd5-269b569b2817", "address": "fa:16:3e:ce:61:f0", "network": {"id": "54f10e74-ee7b-499c-a6b6-a27d337719cd", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-79581761-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5f789f3900a347b59c491e9d141fb9e7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe20ef0e-0991-44d7-887d-08dddac0b56b", "external-id": "nsx-vlan-transportzone-991", "segmentation_id": 991, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbc315481-86", "ovs_interfaceid": "bc315481-8651-4be3-bdd5-269b569b2817", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 816.814703] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-4445949b-c57d-48df-ad34-d533bc3c88d3 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ce:61:f0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'fe20ef0e-0991-44d7-887d-08dddac0b56b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'bc315481-8651-4be3-bdd5-269b569b2817', 'vif_model': 'vmxnet3'}] {{(pid=61839) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 816.827512] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-4445949b-c57d-48df-ad34-d533bc3c88d3 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Creating folder: Project (5f789f3900a347b59c491e9d141fb9e7). Parent ref: group-v281288. 
{{(pid=61839) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 816.831074] env[61839]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9ab872f6-a9f2-4a57-9fd9-91ba5aac7cd6 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.834739] env[61839]: DEBUG nova.network.neutron [None req-fb2077f5-f93b-4202-be51-2a1ff1444651 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 6f43abec-51e2-40e4-8a0f-5a8617a9a9f8] Successfully updated port: a65466f1-75e9-41dd-8045-71a68e9b31d2 {{(pid=61839) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 816.848131] env[61839]: INFO nova.virt.vmwareapi.vm_util [None req-4445949b-c57d-48df-ad34-d533bc3c88d3 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Created folder: Project (5f789f3900a347b59c491e9d141fb9e7) in parent group-v281288. [ 816.848370] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-4445949b-c57d-48df-ad34-d533bc3c88d3 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Creating folder: Instances. Parent ref: group-v281330. {{(pid=61839) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 816.849425] env[61839]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-72105c24-305d-4b6b-9b98-9f24a9fb5b8a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.863628] env[61839]: INFO nova.virt.vmwareapi.vm_util [None req-4445949b-c57d-48df-ad34-d533bc3c88d3 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Created folder: Instances in parent group-v281330. [ 816.863998] env[61839]: DEBUG oslo.service.loopingcall [None req-4445949b-c57d-48df-ad34-d533bc3c88d3 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 816.864338] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4] Creating VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 816.867663] env[61839]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d4ff1d97-f9ab-4e82-9957-41a67ae41239 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.889563] env[61839]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 816.889563] env[61839]: value = "task-1314387" [ 816.889563] env[61839]: _type = "Task" [ 816.889563] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 816.900527] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314387, 'name': CreateVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 816.931755] env[61839]: DEBUG oslo_vmware.api [None req-3a0aa598-9065-4c6e-8d67-a77a540fab55 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Task: {'id': task-1314383, 'name': RemoveSnapshot_Task} progress is 100%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 816.954970] env[61839]: DEBUG oslo_vmware.api [None req-67f27bd5-5058-4162-a21b-0d02d096f4b9 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': task-1314384, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 816.971345] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb93afda-b8a6-491d-9540-6062e7a9b8ab {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.979123] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c16004ca-b89b-4cea-afa2-036d0fea4293 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.023553] env[61839]: DEBUG nova.network.neutron [None req-0ff6597c-ab3c-4cf9-b3ae-5fc8fc4a3084 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 047080fa-8781-47b1-89d8-2e4c8031b164] Successfully created port: 6abe31af-55d3-43dc-b16e-b1e5d4c6f7dc {{(pid=61839) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 817.028953] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d40f5c79-02ca-4172-ab71-06192042e71e {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.034539] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e1fb758-3022-4310-8cb7-94b53ba2e0df {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.050036] env[61839]: DEBUG nova.compute.provider_tree [None req-6c4e691d-a55a-4273-a9e4-fb954224d381 tempest-ServerMetadataNegativeTestJSON-1868427009 tempest-ServerMetadataNegativeTestJSON-1868427009-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 817.193701] env[61839]: DEBUG nova.compute.manager [None req-cabdb6a6-9cdb-4077-8178-485aa41e5aff tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: 40c54d84-8e50-483a-b4e0-5f1cc72b0880] Starting instance... {{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 817.306253] env[61839]: DEBUG oslo_vmware.api [None req-a3c3e5d4-86a4-4921-beca-8a309bae8dfa tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Task: {'id': task-1314381, 'name': ReconfigVM_Task, 'duration_secs': 1.488149} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 817.306529] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-a3c3e5d4-86a4-4921-beca-8a309bae8dfa tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 5c29c188-a34b-4751-9f8b-166af7b15088] Powering on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 817.306992] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-19c93c25-e6ce-44b7-943d-b908fc0f9c2b {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.313477] env[61839]: DEBUG oslo_vmware.api [None req-a3c3e5d4-86a4-4921-beca-8a309bae8dfa tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Waiting for the task: (returnval){ [ 817.313477] env[61839]: value = "task-1314388" [ 817.313477] env[61839]: _type = "Task" [ 817.313477] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 817.321725] env[61839]: DEBUG oslo_vmware.api [None req-a3c3e5d4-86a4-4921-beca-8a309bae8dfa tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Task: {'id': task-1314388, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 817.338855] env[61839]: DEBUG oslo_concurrency.lockutils [None req-fb2077f5-f93b-4202-be51-2a1ff1444651 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Acquiring lock "refresh_cache-6f43abec-51e2-40e4-8a0f-5a8617a9a9f8" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 817.339084] env[61839]: DEBUG oslo_concurrency.lockutils [None req-fb2077f5-f93b-4202-be51-2a1ff1444651 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Acquired lock "refresh_cache-6f43abec-51e2-40e4-8a0f-5a8617a9a9f8" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 817.339601] env[61839]: DEBUG nova.network.neutron [None req-fb2077f5-f93b-4202-be51-2a1ff1444651 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 6f43abec-51e2-40e4-8a0f-5a8617a9a9f8] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 817.399416] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314387, 'name': CreateVM_Task} progress is 25%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 817.430435] env[61839]: DEBUG oslo_vmware.api [None req-3a0aa598-9065-4c6e-8d67-a77a540fab55 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Task: {'id': task-1314383, 'name': RemoveSnapshot_Task, 'duration_secs': 0.812142} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 817.430839] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-3a0aa598-9065-4c6e-8d67-a77a540fab55 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: e65da0fd-e877-4b25-a319-e4d65397056a] Deleted Snapshot of the VM instance {{(pid=61839) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 817.431280] env[61839]: INFO nova.compute.manager [None req-3a0aa598-9065-4c6e-8d67-a77a540fab55 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: e65da0fd-e877-4b25-a319-e4d65397056a] Took 17.68 seconds to snapshot the instance on the hypervisor. [ 817.457821] env[61839]: DEBUG oslo_vmware.api [None req-67f27bd5-5058-4162-a21b-0d02d096f4b9 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': task-1314384, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.587539} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 817.458337] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-67f27bd5-5058-4162-a21b-0d02d096f4b9 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk to [datastore2] a661cc10-5c4e-421b-b70b-189f0a613e8a/a661cc10-5c4e-421b-b70b-189f0a613e8a.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 817.458733] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-67f27bd5-5058-4162-a21b-0d02d096f4b9 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: a661cc10-5c4e-421b-b70b-189f0a613e8a] Extending root virtual disk to 1048576 {{(pid=61839) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 817.459127] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1194cf31-b5e0-4a44-b795-55d2f28f047e {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.466339] env[61839]: DEBUG oslo_vmware.api [None req-67f27bd5-5058-4162-a21b-0d02d096f4b9 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Waiting for the task: (returnval){ [ 817.466339] env[61839]: value = "task-1314389" [ 817.466339] env[61839]: _type = "Task" [ 817.466339] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 817.475652] env[61839]: DEBUG oslo_vmware.api [None req-67f27bd5-5058-4162-a21b-0d02d096f4b9 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': task-1314389, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 817.556651] env[61839]: DEBUG nova.scheduler.client.report [None req-6c4e691d-a55a-4273-a9e4-fb954224d381 tempest-ServerMetadataNegativeTestJSON-1868427009 tempest-ServerMetadataNegativeTestJSON-1868427009-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 817.659021] env[61839]: DEBUG nova.compute.manager [None req-0ff6597c-ab3c-4cf9-b3ae-5fc8fc4a3084 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 047080fa-8781-47b1-89d8-2e4c8031b164] Start spawning the instance on the hypervisor. {{(pid=61839) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 817.687054] env[61839]: DEBUG nova.virt.hardware [None req-0ff6597c-ab3c-4cf9-b3ae-5fc8fc4a3084 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-18T16:51:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-18T16:50:57Z,direct_url=,disk_format='vmdk',id=e497cc62-282a-4a70-9770-22d80d8a1013,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a98e034276e44534a9feace637762da7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-18T16:50:58Z,virtual_size=,visibility=), allow threads: False {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 817.687211] env[61839]: DEBUG nova.virt.hardware [None req-0ff6597c-ab3c-4cf9-b3ae-5fc8fc4a3084 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Flavor limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 817.687354] env[61839]: DEBUG nova.virt.hardware [None req-0ff6597c-ab3c-4cf9-b3ae-5fc8fc4a3084 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Image limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 817.687480] env[61839]: DEBUG nova.virt.hardware [None req-0ff6597c-ab3c-4cf9-b3ae-5fc8fc4a3084 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Flavor pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 817.687628] env[61839]: DEBUG nova.virt.hardware [None req-0ff6597c-ab3c-4cf9-b3ae-5fc8fc4a3084 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Image pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 817.687793] env[61839]: DEBUG nova.virt.hardware [None req-0ff6597c-ab3c-4cf9-b3ae-5fc8fc4a3084 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 817.688210] env[61839]: DEBUG nova.virt.hardware [None req-0ff6597c-ab3c-4cf9-b3ae-5fc8fc4a3084 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 817.688503] env[61839]: DEBUG nova.virt.hardware [None req-0ff6597c-ab3c-4cf9-b3ae-5fc8fc4a3084 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 817.688583] env[61839]: DEBUG nova.virt.hardware [None req-0ff6597c-ab3c-4cf9-b3ae-5fc8fc4a3084 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Got 1 possible topologies {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 817.688755] env[61839]: DEBUG nova.virt.hardware [None req-0ff6597c-ab3c-4cf9-b3ae-5fc8fc4a3084 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 817.688938] env[61839]: DEBUG nova.virt.hardware [None req-0ff6597c-ab3c-4cf9-b3ae-5fc8fc4a3084 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 817.689818] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2aa91d33-21b2-4c8b-818e-4db1db0fd265 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.701776] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbb1e6a8-cee0-4ae0-b4e8-74d188d9b6f2 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.720409] env[61839]: DEBUG oslo_concurrency.lockutils [None req-cabdb6a6-9cdb-4077-8178-485aa41e5aff tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 817.743485] env[61839]: DEBUG oslo_concurrency.lockutils [None req-50defe20-1e8f-44c8-9564-69613a54695f tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Acquiring lock "c996d7db-4b73-4445-9989-4efb2cd852e8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61839) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 817.743719] env[61839]: DEBUG oslo_concurrency.lockutils [None req-50defe20-1e8f-44c8-9564-69613a54695f tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Lock "c996d7db-4b73-4445-9989-4efb2cd852e8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 817.824140] env[61839]: DEBUG oslo_vmware.api [None req-a3c3e5d4-86a4-4921-beca-8a309bae8dfa tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Task: {'id': task-1314388, 'name': PowerOnVM_Task} progress is 1%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 817.899505] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314387, 'name': CreateVM_Task} progress is 99%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 817.901428] env[61839]: DEBUG nova.network.neutron [None req-fb2077f5-f93b-4202-be51-2a1ff1444651 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 6f43abec-51e2-40e4-8a0f-5a8617a9a9f8] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 817.976214] env[61839]: DEBUG oslo_vmware.api [None req-67f27bd5-5058-4162-a21b-0d02d096f4b9 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': task-1314389, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074832} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 817.976516] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-67f27bd5-5058-4162-a21b-0d02d096f4b9 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: a661cc10-5c4e-421b-b70b-189f0a613e8a] Extended root virtual disk {{(pid=61839) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 817.977268] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c66f2162-4ff7-43ae-aa6f-6e19e1166263 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.000931] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-67f27bd5-5058-4162-a21b-0d02d096f4b9 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: a661cc10-5c4e-421b-b70b-189f0a613e8a] Reconfiguring VM instance instance-0000003b to attach disk [datastore2] a661cc10-5c4e-421b-b70b-189f0a613e8a/a661cc10-5c4e-421b-b70b-189f0a613e8a.vmdk or device None with type sparse {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 818.001269] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-99612eb6-69d8-4e78-afac-159d58ad337a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.021157] env[61839]: DEBUG oslo_vmware.api [None req-67f27bd5-5058-4162-a21b-0d02d096f4b9 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Waiting for the task: (returnval){ [ 818.021157] env[61839]: value = "task-1314390" [ 818.021157] env[61839]: _type = "Task" [ 818.021157] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 818.032601] env[61839]: DEBUG oslo_vmware.api [None req-67f27bd5-5058-4162-a21b-0d02d096f4b9 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': task-1314390, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 818.062428] env[61839]: DEBUG oslo_concurrency.lockutils [None req-6c4e691d-a55a-4273-a9e4-fb954224d381 tempest-ServerMetadataNegativeTestJSON-1868427009 tempest-ServerMetadataNegativeTestJSON-1868427009-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.426s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 818.062428] env[61839]: DEBUG nova.compute.manager [None req-6c4e691d-a55a-4273-a9e4-fb954224d381 tempest-ServerMetadataNegativeTestJSON-1868427009 tempest-ServerMetadataNegativeTestJSON-1868427009-project-member] [instance: 406da948-71c7-4c28-9ee3-10af64b1ab51] Start building networks asynchronously for instance. 
[ 818.065058] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a242d3b0-5131-4888-830a-8c848380688b tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.952s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 818.066424] env[61839]: INFO nova.compute.claims [None req-a242d3b0-5131-4888-830a-8c848380688b tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: 694a5d4b-3673-406b-a24a-d37fad33e549] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 818.080750] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a5856ade-37cd-4042-9d02-1ead51c7ffbc tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Acquiring lock "e65da0fd-e877-4b25-a319-e4d65397056a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 818.081018] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a5856ade-37cd-4042-9d02-1ead51c7ffbc tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Lock "e65da0fd-e877-4b25-a319-e4d65397056a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 818.081244] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a5856ade-37cd-4042-9d02-1ead51c7ffbc tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Acquiring lock "e65da0fd-e877-4b25-a319-e4d65397056a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 818.081445] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a5856ade-37cd-4042-9d02-1ead51c7ffbc tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Lock "e65da0fd-e877-4b25-a319-e4d65397056a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 818.081606] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a5856ade-37cd-4042-9d02-1ead51c7ffbc tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Lock "e65da0fd-e877-4b25-a319-e4d65397056a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 818.083760] env[61839]: INFO nova.compute.manager [None req-a5856ade-37cd-4042-9d02-1ead51c7ffbc tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: e65da0fd-e877-4b25-a319-e4d65397056a] Terminating instance [ 818.085674] env[61839]: DEBUG nova.compute.manager [None req-a5856ade-37cd-4042-9d02-1ead51c7ffbc tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: e65da0fd-e877-4b25-a319-e4d65397056a] Start
destroying the instance on the hypervisor. {{(pid=61839) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 818.085937] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-a5856ade-37cd-4042-9d02-1ead51c7ffbc tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: e65da0fd-e877-4b25-a319-e4d65397056a] Destroying instance {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 818.086801] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc9e1458-6883-4b57-9c2a-2d818e74f71c {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.099497] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-a5856ade-37cd-4042-9d02-1ead51c7ffbc tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: e65da0fd-e877-4b25-a319-e4d65397056a] Unregistering the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 818.099497] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-da5af0ec-c660-4084-b8a6-8ef67ed3de65 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.144816] env[61839]: DEBUG nova.network.neutron [None req-fb2077f5-f93b-4202-be51-2a1ff1444651 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 6f43abec-51e2-40e4-8a0f-5a8617a9a9f8] Updating instance_info_cache with network_info: [{"id": "a65466f1-75e9-41dd-8045-71a68e9b31d2", "address": "fa:16:3e:9a:42:3c", "network": {"id": "100bb090-83a3-4de8-bd2b-f65900cd5a3e", "bridge": "br-int", "label": "tempest-ServersTestJSON-901620943-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5aba1e066cb4400dbbacc92f393962e6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3b67e519-46cf-44ce-b670-4ba4c0c5b658", "external-id": "nsx-vlan-transportzone-110", "segmentation_id": 110, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa65466f1-75", "ovs_interfaceid": "a65466f1-75e9-41dd-8045-71a68e9b31d2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 818.239360] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-a5856ade-37cd-4042-9d02-1ead51c7ffbc tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: e65da0fd-e877-4b25-a319-e4d65397056a] Unregistered the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 818.239602] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-a5856ade-37cd-4042-9d02-1ead51c7ffbc tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: e65da0fd-e877-4b25-a319-e4d65397056a] Deleting contents of the VM from datastore datastore1 {{(pid=61839) _destroy_instance 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 818.239797] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-a5856ade-37cd-4042-9d02-1ead51c7ffbc tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Deleting the datastore file [datastore1] e65da0fd-e877-4b25-a319-e4d65397056a {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 818.240094] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-69ed2b48-0340-4b16-91a1-d428b5f2b6ad {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.247311] env[61839]: DEBUG oslo_vmware.api [None req-a5856ade-37cd-4042-9d02-1ead51c7ffbc tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Waiting for the task: (returnval){ [ 818.247311] env[61839]: value = "task-1314392" [ 818.247311] env[61839]: _type = "Task" [ 818.247311] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 818.258589] env[61839]: DEBUG oslo_vmware.api [None req-a5856ade-37cd-4042-9d02-1ead51c7ffbc tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Task: {'id': task-1314392, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 818.324603] env[61839]: DEBUG oslo_vmware.api [None req-a3c3e5d4-86a4-4921-beca-8a309bae8dfa tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Task: {'id': task-1314388, 'name': PowerOnVM_Task} progress is 91%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 818.400731] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314387, 'name': CreateVM_Task} progress is 99%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 818.531121] env[61839]: DEBUG oslo_vmware.api [None req-67f27bd5-5058-4162-a21b-0d02d096f4b9 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': task-1314390, 'name': ReconfigVM_Task, 'duration_secs': 0.318887} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 818.531421] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-67f27bd5-5058-4162-a21b-0d02d096f4b9 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: a661cc10-5c4e-421b-b70b-189f0a613e8a] Reconfigured VM instance instance-0000003b to attach disk [datastore2] a661cc10-5c4e-421b-b70b-189f0a613e8a/a661cc10-5c4e-421b-b70b-189f0a613e8a.vmdk or device None with type sparse {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 818.532225] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-84d0e35f-6f2c-4941-b282-ce89d03b230b {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.538498] env[61839]: DEBUG oslo_vmware.api [None req-67f27bd5-5058-4162-a21b-0d02d096f4b9 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Waiting for the task: (returnval){ [ 818.538498] env[61839]: value = "task-1314393" [ 818.538498] env[61839]: _type = "Task" [ 818.538498] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 818.546394] env[61839]: DEBUG oslo_vmware.api [None req-67f27bd5-5058-4162-a21b-0d02d096f4b9 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': task-1314393, 'name': Rename_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 818.570914] env[61839]: DEBUG nova.compute.utils [None req-6c4e691d-a55a-4273-a9e4-fb954224d381 tempest-ServerMetadataNegativeTestJSON-1868427009 tempest-ServerMetadataNegativeTestJSON-1868427009-project-member] Using /dev/sd instead of None {{(pid=61839) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 818.574487] env[61839]: DEBUG nova.compute.manager [None req-6c4e691d-a55a-4273-a9e4-fb954224d381 tempest-ServerMetadataNegativeTestJSON-1868427009 tempest-ServerMetadataNegativeTestJSON-1868427009-project-member] [instance: 406da948-71c7-4c28-9ee3-10af64b1ab51] Allocating IP information in the background. 
{{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 818.574669] env[61839]: DEBUG nova.network.neutron [None req-6c4e691d-a55a-4273-a9e4-fb954224d381 tempest-ServerMetadataNegativeTestJSON-1868427009 tempest-ServerMetadataNegativeTestJSON-1868427009-project-member] [instance: 406da948-71c7-4c28-9ee3-10af64b1ab51] allocate_for_instance() {{(pid=61839) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 818.648016] env[61839]: DEBUG oslo_concurrency.lockutils [None req-fb2077f5-f93b-4202-be51-2a1ff1444651 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Releasing lock "refresh_cache-6f43abec-51e2-40e4-8a0f-5a8617a9a9f8" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 818.648405] env[61839]: DEBUG nova.compute.manager [None req-fb2077f5-f93b-4202-be51-2a1ff1444651 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 6f43abec-51e2-40e4-8a0f-5a8617a9a9f8] Instance network_info: |[{"id": "a65466f1-75e9-41dd-8045-71a68e9b31d2", "address": "fa:16:3e:9a:42:3c", "network": {"id": "100bb090-83a3-4de8-bd2b-f65900cd5a3e", "bridge": "br-int", "label": "tempest-ServersTestJSON-901620943-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5aba1e066cb4400dbbacc92f393962e6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3b67e519-46cf-44ce-b670-4ba4c0c5b658", "external-id": "nsx-vlan-transportzone-110", "segmentation_id": 110, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa65466f1-75", "ovs_interfaceid": "a65466f1-75e9-41dd-8045-71a68e9b31d2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 818.650809] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-fb2077f5-f93b-4202-be51-2a1ff1444651 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 6f43abec-51e2-40e4-8a0f-5a8617a9a9f8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9a:42:3c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3b67e519-46cf-44ce-b670-4ba4c0c5b658', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a65466f1-75e9-41dd-8045-71a68e9b31d2', 'vif_model': 'vmxnet3'}] {{(pid=61839) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 818.658663] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-fb2077f5-f93b-4202-be51-2a1ff1444651 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Creating folder: Project (5aba1e066cb4400dbbacc92f393962e6). Parent ref: group-v281288. {{(pid=61839) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}}
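The network_info blob that Neutron handed back (between the |[ ... ]| markers above) is plain nested data. A small illustrative helper, not Nova code, for pulling the MAC and fixed IPs out of such a structure:

    # Trimmed to the fields the helper touches; the real blob carries
    # many more keys (details, devname, vnic_type, ...).
    network_info = [{
        'address': 'fa:16:3e:9a:42:3c',
        'network': {'subnets': [{'cidr': '192.168.128.0/28',
                                 'ips': [{'address': '192.168.128.11',
                                          'type': 'fixed'}]}]},
    }]

    def fixed_ips(vifs):
        for vif in vifs:
            for subnet in vif['network']['subnets']:
                for ip in subnet['ips']:
                    if ip['type'] == 'fixed':
                        yield vif['address'], ip['address']

    print(list(fixed_ips(network_info)))
    # [('fa:16:3e:9a:42:3c', '192.168.128.11')]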
[ 818.659856] env[61839]: DEBUG nova.compute.manager [req-051b40d8-a185-4654-bf8b-4e0de39bba16 req-919fa613-3163-4bfb-a4dd-e5b87e1a0cb0 service nova] [instance: 6f43abec-51e2-40e4-8a0f-5a8617a9a9f8] Received event network-changed-a65466f1-75e9-41dd-8045-71a68e9b31d2 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 818.660114] env[61839]: DEBUG nova.compute.manager [req-051b40d8-a185-4654-bf8b-4e0de39bba16 req-919fa613-3163-4bfb-a4dd-e5b87e1a0cb0 service nova] [instance: 6f43abec-51e2-40e4-8a0f-5a8617a9a9f8] Refreshing instance network info cache due to event network-changed-a65466f1-75e9-41dd-8045-71a68e9b31d2. {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 818.660293] env[61839]: DEBUG oslo_concurrency.lockutils [req-051b40d8-a185-4654-bf8b-4e0de39bba16 req-919fa613-3163-4bfb-a4dd-e5b87e1a0cb0 service nova] Acquiring lock "refresh_cache-6f43abec-51e2-40e4-8a0f-5a8617a9a9f8" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 818.660442] env[61839]: DEBUG oslo_concurrency.lockutils [req-051b40d8-a185-4654-bf8b-4e0de39bba16 req-919fa613-3163-4bfb-a4dd-e5b87e1a0cb0 service nova] Acquired lock "refresh_cache-6f43abec-51e2-40e4-8a0f-5a8617a9a9f8" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 818.660604] env[61839]: DEBUG nova.network.neutron [req-051b40d8-a185-4654-bf8b-4e0de39bba16 req-919fa613-3163-4bfb-a4dd-e5b87e1a0cb0 service nova] [instance: 6f43abec-51e2-40e4-8a0f-5a8617a9a9f8] Refreshing network info cache for port a65466f1-75e9-41dd-8045-71a68e9b31d2 {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 818.661962] env[61839]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d281f65c-8c1c-43de-8df4-b3e3a952eea2 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.673325] env[61839]: INFO nova.virt.vmwareapi.vm_util [None req-fb2077f5-f93b-4202-be51-2a1ff1444651 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Created folder: Project (5aba1e066cb4400dbbacc92f393962e6) in parent group-v281288. [ 818.673531] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-fb2077f5-f93b-4202-be51-2a1ff1444651 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Creating folder: Instances. Parent ref: group-v281333. {{(pid=61839) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 818.673789] env[61839]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-363a226a-7666-4c51-8df2-82f05920dc65 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.683732] env[61839]: INFO nova.virt.vmwareapi.vm_util [None req-fb2077f5-f93b-4202-be51-2a1ff1444651 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Created folder: Instances in parent group-v281333. [ 818.684025] env[61839]: DEBUG oslo.service.loopingcall [None req-fb2077f5-f93b-4202-be51-2a1ff1444651 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
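Unlike the *_Task calls, Folder.CreateFolder is synchronous: it returns the new folder's moref directly instead of a task. A sketch of the two invocations above, reusing the session and vim_util from the earlier sketch; the parent moref value is copied from the log, and the duplicate-name handling the real create_folder performs is omitted:

    parent_ref = vim_util.get_moref('group-v281288', 'Folder')
    project_folder = session.invoke_api(
        session.vim, 'CreateFolder', parent_ref,
        name='Project (5aba1e066cb4400dbbacc92f393962e6)')
    instances_folder = session.invoke_api(
        session.vim, 'CreateFolder', project_folder, name='Instances')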
[ 818.685522] env[61839]: DEBUG nova.policy [None req-6c4e691d-a55a-4273-a9e4-fb954224d381 tempest-ServerMetadataNegativeTestJSON-1868427009 tempest-ServerMetadataNegativeTestJSON-1868427009-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5f5f3bcf31be4512ae65d3a817d06de6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ebb68c2dc9904844a7549b63ac2cf0fc', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61839) authorize /opt/stack/nova/nova/policy.py:201}} [ 818.686944] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6f43abec-51e2-40e4-8a0f-5a8617a9a9f8] Creating VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 818.687462] env[61839]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c5011d0b-b88e-4124-8092-da5177f9b576 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.707620] env[61839]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 818.707620] env[61839]: value = "task-1314396" [ 818.707620] env[61839]: _type = "Task" [ 818.707620] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 818.715324] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314396, 'name': CreateVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 818.758308] env[61839]: DEBUG oslo_vmware.api [None req-a5856ade-37cd-4042-9d02-1ead51c7ffbc tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Task: {'id': task-1314392, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.153823} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 818.758604] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-a5856ade-37cd-4042-9d02-1ead51c7ffbc tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Deleted the datastore file {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 818.758839] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-a5856ade-37cd-4042-9d02-1ead51c7ffbc tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: e65da0fd-e877-4b25-a319-e4d65397056a] Deleted contents of the VM from datastore datastore1 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 818.759074] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-a5856ade-37cd-4042-9d02-1ead51c7ffbc tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: e65da0fd-e877-4b25-a319-e4d65397056a] Instance destroyed {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 818.759305] env[61839]: INFO nova.compute.manager [None req-a5856ade-37cd-4042-9d02-1ead51c7ffbc tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: e65da0fd-e877-4b25-a319-e4d65397056a] Took 0.67 seconds to destroy the instance on the hypervisor.
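That completes the teardown that started a few entries back: UnregisterVM removed instance e65da0fd from the vCenter inventory, and DeleteDatastoreFile_Task then removed its directory from datastore1. The same sequence as a hedged sketch on top of oslo.vmware (session and vm_ref from the earlier sketch; dc_ref is an assumed datacenter moref):

    # UnregisterVM is synchronous and leaves the VM's files on the
    # datastore...
    session.invoke_api(session.vim, 'UnregisterVM', vm_ref)

    # ...so the directory is deleted afterwards via a FileManager task
    # (path copied from the log).
    file_manager = session.vim.service_content.fileManager
    task = session.invoke_api(
        session.vim, 'DeleteDatastoreFile_Task', file_manager,
        name='[datastore1] e65da0fd-e877-4b25-a319-e4d65397056a',
        datacenter=dc_ref)  # dc_ref: assumed datacenter moref
    session.wait_for_task(task)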
[ 818.759587] env[61839]: DEBUG oslo.service.loopingcall [None req-a5856ade-37cd-4042-9d02-1ead51c7ffbc tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 818.759790] env[61839]: DEBUG nova.compute.manager [-] [instance: e65da0fd-e877-4b25-a319-e4d65397056a] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 818.759896] env[61839]: DEBUG nova.network.neutron [-] [instance: e65da0fd-e877-4b25-a319-e4d65397056a] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 818.765785] env[61839]: DEBUG oslo_concurrency.lockutils [None req-6de79a71-94ce-468a-ba01-6c81e70cb056 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Acquiring lock "ef49a6f5-27c3-4595-af65-d6a5aa47d4e4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 818.766049] env[61839]: DEBUG oslo_concurrency.lockutils [None req-6de79a71-94ce-468a-ba01-6c81e70cb056 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Lock "ef49a6f5-27c3-4595-af65-d6a5aa47d4e4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 818.827859] env[61839]: DEBUG oslo_vmware.api [None req-a3c3e5d4-86a4-4921-beca-8a309bae8dfa tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Task: {'id': task-1314388, 'name': PowerOnVM_Task, 'duration_secs': 1.076199} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 818.828120] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-a3c3e5d4-86a4-4921-beca-8a309bae8dfa tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 5c29c188-a34b-4751-9f8b-166af7b15088] Powered on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 818.830993] env[61839]: DEBUG nova.compute.manager [None req-a3c3e5d4-86a4-4921-beca-8a309bae8dfa tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 5c29c188-a34b-4751-9f8b-166af7b15088] Checking state {{(pid=61839) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 818.832129] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-751ab44b-ac81-4051-abc6-9635de667c62 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.900363] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314387, 'name': CreateVM_Task, 'duration_secs': 1.811185} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 818.900544] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4] Created VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 818.901289] env[61839]: DEBUG oslo_concurrency.lockutils [None req-4445949b-c57d-48df-ad34-d533bc3c88d3 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 818.901535] env[61839]: DEBUG oslo_concurrency.lockutils [None req-4445949b-c57d-48df-ad34-d533bc3c88d3 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 818.901805] env[61839]: DEBUG oslo_concurrency.lockutils [None req-4445949b-c57d-48df-ad34-d533bc3c88d3 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 818.902349] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5439d99c-687a-4da6-bbe3-46821d503d5e {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.907120] env[61839]: DEBUG oslo_vmware.api [None req-4445949b-c57d-48df-ad34-d533bc3c88d3 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Waiting for the task: (returnval){ [ 818.907120] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]5230ba1b-921f-5a9f-0ce3-2074d999d2d5" [ 818.907120] env[61839]: _type = "Task" [ 818.907120] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 818.920689] env[61839]: DEBUG oslo_vmware.api [None req-4445949b-c57d-48df-ad34-d533bc3c88d3 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]5230ba1b-921f-5a9f-0ce3-2074d999d2d5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 819.048397] env[61839]: DEBUG oslo_vmware.api [None req-67f27bd5-5058-4162-a21b-0d02d096f4b9 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': task-1314393, 'name': Rename_Task, 'duration_secs': 0.277668} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 819.048677] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-67f27bd5-5058-4162-a21b-0d02d096f4b9 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: a661cc10-5c4e-421b-b70b-189f0a613e8a] Powering on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 819.048935] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6c1e9884-5712-4d3e-bdd1-9ce31ceea3be {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.055697] env[61839]: DEBUG oslo_vmware.api [None req-67f27bd5-5058-4162-a21b-0d02d096f4b9 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Waiting for the task: (returnval){ [ 819.055697] env[61839]: value = "task-1314397" [ 819.055697] env[61839]: _type = "Task" [ 819.055697] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 819.063691] env[61839]: DEBUG oslo_vmware.api [None req-67f27bd5-5058-4162-a21b-0d02d096f4b9 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': task-1314397, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 819.075368] env[61839]: DEBUG nova.compute.manager [None req-6c4e691d-a55a-4273-a9e4-fb954224d381 tempest-ServerMetadataNegativeTestJSON-1868427009 tempest-ServerMetadataNegativeTestJSON-1868427009-project-member] [instance: 406da948-71c7-4c28-9ee3-10af64b1ab51] Start building block device mappings for instance. 
{{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 819.126919] env[61839]: DEBUG nova.compute.manager [req-89904e38-5939-4b01-bffc-437c697a4a7a req-ff66f587-afec-423e-b8d6-3b964e0a3ab3 service nova] [instance: 047080fa-8781-47b1-89d8-2e4c8031b164] Received event network-vif-plugged-6abe31af-55d3-43dc-b16e-b1e5d4c6f7dc {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 819.127837] env[61839]: DEBUG oslo_concurrency.lockutils [req-89904e38-5939-4b01-bffc-437c697a4a7a req-ff66f587-afec-423e-b8d6-3b964e0a3ab3 service nova] Acquiring lock "047080fa-8781-47b1-89d8-2e4c8031b164-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 819.127837] env[61839]: DEBUG oslo_concurrency.lockutils [req-89904e38-5939-4b01-bffc-437c697a4a7a req-ff66f587-afec-423e-b8d6-3b964e0a3ab3 service nova] Lock "047080fa-8781-47b1-89d8-2e4c8031b164-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 819.127837] env[61839]: DEBUG oslo_concurrency.lockutils [req-89904e38-5939-4b01-bffc-437c697a4a7a req-ff66f587-afec-423e-b8d6-3b964e0a3ab3 service nova] Lock "047080fa-8781-47b1-89d8-2e4c8031b164-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 819.127837] env[61839]: DEBUG nova.compute.manager [req-89904e38-5939-4b01-bffc-437c697a4a7a req-ff66f587-afec-423e-b8d6-3b964e0a3ab3 service nova] [instance: 047080fa-8781-47b1-89d8-2e4c8031b164] No waiting events found dispatching network-vif-plugged-6abe31af-55d3-43dc-b16e-b1e5d4c6f7dc {{(pid=61839) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 819.128173] env[61839]: WARNING nova.compute.manager [req-89904e38-5939-4b01-bffc-437c697a4a7a req-ff66f587-afec-423e-b8d6-3b964e0a3ab3 service nova] [instance: 047080fa-8781-47b1-89d8-2e4c8031b164] Received unexpected event network-vif-plugged-6abe31af-55d3-43dc-b16e-b1e5d4c6f7dc for instance with vm_state building and task_state spawning. [ 819.218735] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314396, 'name': CreateVM_Task, 'duration_secs': 0.312646} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 819.218911] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6f43abec-51e2-40e4-8a0f-5a8617a9a9f8] Created VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 819.219964] env[61839]: DEBUG oslo_concurrency.lockutils [None req-fb2077f5-f93b-4202-be51-2a1ff1444651 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 819.354170] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-667a4a22-ff02-4dc0-b81a-aef0bb2f3a66 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.367837] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-223f1b11-f45f-44bc-906b-c43589c85dd7 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.402206] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f640851-7daa-4020-a273-f55d0de1f3d2 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.413330] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f50bc39-3e85-409f-9b61-f30d7b655492 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.426012] env[61839]: DEBUG oslo_vmware.api [None req-4445949b-c57d-48df-ad34-d533bc3c88d3 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]5230ba1b-921f-5a9f-0ce3-2074d999d2d5, 'name': SearchDatastore_Task, 'duration_secs': 0.016139} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 819.434104] env[61839]: DEBUG oslo_concurrency.lockutils [None req-4445949b-c57d-48df-ad34-d533bc3c88d3 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 819.434392] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-4445949b-c57d-48df-ad34-d533bc3c88d3 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4] Processing image e497cc62-282a-4a70-9770-22d80d8a1013 {{(pid=61839) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 819.434642] env[61839]: DEBUG oslo_concurrency.lockutils [None req-4445949b-c57d-48df-ad34-d533bc3c88d3 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 819.434821] env[61839]: DEBUG oslo_concurrency.lockutils [None req-4445949b-c57d-48df-ad34-d533bc3c88d3 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 819.435037] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-4445949b-c57d-48df-ad34-d533bc3c88d3 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 819.436927] env[61839]: DEBUG nova.compute.provider_tree [None req-a242d3b0-5131-4888-830a-8c848380688b tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 819.438909] env[61839]: DEBUG nova.network.neutron [req-051b40d8-a185-4654-bf8b-4e0de39bba16 req-919fa613-3163-4bfb-a4dd-e5b87e1a0cb0 service nova] [instance: 6f43abec-51e2-40e4-8a0f-5a8617a9a9f8] Updated VIF entry in instance network info cache for port a65466f1-75e9-41dd-8045-71a68e9b31d2. 
{{(pid=61839) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 819.439281] env[61839]: DEBUG nova.network.neutron [req-051b40d8-a185-4654-bf8b-4e0de39bba16 req-919fa613-3163-4bfb-a4dd-e5b87e1a0cb0 service nova] [instance: 6f43abec-51e2-40e4-8a0f-5a8617a9a9f8] Updating instance_info_cache with network_info: [{"id": "a65466f1-75e9-41dd-8045-71a68e9b31d2", "address": "fa:16:3e:9a:42:3c", "network": {"id": "100bb090-83a3-4de8-bd2b-f65900cd5a3e", "bridge": "br-int", "label": "tempest-ServersTestJSON-901620943-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5aba1e066cb4400dbbacc92f393962e6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3b67e519-46cf-44ce-b670-4ba4c0c5b658", "external-id": "nsx-vlan-transportzone-110", "segmentation_id": 110, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa65466f1-75", "ovs_interfaceid": "a65466f1-75e9-41dd-8045-71a68e9b31d2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 819.440338] env[61839]: DEBUG oslo_concurrency.lockutils [None req-fb2077f5-f93b-4202-be51-2a1ff1444651 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 819.440659] env[61839]: DEBUG oslo_concurrency.lockutils [None req-fb2077f5-f93b-4202-be51-2a1ff1444651 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 819.440906] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-19acd8ba-902f-4207-b1e6-7c75e672287d {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.447101] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9d488de5-978e-4ebc-8b7e-99fa7d635633 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.454805] env[61839]: DEBUG oslo_vmware.api [None req-fb2077f5-f93b-4202-be51-2a1ff1444651 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Waiting for the task: (returnval){ [ 819.454805] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]529ff64e-bd1a-a8e9-8870-b07b5c8999d8" [ 819.454805] env[61839]: _type = "Task" [ 819.454805] env[61839]: } to complete. 
{{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 819.456955] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-4445949b-c57d-48df-ad34-d533bc3c88d3 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 819.457183] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-4445949b-c57d-48df-ad34-d533bc3c88d3 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61839) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 819.461034] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b751670e-1f93-4d32-a5e8-a959cfa4fc08 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.472019] env[61839]: DEBUG oslo_vmware.api [None req-fb2077f5-f93b-4202-be51-2a1ff1444651 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]529ff64e-bd1a-a8e9-8870-b07b5c8999d8, 'name': SearchDatastore_Task, 'duration_secs': 0.009873} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 819.472019] env[61839]: DEBUG oslo_concurrency.lockutils [None req-fb2077f5-f93b-4202-be51-2a1ff1444651 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 819.472019] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-fb2077f5-f93b-4202-be51-2a1ff1444651 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 6f43abec-51e2-40e4-8a0f-5a8617a9a9f8] Processing image e497cc62-282a-4a70-9770-22d80d8a1013 {{(pid=61839) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 819.472019] env[61839]: DEBUG oslo_concurrency.lockutils [None req-fb2077f5-f93b-4202-be51-2a1ff1444651 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 819.472019] env[61839]: DEBUG oslo_vmware.api [None req-4445949b-c57d-48df-ad34-d533bc3c88d3 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Waiting for the task: (returnval){ [ 819.472019] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52f9f8e5-44da-ea62-5de1-94cfdd524110" [ 819.472243] env[61839]: _type = "Task" [ 819.472243] env[61839]: } to complete. 
{{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 819.479034] env[61839]: DEBUG oslo_vmware.api [None req-4445949b-c57d-48df-ad34-d533bc3c88d3 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52f9f8e5-44da-ea62-5de1-94cfdd524110, 'name': SearchDatastore_Task, 'duration_secs': 0.009595} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 819.481496] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ecdfea39-af83-4fc5-921c-d3b35a2ceabb {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.485370] env[61839]: DEBUG oslo_vmware.api [None req-4445949b-c57d-48df-ad34-d533bc3c88d3 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Waiting for the task: (returnval){ [ 819.485370] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]527a0f55-0df6-2ff3-3478-fc813c2900d2" [ 819.485370] env[61839]: _type = "Task" [ 819.485370] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 819.493178] env[61839]: DEBUG oslo_vmware.api [None req-4445949b-c57d-48df-ad34-d533bc3c88d3 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]527a0f55-0df6-2ff3-3478-fc813c2900d2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 819.566463] env[61839]: DEBUG oslo_vmware.api [None req-67f27bd5-5058-4162-a21b-0d02d096f4b9 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': task-1314397, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 819.568769] env[61839]: DEBUG nova.network.neutron [None req-6c4e691d-a55a-4273-a9e4-fb954224d381 tempest-ServerMetadataNegativeTestJSON-1868427009 tempest-ServerMetadataNegativeTestJSON-1868427009-project-member] [instance: 406da948-71c7-4c28-9ee3-10af64b1ab51] Successfully created port: bc314e6e-7005-4b41-8a6a-929b75782af8 {{(pid=61839) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 819.744534] env[61839]: DEBUG nova.network.neutron [None req-0ff6597c-ab3c-4cf9-b3ae-5fc8fc4a3084 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 047080fa-8781-47b1-89d8-2e4c8031b164] Successfully updated port: 6abe31af-55d3-43dc-b16e-b1e5d4c6f7dc {{(pid=61839) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 819.948387] env[61839]: DEBUG nova.scheduler.client.report [None req-a242d3b0-5131-4888-830a-8c848380688b tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 819.952920] env[61839]: DEBUG oslo_concurrency.lockutils [req-051b40d8-a185-4654-bf8b-4e0de39bba16 req-919fa613-3163-4bfb-a4dd-e5b87e1a0cb0 service nova] Releasing lock "refresh_cache-6f43abec-51e2-40e4-8a0f-5a8617a9a9f8" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 819.986060] env[61839]: DEBUG nova.network.neutron [-] [instance: e65da0fd-e877-4b25-a319-e4d65397056a] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 819.999027] env[61839]: DEBUG oslo_vmware.api [None req-4445949b-c57d-48df-ad34-d533bc3c88d3 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]527a0f55-0df6-2ff3-3478-fc813c2900d2, 'name': SearchDatastore_Task, 'duration_secs': 0.011317} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 819.999636] env[61839]: DEBUG oslo_concurrency.lockutils [None req-4445949b-c57d-48df-ad34-d533bc3c88d3 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 819.999909] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-4445949b-c57d-48df-ad34-d533bc3c88d3 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk to [datastore2] fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4/fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 820.000228] env[61839]: DEBUG oslo_concurrency.lockutils [None req-fb2077f5-f93b-4202-be51-2a1ff1444651 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 820.000421] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-fb2077f5-f93b-4202-be51-2a1ff1444651 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 820.000641] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2c9e2c40-7c1c-44ef-90ac-16edc513bb12 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.004982] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2697ace7-d714-40f0-9d54-c979e71f0c4a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.011894] env[61839]: DEBUG oslo_vmware.api [None req-4445949b-c57d-48df-ad34-d533bc3c88d3 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Waiting for the task: (returnval){ [ 820.011894] env[61839]: value = "task-1314398" [ 820.011894] env[61839]: _type = "Task" [ 820.011894] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 820.013041] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-fb2077f5-f93b-4202-be51-2a1ff1444651 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 820.013208] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-fb2077f5-f93b-4202-be51-2a1ff1444651 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61839) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}}
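The SearchDatastore_Task and MakeDirectory calls around here are the cache check in _fetch_image_if_missing: image e497cc62 is already in devstack-image-cache_base, so the spawn path goes straight to copying the cached VMDK into the new instance's directory. A sketch of that copy, with the paths copied from the log and session/dc_ref as in the earlier sketches:

    src = ('[datastore2] devstack-image-cache_base/'
           'e497cc62-282a-4a70-9770-22d80d8a1013/'
           'e497cc62-282a-4a70-9770-22d80d8a1013.vmdk')
    dst = ('[datastore2] fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4/'
           'fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4.vmdk')

    disk_manager = session.vim.service_content.virtualDiskManager
    task = session.invoke_api(
        session.vim, 'CopyVirtualDisk_Task', disk_manager,
        sourceName=src, sourceDatacenter=dc_ref,
        destName=dst, destDatacenter=dc_ref)
    session.wait_for_task(task)  # the CopyVirtualDisk_Task polled below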
[ 820.016964] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1052346a-9180-444c-b259-d9629f129fd2 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.023863] env[61839]: DEBUG oslo_vmware.api [None req-4445949b-c57d-48df-ad34-d533bc3c88d3 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1314398, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 820.024782] env[61839]: DEBUG oslo_vmware.api [None req-fb2077f5-f93b-4202-be51-2a1ff1444651 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Waiting for the task: (returnval){ [ 820.024782] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52531c7d-ba6c-c431-9e88-4f73b6269a1c" [ 820.024782] env[61839]: _type = "Task" [ 820.024782] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 820.032120] env[61839]: DEBUG oslo_vmware.api [None req-fb2077f5-f93b-4202-be51-2a1ff1444651 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52531c7d-ba6c-c431-9e88-4f73b6269a1c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 820.068966] env[61839]: DEBUG oslo_vmware.api [None req-67f27bd5-5058-4162-a21b-0d02d096f4b9 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': task-1314397, 'name': PowerOnVM_Task} progress is 89%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 820.086317] env[61839]: DEBUG nova.compute.manager [None req-6c4e691d-a55a-4273-a9e4-fb954224d381 tempest-ServerMetadataNegativeTestJSON-1868427009 tempest-ServerMetadataNegativeTestJSON-1868427009-project-member] [instance: 406da948-71c7-4c28-9ee3-10af64b1ab51] Start spawning the instance on the hypervisor.
{{(pid=61839) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 820.114853] env[61839]: DEBUG nova.virt.hardware [None req-6c4e691d-a55a-4273-a9e4-fb954224d381 tempest-ServerMetadataNegativeTestJSON-1868427009 tempest-ServerMetadataNegativeTestJSON-1868427009-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-18T16:51:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-18T16:50:57Z,direct_url=,disk_format='vmdk',id=e497cc62-282a-4a70-9770-22d80d8a1013,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a98e034276e44534a9feace637762da7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-18T16:50:58Z,virtual_size=,visibility=), allow threads: False {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 820.115447] env[61839]: DEBUG nova.virt.hardware [None req-6c4e691d-a55a-4273-a9e4-fb954224d381 tempest-ServerMetadataNegativeTestJSON-1868427009 tempest-ServerMetadataNegativeTestJSON-1868427009-project-member] Flavor limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 820.115447] env[61839]: DEBUG nova.virt.hardware [None req-6c4e691d-a55a-4273-a9e4-fb954224d381 tempest-ServerMetadataNegativeTestJSON-1868427009 tempest-ServerMetadataNegativeTestJSON-1868427009-project-member] Image limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 820.115623] env[61839]: DEBUG nova.virt.hardware [None req-6c4e691d-a55a-4273-a9e4-fb954224d381 tempest-ServerMetadataNegativeTestJSON-1868427009 tempest-ServerMetadataNegativeTestJSON-1868427009-project-member] Flavor pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 820.115789] env[61839]: DEBUG nova.virt.hardware [None req-6c4e691d-a55a-4273-a9e4-fb954224d381 tempest-ServerMetadataNegativeTestJSON-1868427009 tempest-ServerMetadataNegativeTestJSON-1868427009-project-member] Image pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 820.115978] env[61839]: DEBUG nova.virt.hardware [None req-6c4e691d-a55a-4273-a9e4-fb954224d381 tempest-ServerMetadataNegativeTestJSON-1868427009 tempest-ServerMetadataNegativeTestJSON-1868427009-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 820.116279] env[61839]: DEBUG nova.virt.hardware [None req-6c4e691d-a55a-4273-a9e4-fb954224d381 tempest-ServerMetadataNegativeTestJSON-1868427009 tempest-ServerMetadataNegativeTestJSON-1868427009-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 820.116473] env[61839]: DEBUG nova.virt.hardware [None req-6c4e691d-a55a-4273-a9e4-fb954224d381 tempest-ServerMetadataNegativeTestJSON-1868427009 tempest-ServerMetadataNegativeTestJSON-1868427009-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61839) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 820.116692] env[61839]: DEBUG nova.virt.hardware [None req-6c4e691d-a55a-4273-a9e4-fb954224d381 tempest-ServerMetadataNegativeTestJSON-1868427009 tempest-ServerMetadataNegativeTestJSON-1868427009-project-member] Got 1 possible topologies {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 820.116903] env[61839]: DEBUG nova.virt.hardware [None req-6c4e691d-a55a-4273-a9e4-fb954224d381 tempest-ServerMetadataNegativeTestJSON-1868427009 tempest-ServerMetadataNegativeTestJSON-1868427009-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 820.117148] env[61839]: DEBUG nova.virt.hardware [None req-6c4e691d-a55a-4273-a9e4-fb954224d381 tempest-ServerMetadataNegativeTestJSON-1868427009 tempest-ServerMetadataNegativeTestJSON-1868427009-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 820.118158] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7860d7d0-207b-40f3-94c7-45d34cfb3264 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.126950] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54d4a64e-94d3-42ff-a77b-b5254e5fbccc {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.162386] env[61839]: INFO nova.compute.manager [None req-67d71366-93ae-43e4-8ab7-2c9c6a58104b tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 5c29c188-a34b-4751-9f8b-166af7b15088] Unrescuing [ 820.162821] env[61839]: DEBUG oslo_concurrency.lockutils [None req-67d71366-93ae-43e4-8ab7-2c9c6a58104b tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Acquiring lock "refresh_cache-5c29c188-a34b-4751-9f8b-166af7b15088" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 820.163117] env[61839]: DEBUG oslo_concurrency.lockutils [None req-67d71366-93ae-43e4-8ab7-2c9c6a58104b tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Acquired lock "refresh_cache-5c29c188-a34b-4751-9f8b-166af7b15088" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 820.163344] env[61839]: DEBUG nova.network.neutron [None req-67d71366-93ae-43e4-8ab7-2c9c6a58104b tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 5c29c188-a34b-4751-9f8b-166af7b15088] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 820.247365] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0ff6597c-ab3c-4cf9-b3ae-5fc8fc4a3084 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Acquiring lock "refresh_cache-047080fa-8781-47b1-89d8-2e4c8031b164" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 820.247365] env[61839]: DEBUG 
oslo_concurrency.lockutils [None req-0ff6597c-ab3c-4cf9-b3ae-5fc8fc4a3084 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Acquired lock "refresh_cache-047080fa-8781-47b1-89d8-2e4c8031b164" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 820.247365] env[61839]: DEBUG nova.network.neutron [None req-0ff6597c-ab3c-4cf9-b3ae-5fc8fc4a3084 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 047080fa-8781-47b1-89d8-2e4c8031b164] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 820.454034] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a242d3b0-5131-4888-830a-8c848380688b tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.389s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 820.454594] env[61839]: DEBUG nova.compute.manager [None req-a242d3b0-5131-4888-830a-8c848380688b tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: 694a5d4b-3673-406b-a24a-d37fad33e549] Start building networks asynchronously for instance. {{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 820.457927] env[61839]: DEBUG oslo_concurrency.lockutils [None req-ae67ebcf-1bc3-48ee-a99f-0d94b4c40112 tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 21.064s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 820.458186] env[61839]: DEBUG nova.objects.instance [None req-ae67ebcf-1bc3-48ee-a99f-0d94b4c40112 tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] Lazy-loading 'resources' on Instance uuid 687e6bf8-cf2a-4a9f-ad87-b42c4d0ced0a {{(pid=61839) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 820.492503] env[61839]: INFO nova.compute.manager [-] [instance: e65da0fd-e877-4b25-a319-e4d65397056a] Took 1.73 seconds to deallocate network for instance. [ 820.523116] env[61839]: DEBUG oslo_vmware.api [None req-4445949b-c57d-48df-ad34-d533bc3c88d3 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1314398, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.510085} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 820.523516] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-4445949b-c57d-48df-ad34-d533bc3c88d3 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk to [datastore2] fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4/fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 820.523913] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-4445949b-c57d-48df-ad34-d533bc3c88d3 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4] Extending root virtual disk to 1048576 {{(pid=61839) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 820.524100] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2062d814-b190-4e63-81d7-c982ccce3ceb {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.536214] env[61839]: DEBUG oslo_vmware.api [None req-fb2077f5-f93b-4202-be51-2a1ff1444651 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52531c7d-ba6c-c431-9e88-4f73b6269a1c, 'name': SearchDatastore_Task, 'duration_secs': 0.00936} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 820.538782] env[61839]: DEBUG oslo_vmware.api [None req-4445949b-c57d-48df-ad34-d533bc3c88d3 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Waiting for the task: (returnval){ [ 820.538782] env[61839]: value = "task-1314399" [ 820.538782] env[61839]: _type = "Task" [ 820.538782] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 820.539185] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e7809700-6130-4417-8a4a-aa3dc4535dc5 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.551835] env[61839]: DEBUG oslo_vmware.api [None req-4445949b-c57d-48df-ad34-d533bc3c88d3 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1314399, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 820.552814] env[61839]: DEBUG oslo_vmware.api [None req-fb2077f5-f93b-4202-be51-2a1ff1444651 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Waiting for the task: (returnval){ [ 820.552814] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52ad78e5-39f2-0872-76b5-1f403dec21cb" [ 820.552814] env[61839]: _type = "Task" [ 820.552814] env[61839]: } to complete. 
{{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 820.561601] env[61839]: DEBUG oslo_vmware.api [None req-fb2077f5-f93b-4202-be51-2a1ff1444651 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52ad78e5-39f2-0872-76b5-1f403dec21cb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 820.569509] env[61839]: DEBUG oslo_vmware.api [None req-67f27bd5-5058-4162-a21b-0d02d096f4b9 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': task-1314397, 'name': PowerOnVM_Task, 'duration_secs': 1.093525} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 820.569816] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-67f27bd5-5058-4162-a21b-0d02d096f4b9 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: a661cc10-5c4e-421b-b70b-189f0a613e8a] Powered on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 820.569972] env[61839]: INFO nova.compute.manager [None req-67f27bd5-5058-4162-a21b-0d02d096f4b9 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: a661cc10-5c4e-421b-b70b-189f0a613e8a] Took 10.10 seconds to spawn the instance on the hypervisor. [ 820.570166] env[61839]: DEBUG nova.compute.manager [None req-67f27bd5-5058-4162-a21b-0d02d096f4b9 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: a661cc10-5c4e-421b-b70b-189f0a613e8a] Checking state {{(pid=61839) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 820.570924] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acaf0f5d-2602-4ea2-a368-688ce4a959cf {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.720731] env[61839]: DEBUG nova.compute.manager [req-8c029807-a53a-4082-b82e-c9dcc03afdfd req-c5ab49f3-eb15-4930-b854-956e8a84a873 service nova] [instance: e65da0fd-e877-4b25-a319-e4d65397056a] Received event network-vif-deleted-140a636b-8035-4bf7-8462-ae02a41f2316 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 820.720944] env[61839]: DEBUG nova.compute.manager [req-8c029807-a53a-4082-b82e-c9dcc03afdfd req-c5ab49f3-eb15-4930-b854-956e8a84a873 service nova] [instance: 047080fa-8781-47b1-89d8-2e4c8031b164] Received event network-changed-6abe31af-55d3-43dc-b16e-b1e5d4c6f7dc {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 820.721125] env[61839]: DEBUG nova.compute.manager [req-8c029807-a53a-4082-b82e-c9dcc03afdfd req-c5ab49f3-eb15-4930-b854-956e8a84a873 service nova] [instance: 047080fa-8781-47b1-89d8-2e4c8031b164] Refreshing instance network info cache due to event network-changed-6abe31af-55d3-43dc-b16e-b1e5d4c6f7dc. 
{{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 820.721349] env[61839]: DEBUG oslo_concurrency.lockutils [req-8c029807-a53a-4082-b82e-c9dcc03afdfd req-c5ab49f3-eb15-4930-b854-956e8a84a873 service nova] Acquiring lock "refresh_cache-047080fa-8781-47b1-89d8-2e4c8031b164" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 820.817459] env[61839]: DEBUG nova.network.neutron [None req-0ff6597c-ab3c-4cf9-b3ae-5fc8fc4a3084 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 047080fa-8781-47b1-89d8-2e4c8031b164] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 820.964889] env[61839]: DEBUG nova.compute.utils [None req-a242d3b0-5131-4888-830a-8c848380688b tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Using /dev/sd instead of None {{(pid=61839) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 820.967364] env[61839]: DEBUG nova.compute.manager [None req-a242d3b0-5131-4888-830a-8c848380688b tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: 694a5d4b-3673-406b-a24a-d37fad33e549] Allocating IP information in the background. {{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 820.967364] env[61839]: DEBUG nova.network.neutron [None req-a242d3b0-5131-4888-830a-8c848380688b tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: 694a5d4b-3673-406b-a24a-d37fad33e549] allocate_for_instance() {{(pid=61839) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 821.000912] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a5856ade-37cd-4042-9d02-1ead51c7ffbc tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 821.031177] env[61839]: DEBUG nova.policy [None req-a242d3b0-5131-4888-830a-8c848380688b tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '600416123b9a4d4ab84f866d0a278a9e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b73ee7e490914f54925597f38c8cc05b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61839) authorize /opt/stack/nova/nova/policy.py:201}} [ 821.053285] env[61839]: DEBUG oslo_vmware.api [None req-4445949b-c57d-48df-ad34-d533bc3c88d3 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1314399, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074336} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 821.057209] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-4445949b-c57d-48df-ad34-d533bc3c88d3 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4] Extended root virtual disk {{(pid=61839) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 821.065971] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45fe17bd-895c-4cb6-b0a9-750c1f7396e0 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.076572] env[61839]: DEBUG oslo_vmware.api [None req-fb2077f5-f93b-4202-be51-2a1ff1444651 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52ad78e5-39f2-0872-76b5-1f403dec21cb, 'name': SearchDatastore_Task, 'duration_secs': 0.010115} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 821.090450] env[61839]: DEBUG oslo_concurrency.lockutils [None req-fb2077f5-f93b-4202-be51-2a1ff1444651 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 821.090988] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-fb2077f5-f93b-4202-be51-2a1ff1444651 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk to [datastore2] 6f43abec-51e2-40e4-8a0f-5a8617a9a9f8/6f43abec-51e2-40e4-8a0f-5a8617a9a9f8.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 821.105830] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-4445949b-c57d-48df-ad34-d533bc3c88d3 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4] Reconfiguring VM instance instance-0000003c to attach disk [datastore2] fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4/fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4.vmdk or device None with type sparse {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 821.107101] env[61839]: DEBUG nova.network.neutron [None req-67d71366-93ae-43e4-8ab7-2c9c6a58104b tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 5c29c188-a34b-4751-9f8b-166af7b15088] Updating instance_info_cache with network_info: [{"id": "2d6e228a-75ff-4bff-bc8d-bdde3218cf40", "address": "fa:16:3e:5a:64:4f", "network": {"id": "6f1d4bda-c333-4556-9530-df719c058a5b", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1248001983-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": 
false, "tenant_id": "223d94c193814f649b5d1f35e3756071", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2180b40f-2bb0-47da-ba80-c2fbe7f98af0", "external-id": "nsx-vlan-transportzone-970", "segmentation_id": 970, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2d6e228a-75", "ovs_interfaceid": "2d6e228a-75ff-4bff-bc8d-bdde3218cf40", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 821.115743] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8038fe81-3f6b-478f-a77c-503eaef2cffa {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.120161] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a21d3d80-f8fb-491b-a873-77c30af8d0a0 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.135711] env[61839]: DEBUG oslo_concurrency.lockutils [None req-67d71366-93ae-43e4-8ab7-2c9c6a58104b tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Releasing lock "refresh_cache-5c29c188-a34b-4751-9f8b-166af7b15088" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 821.139438] env[61839]: DEBUG nova.objects.instance [None req-67d71366-93ae-43e4-8ab7-2c9c6a58104b tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Lazy-loading 'flavor' on Instance uuid 5c29c188-a34b-4751-9f8b-166af7b15088 {{(pid=61839) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 821.139438] env[61839]: INFO nova.compute.manager [None req-67f27bd5-5058-4162-a21b-0d02d096f4b9 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: a661cc10-5c4e-421b-b70b-189f0a613e8a] Took 35.20 seconds to build instance. [ 821.144820] env[61839]: DEBUG oslo_vmware.api [None req-fb2077f5-f93b-4202-be51-2a1ff1444651 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Waiting for the task: (returnval){ [ 821.144820] env[61839]: value = "task-1314400" [ 821.144820] env[61839]: _type = "Task" [ 821.144820] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 821.146385] env[61839]: DEBUG oslo_vmware.api [None req-4445949b-c57d-48df-ad34-d533bc3c88d3 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Waiting for the task: (returnval){ [ 821.146385] env[61839]: value = "task-1314401" [ 821.146385] env[61839]: _type = "Task" [ 821.146385] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 821.163552] env[61839]: DEBUG oslo_vmware.api [None req-fb2077f5-f93b-4202-be51-2a1ff1444651 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': task-1314400, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 821.167524] env[61839]: DEBUG oslo_vmware.api [None req-4445949b-c57d-48df-ad34-d533bc3c88d3 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1314401, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 821.237758] env[61839]: DEBUG nova.network.neutron [None req-0ff6597c-ab3c-4cf9-b3ae-5fc8fc4a3084 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 047080fa-8781-47b1-89d8-2e4c8031b164] Updating instance_info_cache with network_info: [{"id": "6abe31af-55d3-43dc-b16e-b1e5d4c6f7dc", "address": "fa:16:3e:2e:3e:05", "network": {"id": "2334f355-8fd7-4df2-ba1c-a28b5fde97f2", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-202913707-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8935bcc7ee644cb7a2a33557a708189c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "37434b93-dfdc-4a3f-bf5a-9f2cbe25a754", "external-id": "nsx-vlan-transportzone-676", "segmentation_id": 676, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6abe31af-55", "ovs_interfaceid": "6abe31af-55d3-43dc-b16e-b1e5d4c6f7dc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 821.348266] env[61839]: DEBUG nova.network.neutron [None req-a242d3b0-5131-4888-830a-8c848380688b tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: 694a5d4b-3673-406b-a24a-d37fad33e549] Successfully created port: 4c2c0fb5-064b-4c53-9498-77b9ddc16884 {{(pid=61839) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 821.380166] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af168768-c4d9-4474-99a7-d6078b7bff6d {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.389314] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8fbf734-3217-4038-8dfa-b168d43217da {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.426253] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40d60459-bd10-4444-969a-82672c64a899 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.434160] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcfb467a-d997-4fa1-a645-9ee1013db78d {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.448503] env[61839]: 
DEBUG nova.compute.provider_tree [None req-ae67ebcf-1bc3-48ee-a99f-0d94b4c40112 tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 821.469942] env[61839]: DEBUG nova.compute.manager [None req-a242d3b0-5131-4888-830a-8c848380688b tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: 694a5d4b-3673-406b-a24a-d37fad33e549] Start building block device mappings for instance. {{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 821.570408] env[61839]: DEBUG oslo_concurrency.lockutils [None req-6d3177cd-a4b0-4e7c-90c9-b61ffa1d799f tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Acquiring lock "a661cc10-5c4e-421b-b70b-189f0a613e8a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 821.599173] env[61839]: DEBUG nova.network.neutron [None req-6c4e691d-a55a-4273-a9e4-fb954224d381 tempest-ServerMetadataNegativeTestJSON-1868427009 tempest-ServerMetadataNegativeTestJSON-1868427009-project-member] [instance: 406da948-71c7-4c28-9ee3-10af64b1ab51] Successfully updated port: bc314e6e-7005-4b41-8a6a-929b75782af8 {{(pid=61839) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 821.640839] env[61839]: DEBUG oslo_concurrency.lockutils [None req-67f27bd5-5058-4162-a21b-0d02d096f4b9 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Lock "a661cc10-5c4e-421b-b70b-189f0a613e8a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 131.550s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 821.644958] env[61839]: DEBUG oslo_concurrency.lockutils [None req-6d3177cd-a4b0-4e7c-90c9-b61ffa1d799f tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Lock "a661cc10-5c4e-421b-b70b-189f0a613e8a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.075s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 821.645234] env[61839]: DEBUG oslo_concurrency.lockutils [None req-6d3177cd-a4b0-4e7c-90c9-b61ffa1d799f tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Acquiring lock "a661cc10-5c4e-421b-b70b-189f0a613e8a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 821.645459] env[61839]: DEBUG oslo_concurrency.lockutils [None req-6d3177cd-a4b0-4e7c-90c9-b61ffa1d799f tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Lock "a661cc10-5c4e-421b-b70b-189f0a613e8a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 821.645634] env[61839]: DEBUG oslo_concurrency.lockutils [None 
req-6d3177cd-a4b0-4e7c-90c9-b61ffa1d799f tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Lock "a661cc10-5c4e-421b-b70b-189f0a613e8a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 821.650373] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-821f3f46-4e98-4279-9422-2471262f83a7 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.653568] env[61839]: INFO nova.compute.manager [None req-6d3177cd-a4b0-4e7c-90c9-b61ffa1d799f tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: a661cc10-5c4e-421b-b70b-189f0a613e8a] Terminating instance [ 821.659019] env[61839]: DEBUG nova.compute.manager [None req-6d3177cd-a4b0-4e7c-90c9-b61ffa1d799f tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: a661cc10-5c4e-421b-b70b-189f0a613e8a] Start destroying the instance on the hypervisor. {{(pid=61839) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 821.659149] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-6d3177cd-a4b0-4e7c-90c9-b61ffa1d799f tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: a661cc10-5c4e-421b-b70b-189f0a613e8a] Destroying instance {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 821.663849] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f71af052-189e-42fc-80bb-7b5e94b94dc9 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.686030] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-67d71366-93ae-43e4-8ab7-2c9c6a58104b tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 5c29c188-a34b-4751-9f8b-166af7b15088] Powering off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 821.688495] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-43adb9b3-6370-4532-925c-e7670452c29e {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.698019] env[61839]: DEBUG oslo_vmware.api [None req-fb2077f5-f93b-4202-be51-2a1ff1444651 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': task-1314400, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 821.698019] env[61839]: DEBUG oslo_vmware.api [None req-4445949b-c57d-48df-ad34-d533bc3c88d3 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1314401, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 821.703306] env[61839]: DEBUG oslo_vmware.api [None req-67d71366-93ae-43e4-8ab7-2c9c6a58104b tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Waiting for the task: (returnval){ [ 821.703306] env[61839]: value = "task-1314402" [ 821.703306] env[61839]: _type = "Task" [ 821.703306] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 821.705698] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d3177cd-a4b0-4e7c-90c9-b61ffa1d799f tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: a661cc10-5c4e-421b-b70b-189f0a613e8a] Powering off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 821.709433] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b73240c8-8418-43d6-9980-871a2ecd6f32 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.717428] env[61839]: DEBUG oslo_vmware.api [None req-67d71366-93ae-43e4-8ab7-2c9c6a58104b tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Task: {'id': task-1314402, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 821.719380] env[61839]: DEBUG oslo_vmware.api [None req-6d3177cd-a4b0-4e7c-90c9-b61ffa1d799f tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Waiting for the task: (returnval){ [ 821.719380] env[61839]: value = "task-1314403" [ 821.719380] env[61839]: _type = "Task" [ 821.719380] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 821.727532] env[61839]: DEBUG oslo_vmware.api [None req-6d3177cd-a4b0-4e7c-90c9-b61ffa1d799f tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': task-1314403, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 821.740549] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0ff6597c-ab3c-4cf9-b3ae-5fc8fc4a3084 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Releasing lock "refresh_cache-047080fa-8781-47b1-89d8-2e4c8031b164" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 821.740900] env[61839]: DEBUG nova.compute.manager [None req-0ff6597c-ab3c-4cf9-b3ae-5fc8fc4a3084 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 047080fa-8781-47b1-89d8-2e4c8031b164] Instance network_info: |[{"id": "6abe31af-55d3-43dc-b16e-b1e5d4c6f7dc", "address": "fa:16:3e:2e:3e:05", "network": {"id": "2334f355-8fd7-4df2-ba1c-a28b5fde97f2", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-202913707-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8935bcc7ee644cb7a2a33557a708189c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "37434b93-dfdc-4a3f-bf5a-9f2cbe25a754", "external-id": "nsx-vlan-transportzone-676", "segmentation_id": 676, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6abe31af-55", "ovs_interfaceid": "6abe31af-55d3-43dc-b16e-b1e5d4c6f7dc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 821.741262] env[61839]: DEBUG oslo_concurrency.lockutils [req-8c029807-a53a-4082-b82e-c9dcc03afdfd req-c5ab49f3-eb15-4930-b854-956e8a84a873 service nova] Acquired lock "refresh_cache-047080fa-8781-47b1-89d8-2e4c8031b164" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 821.741455] env[61839]: DEBUG nova.network.neutron [req-8c029807-a53a-4082-b82e-c9dcc03afdfd req-c5ab49f3-eb15-4930-b854-956e8a84a873 service nova] [instance: 047080fa-8781-47b1-89d8-2e4c8031b164] Refreshing network info cache for port 6abe31af-55d3-43dc-b16e-b1e5d4c6f7dc {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 821.742638] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-0ff6597c-ab3c-4cf9-b3ae-5fc8fc4a3084 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 047080fa-8781-47b1-89d8-2e4c8031b164] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2e:3e:05', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '37434b93-dfdc-4a3f-bf5a-9f2cbe25a754', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6abe31af-55d3-43dc-b16e-b1e5d4c6f7dc', 'vif_model': 'vmxnet3'}] {{(pid=61839) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 821.751291] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ff6597c-ab3c-4cf9-b3ae-5fc8fc4a3084 tempest-ServerDiskConfigTestJSON-292130239 
tempest-ServerDiskConfigTestJSON-292130239-project-member] Creating folder: Project (8935bcc7ee644cb7a2a33557a708189c). Parent ref: group-v281288. {{(pid=61839) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 821.755272] env[61839]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5b1620f2-4bef-4e93-b682-60071da7edcb {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.766371] env[61839]: INFO nova.virt.vmwareapi.vm_util [None req-0ff6597c-ab3c-4cf9-b3ae-5fc8fc4a3084 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Created folder: Project (8935bcc7ee644cb7a2a33557a708189c) in parent group-v281288. [ 821.766561] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ff6597c-ab3c-4cf9-b3ae-5fc8fc4a3084 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Creating folder: Instances. Parent ref: group-v281336. {{(pid=61839) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 821.766796] env[61839]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0eba8842-55f3-4a5d-baf8-50dba0bf04bb {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.775294] env[61839]: INFO nova.virt.vmwareapi.vm_util [None req-0ff6597c-ab3c-4cf9-b3ae-5fc8fc4a3084 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Created folder: Instances in parent group-v281336. [ 821.775543] env[61839]: DEBUG oslo.service.loopingcall [None req-0ff6597c-ab3c-4cf9-b3ae-5fc8fc4a3084 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 821.775730] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 047080fa-8781-47b1-89d8-2e4c8031b164] Creating VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 821.775936] env[61839]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f0a695c1-edcd-48e0-9a13-a540f2e15aa2 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.797240] env[61839]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 821.797240] env[61839]: value = "task-1314406" [ 821.797240] env[61839]: _type = "Task" [ 821.797240] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 821.807437] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314406, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 821.954988] env[61839]: DEBUG nova.scheduler.client.report [None req-ae67ebcf-1bc3-48ee-a99f-0d94b4c40112 tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 822.080378] env[61839]: DEBUG nova.network.neutron [req-8c029807-a53a-4082-b82e-c9dcc03afdfd req-c5ab49f3-eb15-4930-b854-956e8a84a873 service nova] [instance: 047080fa-8781-47b1-89d8-2e4c8031b164] Updated VIF entry in instance network info cache for port 6abe31af-55d3-43dc-b16e-b1e5d4c6f7dc. {{(pid=61839) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 822.080760] env[61839]: DEBUG nova.network.neutron [req-8c029807-a53a-4082-b82e-c9dcc03afdfd req-c5ab49f3-eb15-4930-b854-956e8a84a873 service nova] [instance: 047080fa-8781-47b1-89d8-2e4c8031b164] Updating instance_info_cache with network_info: [{"id": "6abe31af-55d3-43dc-b16e-b1e5d4c6f7dc", "address": "fa:16:3e:2e:3e:05", "network": {"id": "2334f355-8fd7-4df2-ba1c-a28b5fde97f2", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-202913707-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8935bcc7ee644cb7a2a33557a708189c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "37434b93-dfdc-4a3f-bf5a-9f2cbe25a754", "external-id": "nsx-vlan-transportzone-676", "segmentation_id": 676, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6abe31af-55", "ovs_interfaceid": "6abe31af-55d3-43dc-b16e-b1e5d4c6f7dc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 822.101634] env[61839]: DEBUG oslo_concurrency.lockutils [None req-6c4e691d-a55a-4273-a9e4-fb954224d381 tempest-ServerMetadataNegativeTestJSON-1868427009 tempest-ServerMetadataNegativeTestJSON-1868427009-project-member] Acquiring lock "refresh_cache-406da948-71c7-4c28-9ee3-10af64b1ab51" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 822.101898] env[61839]: DEBUG oslo_concurrency.lockutils [None req-6c4e691d-a55a-4273-a9e4-fb954224d381 tempest-ServerMetadataNegativeTestJSON-1868427009 tempest-ServerMetadataNegativeTestJSON-1868427009-project-member] Acquired lock "refresh_cache-406da948-71c7-4c28-9ee3-10af64b1ab51" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 
822.102075] env[61839]: DEBUG nova.network.neutron [None req-6c4e691d-a55a-4273-a9e4-fb954224d381 tempest-ServerMetadataNegativeTestJSON-1868427009 tempest-ServerMetadataNegativeTestJSON-1868427009-project-member] [instance: 406da948-71c7-4c28-9ee3-10af64b1ab51] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 822.148241] env[61839]: DEBUG nova.compute.manager [None req-4312ca92-2e7a-4f1e-9c69-7a382d955d89 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] [instance: 3f86a0d5-30fd-42cc-bd40-14bce9d0e56f] Starting instance... {{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 822.160709] env[61839]: DEBUG oslo_vmware.api [None req-fb2077f5-f93b-4202-be51-2a1ff1444651 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': task-1314400, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.561638} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 822.163931] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-fb2077f5-f93b-4202-be51-2a1ff1444651 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk to [datastore2] 6f43abec-51e2-40e4-8a0f-5a8617a9a9f8/6f43abec-51e2-40e4-8a0f-5a8617a9a9f8.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 822.164213] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-fb2077f5-f93b-4202-be51-2a1ff1444651 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 6f43abec-51e2-40e4-8a0f-5a8617a9a9f8] Extending root virtual disk to 1048576 {{(pid=61839) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 822.165094] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-31f483ed-65ad-4c59-a89a-ef2e4e188e38 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.172315] env[61839]: DEBUG oslo_vmware.api [None req-4445949b-c57d-48df-ad34-d533bc3c88d3 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1314401, 'name': ReconfigVM_Task, 'duration_secs': 0.753473} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 822.173730] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-4445949b-c57d-48df-ad34-d533bc3c88d3 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4] Reconfigured VM instance instance-0000003c to attach disk [datastore2] fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4/fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4.vmdk or device None with type sparse {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 822.174678] env[61839]: DEBUG oslo_vmware.api [None req-fb2077f5-f93b-4202-be51-2a1ff1444651 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Waiting for the task: (returnval){ [ 822.174678] env[61839]: value = "task-1314407" [ 822.174678] env[61839]: _type = "Task" [ 822.174678] env[61839]: } to complete. 
{{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 822.174866] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-568de9fd-5e34-4a00-938f-31a53e6cb42e {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.184913] env[61839]: DEBUG oslo_vmware.api [None req-fb2077f5-f93b-4202-be51-2a1ff1444651 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': task-1314407, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 822.186341] env[61839]: DEBUG oslo_vmware.api [None req-4445949b-c57d-48df-ad34-d533bc3c88d3 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Waiting for the task: (returnval){ [ 822.186341] env[61839]: value = "task-1314408" [ 822.186341] env[61839]: _type = "Task" [ 822.186341] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 822.196060] env[61839]: DEBUG oslo_vmware.api [None req-4445949b-c57d-48df-ad34-d533bc3c88d3 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1314408, 'name': Rename_Task} progress is 5%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 822.216875] env[61839]: DEBUG oslo_vmware.api [None req-67d71366-93ae-43e4-8ab7-2c9c6a58104b tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Task: {'id': task-1314402, 'name': PowerOffVM_Task, 'duration_secs': 0.291388} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 822.217141] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-67d71366-93ae-43e4-8ab7-2c9c6a58104b tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 5c29c188-a34b-4751-9f8b-166af7b15088] Powered off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 822.222620] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-67d71366-93ae-43e4-8ab7-2c9c6a58104b tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 5c29c188-a34b-4751-9f8b-166af7b15088] Reconfiguring VM instance instance-00000038 to detach disk 2001 {{(pid=61839) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 822.222745] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c3377744-bb9e-4c3f-9145-29d68f408e94 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.244015] env[61839]: DEBUG oslo_vmware.api [None req-6d3177cd-a4b0-4e7c-90c9-b61ffa1d799f tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': task-1314403, 'name': PowerOffVM_Task, 'duration_secs': 0.277468} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 822.245195] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d3177cd-a4b0-4e7c-90c9-b61ffa1d799f tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: a661cc10-5c4e-421b-b70b-189f0a613e8a] Powered off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 822.245375] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-6d3177cd-a4b0-4e7c-90c9-b61ffa1d799f tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: a661cc10-5c4e-421b-b70b-189f0a613e8a] Unregistering the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 822.245680] env[61839]: DEBUG oslo_vmware.api [None req-67d71366-93ae-43e4-8ab7-2c9c6a58104b tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Waiting for the task: (returnval){ [ 822.245680] env[61839]: value = "task-1314409" [ 822.245680] env[61839]: _type = "Task" [ 822.245680] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 822.245875] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5e90eafc-8035-4c0c-ad57-1ab72e7e53fe {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.255433] env[61839]: DEBUG oslo_vmware.api [None req-67d71366-93ae-43e4-8ab7-2c9c6a58104b tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Task: {'id': task-1314409, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 822.307064] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314406, 'name': CreateVM_Task} progress is 99%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 822.353672] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-6d3177cd-a4b0-4e7c-90c9-b61ffa1d799f tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: a661cc10-5c4e-421b-b70b-189f0a613e8a] Unregistered the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 822.353967] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-6d3177cd-a4b0-4e7c-90c9-b61ffa1d799f tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: a661cc10-5c4e-421b-b70b-189f0a613e8a] Deleting contents of the VM from datastore datastore2 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 822.354235] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-6d3177cd-a4b0-4e7c-90c9-b61ffa1d799f tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Deleting the datastore file [datastore2] a661cc10-5c4e-421b-b70b-189f0a613e8a {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 822.354521] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-01628b11-e703-494f-82c0-73a17b92c84f {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.361428] env[61839]: DEBUG oslo_vmware.api [None req-6d3177cd-a4b0-4e7c-90c9-b61ffa1d799f tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Waiting for the task: (returnval){ [ 822.361428] env[61839]: value = "task-1314411" [ 822.361428] env[61839]: _type = "Task" [ 822.361428] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 822.369340] env[61839]: DEBUG oslo_vmware.api [None req-6d3177cd-a4b0-4e7c-90c9-b61ffa1d799f tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': task-1314411, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 822.459564] env[61839]: DEBUG oslo_concurrency.lockutils [None req-ae67ebcf-1bc3-48ee-a99f-0d94b4c40112 tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.002s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 822.462390] env[61839]: DEBUG oslo_concurrency.lockutils [None req-c09e423b-508c-42ac-8e81-438f1973d598 tempest-ServersTestManualDisk-590910019 tempest-ServersTestManualDisk-590910019-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.466s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 822.464513] env[61839]: INFO nova.compute.claims [None req-c09e423b-508c-42ac-8e81-438f1973d598 tempest-ServersTestManualDisk-590910019 tempest-ServersTestManualDisk-590910019-project-member] [instance: 39728872-2d30-48eb-90da-412f1e45971c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 822.482999] env[61839]: INFO nova.scheduler.client.report [None req-ae67ebcf-1bc3-48ee-a99f-0d94b4c40112 tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] Deleted allocations for instance 687e6bf8-cf2a-4a9f-ad87-b42c4d0ced0a [ 822.486205] env[61839]: DEBUG nova.compute.manager [None req-a242d3b0-5131-4888-830a-8c848380688b tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: 694a5d4b-3673-406b-a24a-d37fad33e549] Start spawning the instance on the hypervisor. 
{{(pid=61839) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 822.515204] env[61839]: DEBUG nova.virt.hardware [None req-a242d3b0-5131-4888-830a-8c848380688b tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-18T16:51:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-18T16:50:57Z,direct_url=,disk_format='vmdk',id=e497cc62-282a-4a70-9770-22d80d8a1013,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a98e034276e44534a9feace637762da7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-18T16:50:58Z,virtual_size=,visibility=), allow threads: False {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 822.515541] env[61839]: DEBUG nova.virt.hardware [None req-a242d3b0-5131-4888-830a-8c848380688b tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Flavor limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 822.515712] env[61839]: DEBUG nova.virt.hardware [None req-a242d3b0-5131-4888-830a-8c848380688b tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Image limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 822.515903] env[61839]: DEBUG nova.virt.hardware [None req-a242d3b0-5131-4888-830a-8c848380688b tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Flavor pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 822.516071] env[61839]: DEBUG nova.virt.hardware [None req-a242d3b0-5131-4888-830a-8c848380688b tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Image pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 822.516229] env[61839]: DEBUG nova.virt.hardware [None req-a242d3b0-5131-4888-830a-8c848380688b tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 822.516448] env[61839]: DEBUG nova.virt.hardware [None req-a242d3b0-5131-4888-830a-8c848380688b tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 822.516612] env[61839]: DEBUG nova.virt.hardware [None req-a242d3b0-5131-4888-830a-8c848380688b tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61839) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 822.516786] env[61839]: DEBUG nova.virt.hardware [None req-a242d3b0-5131-4888-830a-8c848380688b tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Got 1 possible topologies {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 822.516962] env[61839]: DEBUG nova.virt.hardware [None req-a242d3b0-5131-4888-830a-8c848380688b tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 822.517164] env[61839]: DEBUG nova.virt.hardware [None req-a242d3b0-5131-4888-830a-8c848380688b tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 822.518373] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f2ed72d-0f7d-402c-a9b0-fd8e467f0028 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.527937] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eef9c069-9740-4175-8cd7-f3e88dc15ceb {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.583400] env[61839]: DEBUG oslo_concurrency.lockutils [req-8c029807-a53a-4082-b82e-c9dcc03afdfd req-c5ab49f3-eb15-4930-b854-956e8a84a873 service nova] Releasing lock "refresh_cache-047080fa-8781-47b1-89d8-2e4c8031b164" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 822.636031] env[61839]: DEBUG nova.network.neutron [None req-6c4e691d-a55a-4273-a9e4-fb954224d381 tempest-ServerMetadataNegativeTestJSON-1868427009 tempest-ServerMetadataNegativeTestJSON-1868427009-project-member] [instance: 406da948-71c7-4c28-9ee3-10af64b1ab51] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 822.673242] env[61839]: DEBUG oslo_concurrency.lockutils [None req-4312ca92-2e7a-4f1e-9c69-7a382d955d89 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 822.687158] env[61839]: DEBUG oslo_vmware.api [None req-fb2077f5-f93b-4202-be51-2a1ff1444651 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': task-1314407, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.259428} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 822.690151] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-fb2077f5-f93b-4202-be51-2a1ff1444651 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 6f43abec-51e2-40e4-8a0f-5a8617a9a9f8] Extended root virtual disk {{(pid=61839) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 822.690957] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-687b663b-934a-49c9-84f6-c313aaacf452 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.698897] env[61839]: DEBUG oslo_vmware.api [None req-4445949b-c57d-48df-ad34-d533bc3c88d3 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1314408, 'name': Rename_Task, 'duration_secs': 0.399398} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 822.708607] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-4445949b-c57d-48df-ad34-d533bc3c88d3 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4] Powering on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 822.717398] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-fb2077f5-f93b-4202-be51-2a1ff1444651 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 6f43abec-51e2-40e4-8a0f-5a8617a9a9f8] Reconfiguring VM instance instance-0000003d to attach disk [datastore2] 6f43abec-51e2-40e4-8a0f-5a8617a9a9f8/6f43abec-51e2-40e4-8a0f-5a8617a9a9f8.vmdk or device None with type sparse {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 822.717720] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6a0535fa-748b-446e-a1e2-f5477a40345f {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.723019] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-608fb256-d2da-4af9-88df-e876978de7fc {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.740835] env[61839]: DEBUG oslo_vmware.api [None req-4445949b-c57d-48df-ad34-d533bc3c88d3 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Waiting for the task: (returnval){ [ 822.740835] env[61839]: value = "task-1314412" [ 822.740835] env[61839]: _type = "Task" [ 822.740835] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 822.741828] env[61839]: DEBUG oslo_vmware.api [None req-fb2077f5-f93b-4202-be51-2a1ff1444651 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Waiting for the task: (returnval){ [ 822.741828] env[61839]: value = "task-1314413" [ 822.741828] env[61839]: _type = "Task" [ 822.741828] env[61839]: } to complete. 
{{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 822.758135] env[61839]: DEBUG oslo_vmware.api [None req-4445949b-c57d-48df-ad34-d533bc3c88d3 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1314412, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 822.762062] env[61839]: DEBUG oslo_vmware.api [None req-fb2077f5-f93b-4202-be51-2a1ff1444651 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': task-1314413, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 822.768298] env[61839]: DEBUG oslo_vmware.api [None req-67d71366-93ae-43e4-8ab7-2c9c6a58104b tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Task: {'id': task-1314409, 'name': ReconfigVM_Task, 'duration_secs': 0.434669} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 822.768560] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-67d71366-93ae-43e4-8ab7-2c9c6a58104b tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 5c29c188-a34b-4751-9f8b-166af7b15088] Reconfigured VM instance instance-00000038 to detach disk 2001 {{(pid=61839) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 822.768742] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-67d71366-93ae-43e4-8ab7-2c9c6a58104b tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 5c29c188-a34b-4751-9f8b-166af7b15088] Powering on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 822.768989] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d51748e4-87af-4e06-8a6e-a817a6d750c2 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.775724] env[61839]: DEBUG oslo_vmware.api [None req-67d71366-93ae-43e4-8ab7-2c9c6a58104b tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Waiting for the task: (returnval){ [ 822.775724] env[61839]: value = "task-1314414" [ 822.775724] env[61839]: _type = "Task" [ 822.775724] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 822.783974] env[61839]: DEBUG oslo_vmware.api [None req-67d71366-93ae-43e4-8ab7-2c9c6a58104b tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Task: {'id': task-1314414, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 822.812388] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314406, 'name': CreateVM_Task, 'duration_secs': 0.538499} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 822.812904] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 047080fa-8781-47b1-89d8-2e4c8031b164] Created VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 822.813934] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0ff6597c-ab3c-4cf9-b3ae-5fc8fc4a3084 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 822.814370] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0ff6597c-ab3c-4cf9-b3ae-5fc8fc4a3084 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 822.814920] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0ff6597c-ab3c-4cf9-b3ae-5fc8fc4a3084 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 822.815389] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e545918b-b88b-4841-a480-a1f8ed5fa294 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.822606] env[61839]: DEBUG oslo_vmware.api [None req-0ff6597c-ab3c-4cf9-b3ae-5fc8fc4a3084 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Waiting for the task: (returnval){ [ 822.822606] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]5277d37f-f31d-e101-c8c0-aa733c31083c" [ 822.822606] env[61839]: _type = "Task" [ 822.822606] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 822.831606] env[61839]: DEBUG oslo_vmware.api [None req-0ff6597c-ab3c-4cf9-b3ae-5fc8fc4a3084 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]5277d37f-f31d-e101-c8c0-aa733c31083c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 822.871175] env[61839]: DEBUG oslo_vmware.api [None req-6d3177cd-a4b0-4e7c-90c9-b61ffa1d799f tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': task-1314411, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.445133} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 822.871635] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-6d3177cd-a4b0-4e7c-90c9-b61ffa1d799f tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Deleted the datastore file {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 822.871960] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-6d3177cd-a4b0-4e7c-90c9-b61ffa1d799f tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: a661cc10-5c4e-421b-b70b-189f0a613e8a] Deleted contents of the VM from datastore datastore2 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 822.872283] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-6d3177cd-a4b0-4e7c-90c9-b61ffa1d799f tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: a661cc10-5c4e-421b-b70b-189f0a613e8a] Instance destroyed {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 822.872602] env[61839]: INFO nova.compute.manager [None req-6d3177cd-a4b0-4e7c-90c9-b61ffa1d799f tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: a661cc10-5c4e-421b-b70b-189f0a613e8a] Took 1.21 seconds to destroy the instance on the hypervisor. [ 822.872972] env[61839]: DEBUG oslo.service.loopingcall [None req-6d3177cd-a4b0-4e7c-90c9-b61ffa1d799f tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.
{{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 822.873323] env[61839]: DEBUG nova.compute.manager [-] [instance: a661cc10-5c4e-421b-b70b-189f0a613e8a] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 822.873509] env[61839]: DEBUG nova.network.neutron [-] [instance: a661cc10-5c4e-421b-b70b-189f0a613e8a] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 822.883044] env[61839]: DEBUG nova.compute.manager [req-dacc26d6-f5af-47ef-88da-f54c79bd1295 req-1c4f4d7a-85e9-49d9-a34d-844dd4b6af65 service nova] [instance: 406da948-71c7-4c28-9ee3-10af64b1ab51] Received event network-vif-plugged-bc314e6e-7005-4b41-8a6a-929b75782af8 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 822.883044] env[61839]: DEBUG oslo_concurrency.lockutils [req-dacc26d6-f5af-47ef-88da-f54c79bd1295 req-1c4f4d7a-85e9-49d9-a34d-844dd4b6af65 service nova] Acquiring lock "406da948-71c7-4c28-9ee3-10af64b1ab51-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 822.883212] env[61839]: DEBUG oslo_concurrency.lockutils [req-dacc26d6-f5af-47ef-88da-f54c79bd1295 req-1c4f4d7a-85e9-49d9-a34d-844dd4b6af65 service nova] Lock "406da948-71c7-4c28-9ee3-10af64b1ab51-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 822.883372] env[61839]: DEBUG oslo_concurrency.lockutils [req-dacc26d6-f5af-47ef-88da-f54c79bd1295 req-1c4f4d7a-85e9-49d9-a34d-844dd4b6af65 service nova] Lock "406da948-71c7-4c28-9ee3-10af64b1ab51-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 822.883546] env[61839]: DEBUG nova.compute.manager [req-dacc26d6-f5af-47ef-88da-f54c79bd1295 req-1c4f4d7a-85e9-49d9-a34d-844dd4b6af65 service nova] [instance: 406da948-71c7-4c28-9ee3-10af64b1ab51] No waiting events found dispatching network-vif-plugged-bc314e6e-7005-4b41-8a6a-929b75782af8 {{(pid=61839) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 822.883719] env[61839]: WARNING nova.compute.manager [req-dacc26d6-f5af-47ef-88da-f54c79bd1295 req-1c4f4d7a-85e9-49d9-a34d-844dd4b6af65 service nova] [instance: 406da948-71c7-4c28-9ee3-10af64b1ab51] Received unexpected event network-vif-plugged-bc314e6e-7005-4b41-8a6a-929b75782af8 for instance with vm_state building and task_state spawning. [ 822.883903] env[61839]: DEBUG nova.compute.manager [req-dacc26d6-f5af-47ef-88da-f54c79bd1295 req-1c4f4d7a-85e9-49d9-a34d-844dd4b6af65 service nova] [instance: 406da948-71c7-4c28-9ee3-10af64b1ab51] Received event network-changed-bc314e6e-7005-4b41-8a6a-929b75782af8 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 822.884353] env[61839]: DEBUG nova.compute.manager [req-dacc26d6-f5af-47ef-88da-f54c79bd1295 req-1c4f4d7a-85e9-49d9-a34d-844dd4b6af65 service nova] [instance: 406da948-71c7-4c28-9ee3-10af64b1ab51] Refreshing instance network info cache due to event network-changed-bc314e6e-7005-4b41-8a6a-929b75782af8.
{{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 822.884710] env[61839]: DEBUG oslo_concurrency.lockutils [req-dacc26d6-f5af-47ef-88da-f54c79bd1295 req-1c4f4d7a-85e9-49d9-a34d-844dd4b6af65 service nova] Acquiring lock "refresh_cache-406da948-71c7-4c28-9ee3-10af64b1ab51" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 822.942325] env[61839]: DEBUG nova.network.neutron [None req-a242d3b0-5131-4888-830a-8c848380688b tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: 694a5d4b-3673-406b-a24a-d37fad33e549] Successfully updated port: 4c2c0fb5-064b-4c53-9498-77b9ddc16884 {{(pid=61839) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 822.994609] env[61839]: DEBUG oslo_concurrency.lockutils [None req-ae67ebcf-1bc3-48ee-a99f-0d94b4c40112 tempest-VolumesAdminNegativeTest-747243869 tempest-VolumesAdminNegativeTest-747243869-project-member] Lock "687e6bf8-cf2a-4a9f-ad87-b42c4d0ced0a" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 26.952s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 822.997366] env[61839]: DEBUG nova.compute.manager [req-71ec9b1e-c5f4-4fe7-8418-e621dbc1bb7e req-87799354-f864-4d27-8ee8-214da3eac190 service nova] [instance: 694a5d4b-3673-406b-a24a-d37fad33e549] Received event network-vif-plugged-4c2c0fb5-064b-4c53-9498-77b9ddc16884 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 822.997585] env[61839]: DEBUG oslo_concurrency.lockutils [req-71ec9b1e-c5f4-4fe7-8418-e621dbc1bb7e req-87799354-f864-4d27-8ee8-214da3eac190 service nova] Acquiring lock "694a5d4b-3673-406b-a24a-d37fad33e549-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 822.997792] env[61839]: DEBUG oslo_concurrency.lockutils [req-71ec9b1e-c5f4-4fe7-8418-e621dbc1bb7e req-87799354-f864-4d27-8ee8-214da3eac190 service nova] Lock "694a5d4b-3673-406b-a24a-d37fad33e549-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 822.997965] env[61839]: DEBUG oslo_concurrency.lockutils [req-71ec9b1e-c5f4-4fe7-8418-e621dbc1bb7e req-87799354-f864-4d27-8ee8-214da3eac190 service nova] Lock "694a5d4b-3673-406b-a24a-d37fad33e549-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 822.998158] env[61839]: DEBUG nova.compute.manager [req-71ec9b1e-c5f4-4fe7-8418-e621dbc1bb7e req-87799354-f864-4d27-8ee8-214da3eac190 service nova] [instance: 694a5d4b-3673-406b-a24a-d37fad33e549] No waiting events found dispatching network-vif-plugged-4c2c0fb5-064b-4c53-9498-77b9ddc16884 {{(pid=61839) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 822.998331] env[61839]: WARNING nova.compute.manager [req-71ec9b1e-c5f4-4fe7-8418-e621dbc1bb7e req-87799354-f864-4d27-8ee8-214da3eac190 service nova] [instance: 694a5d4b-3673-406b-a24a-d37fad33e549] Received unexpected event network-vif-plugged-4c2c0fb5-064b-4c53-9498-77b9ddc16884 for instance with vm_state building and
task_state spawning. [ 823.174871] env[61839]: DEBUG nova.network.neutron [None req-6c4e691d-a55a-4273-a9e4-fb954224d381 tempest-ServerMetadataNegativeTestJSON-1868427009 tempest-ServerMetadataNegativeTestJSON-1868427009-project-member] [instance: 406da948-71c7-4c28-9ee3-10af64b1ab51] Updating instance_info_cache with network_info: [{"id": "bc314e6e-7005-4b41-8a6a-929b75782af8", "address": "fa:16:3e:79:80:f2", "network": {"id": "a12d9cf9-2f2f-4f36-bc04-476565e7aad9", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-126835777-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ebb68c2dc9904844a7549b63ac2cf0fc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4aa1eda7-48b9-4fa2-af0b-94c718313af2", "external-id": "nsx-vlan-transportzone-502", "segmentation_id": 502, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbc314e6e-70", "ovs_interfaceid": "bc314e6e-7005-4b41-8a6a-929b75782af8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 823.258103] env[61839]: DEBUG oslo_vmware.api [None req-fb2077f5-f93b-4202-be51-2a1ff1444651 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': task-1314413, 'name': ReconfigVM_Task, 'duration_secs': 0.309124} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 823.258910] env[61839]: DEBUG oslo_vmware.api [None req-4445949b-c57d-48df-ad34-d533bc3c88d3 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1314412, 'name': PowerOnVM_Task} progress is 1%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.259286] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-fb2077f5-f93b-4202-be51-2a1ff1444651 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 6f43abec-51e2-40e4-8a0f-5a8617a9a9f8] Reconfigured VM instance instance-0000003d to attach disk [datastore2] 6f43abec-51e2-40e4-8a0f-5a8617a9a9f8/6f43abec-51e2-40e4-8a0f-5a8617a9a9f8.vmdk or device None with type sparse {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 823.259840] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-816fcb08-b78c-425f-96ec-a7eda51cc1bb {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.269930] env[61839]: DEBUG oslo_vmware.api [None req-fb2077f5-f93b-4202-be51-2a1ff1444651 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Waiting for the task: (returnval){ [ 823.269930] env[61839]: value = "task-1314415" [ 823.269930] env[61839]: _type = "Task" [ 823.269930] env[61839]: } to complete. 
{{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 823.278952] env[61839]: DEBUG oslo_vmware.api [None req-fb2077f5-f93b-4202-be51-2a1ff1444651 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': task-1314415, 'name': Rename_Task} progress is 5%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.287267] env[61839]: DEBUG oslo_vmware.api [None req-67d71366-93ae-43e4-8ab7-2c9c6a58104b tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Task: {'id': task-1314414, 'name': PowerOnVM_Task, 'duration_secs': 0.435545} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 823.287384] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-67d71366-93ae-43e4-8ab7-2c9c6a58104b tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 5c29c188-a34b-4751-9f8b-166af7b15088] Powered on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 823.287798] env[61839]: DEBUG nova.compute.manager [None req-67d71366-93ae-43e4-8ab7-2c9c6a58104b tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 5c29c188-a34b-4751-9f8b-166af7b15088] Checking state {{(pid=61839) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 823.288415] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bac9b9c-40c7-4fe8-9094-c4aa90d01d21 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.332891] env[61839]: DEBUG oslo_vmware.api [None req-0ff6597c-ab3c-4cf9-b3ae-5fc8fc4a3084 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]5277d37f-f31d-e101-c8c0-aa733c31083c, 'name': SearchDatastore_Task, 'duration_secs': 0.010812} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 823.334137] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0ff6597c-ab3c-4cf9-b3ae-5fc8fc4a3084 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 823.334227] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-0ff6597c-ab3c-4cf9-b3ae-5fc8fc4a3084 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 047080fa-8781-47b1-89d8-2e4c8031b164] Processing image e497cc62-282a-4a70-9770-22d80d8a1013 {{(pid=61839) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 823.334409] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0ff6597c-ab3c-4cf9-b3ae-5fc8fc4a3084 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 823.334570] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0ff6597c-ab3c-4cf9-b3ae-5fc8fc4a3084 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 823.334741] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-0ff6597c-ab3c-4cf9-b3ae-5fc8fc4a3084 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 823.335295] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-28089ca7-86c0-49ad-a727-1baa9d1e4a03 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.345758] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-0ff6597c-ab3c-4cf9-b3ae-5fc8fc4a3084 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 823.345969] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-0ff6597c-ab3c-4cf9-b3ae-5fc8fc4a3084 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61839) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 823.346733] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a2478210-5778-4ad2-858e-317509bb130f {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.355781] env[61839]: DEBUG oslo_vmware.api [None req-0ff6597c-ab3c-4cf9-b3ae-5fc8fc4a3084 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Waiting for the task: (returnval){ [ 823.355781] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]520ac4c3-34b0-4806-9734-78aa2deffa5d" [ 823.355781] env[61839]: _type = "Task" [ 823.355781] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 823.365841] env[61839]: DEBUG oslo_vmware.api [None req-0ff6597c-ab3c-4cf9-b3ae-5fc8fc4a3084 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]520ac4c3-34b0-4806-9734-78aa2deffa5d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.445127] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a242d3b0-5131-4888-830a-8c848380688b tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Acquiring lock "refresh_cache-694a5d4b-3673-406b-a24a-d37fad33e549" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 823.445383] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a242d3b0-5131-4888-830a-8c848380688b tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Acquired lock "refresh_cache-694a5d4b-3673-406b-a24a-d37fad33e549" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 823.445456] env[61839]: DEBUG nova.network.neutron [None req-a242d3b0-5131-4888-830a-8c848380688b tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: 694a5d4b-3673-406b-a24a-d37fad33e549] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 823.677621] env[61839]: DEBUG oslo_concurrency.lockutils [None req-6c4e691d-a55a-4273-a9e4-fb954224d381 tempest-ServerMetadataNegativeTestJSON-1868427009 tempest-ServerMetadataNegativeTestJSON-1868427009-project-member] Releasing lock "refresh_cache-406da948-71c7-4c28-9ee3-10af64b1ab51" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 823.677992] env[61839]: DEBUG nova.compute.manager [None req-6c4e691d-a55a-4273-a9e4-fb954224d381 tempest-ServerMetadataNegativeTestJSON-1868427009 tempest-ServerMetadataNegativeTestJSON-1868427009-project-member] [instance: 406da948-71c7-4c28-9ee3-10af64b1ab51] Instance network_info: |[{"id": "bc314e6e-7005-4b41-8a6a-929b75782af8", "address": "fa:16:3e:79:80:f2", "network": {"id": "a12d9cf9-2f2f-4f36-bc04-476565e7aad9", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-126835777-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": 
[{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ebb68c2dc9904844a7549b63ac2cf0fc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4aa1eda7-48b9-4fa2-af0b-94c718313af2", "external-id": "nsx-vlan-transportzone-502", "segmentation_id": 502, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbc314e6e-70", "ovs_interfaceid": "bc314e6e-7005-4b41-8a6a-929b75782af8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 823.681751] env[61839]: DEBUG oslo_concurrency.lockutils [req-dacc26d6-f5af-47ef-88da-f54c79bd1295 req-1c4f4d7a-85e9-49d9-a34d-844dd4b6af65 service nova] Acquired lock "refresh_cache-406da948-71c7-4c28-9ee3-10af64b1ab51" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 823.681751] env[61839]: DEBUG nova.network.neutron [req-dacc26d6-f5af-47ef-88da-f54c79bd1295 req-1c4f4d7a-85e9-49d9-a34d-844dd4b6af65 service nova] [instance: 406da948-71c7-4c28-9ee3-10af64b1ab51] Refreshing network info cache for port bc314e6e-7005-4b41-8a6a-929b75782af8 {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 823.684968] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-6c4e691d-a55a-4273-a9e4-fb954224d381 tempest-ServerMetadataNegativeTestJSON-1868427009 tempest-ServerMetadataNegativeTestJSON-1868427009-project-member] [instance: 406da948-71c7-4c28-9ee3-10af64b1ab51] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:79:80:f2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4aa1eda7-48b9-4fa2-af0b-94c718313af2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'bc314e6e-7005-4b41-8a6a-929b75782af8', 'vif_model': 'vmxnet3'}] {{(pid=61839) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 823.690951] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c4e691d-a55a-4273-a9e4-fb954224d381 tempest-ServerMetadataNegativeTestJSON-1868427009 tempest-ServerMetadataNegativeTestJSON-1868427009-project-member] Creating folder: Project (ebb68c2dc9904844a7549b63ac2cf0fc). Parent ref: group-v281288. {{(pid=61839) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 823.694123] env[61839]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-47162486-5156-46be-9f40-5b388f918197 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.706369] env[61839]: INFO nova.virt.vmwareapi.vm_util [None req-6c4e691d-a55a-4273-a9e4-fb954224d381 tempest-ServerMetadataNegativeTestJSON-1868427009 tempest-ServerMetadataNegativeTestJSON-1868427009-project-member] Created folder: Project (ebb68c2dc9904844a7549b63ac2cf0fc) in parent group-v281288. [ 823.706584] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c4e691d-a55a-4273-a9e4-fb954224d381 tempest-ServerMetadataNegativeTestJSON-1868427009 tempest-ServerMetadataNegativeTestJSON-1868427009-project-member] Creating folder: Instances. Parent ref: group-v281339. 
{{(pid=61839) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 823.707490] env[61839]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b89a4ea4-c2fd-4187-b041-1aa1d92a4ff2 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.718240] env[61839]: INFO nova.virt.vmwareapi.vm_util [None req-6c4e691d-a55a-4273-a9e4-fb954224d381 tempest-ServerMetadataNegativeTestJSON-1868427009 tempest-ServerMetadataNegativeTestJSON-1868427009-project-member] Created folder: Instances in parent group-v281339. [ 823.718240] env[61839]: DEBUG oslo.service.loopingcall [None req-6c4e691d-a55a-4273-a9e4-fb954224d381 tempest-ServerMetadataNegativeTestJSON-1868427009 tempest-ServerMetadataNegativeTestJSON-1868427009-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 823.718771] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 406da948-71c7-4c28-9ee3-10af64b1ab51] Creating VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 823.719483] env[61839]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4387ea30-c998-491d-832c-0062643cf35b {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.747624] env[61839]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 823.747624] env[61839]: value = "task-1314418" [ 823.747624] env[61839]: _type = "Task" [ 823.747624] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 823.753877] env[61839]: DEBUG oslo_vmware.api [None req-4445949b-c57d-48df-ad34-d533bc3c88d3 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1314412, 'name': PowerOnVM_Task} progress is 64%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.760891] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314418, 'name': CreateVM_Task} progress is 5%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.781621] env[61839]: DEBUG oslo_vmware.api [None req-fb2077f5-f93b-4202-be51-2a1ff1444651 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': task-1314415, 'name': Rename_Task, 'duration_secs': 0.166449} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 823.782015] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-fb2077f5-f93b-4202-be51-2a1ff1444651 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 6f43abec-51e2-40e4-8a0f-5a8617a9a9f8] Powering on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 823.782239] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-84c40b05-d33d-4531-849d-c3dc2d9a7070 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.787834] env[61839]: DEBUG oslo_vmware.api [None req-fb2077f5-f93b-4202-be51-2a1ff1444651 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Waiting for the task: (returnval){ [ 823.787834] env[61839]: value = "task-1314419" [ 823.787834] env[61839]: _type = "Task" [ 823.787834] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 823.792567] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a261bf6-aeb9-4268-9c91-e60f24374461 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.803942] env[61839]: DEBUG oslo_vmware.api [None req-fb2077f5-f93b-4202-be51-2a1ff1444651 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': task-1314419, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.808648] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89594b48-a020-46ce-9833-6304f9aea2cf {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.844893] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d25c8881-4459-4acb-b2d0-80bff0f78df7 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.853468] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7126842-9dd5-41b2-8292-0c5ebd772719 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.868512] env[61839]: DEBUG oslo_vmware.api [None req-0ff6597c-ab3c-4cf9-b3ae-5fc8fc4a3084 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]520ac4c3-34b0-4806-9734-78aa2deffa5d, 'name': SearchDatastore_Task, 'duration_secs': 0.011541} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 823.877531] env[61839]: DEBUG nova.compute.provider_tree [None req-c09e423b-508c-42ac-8e81-438f1973d598 tempest-ServersTestManualDisk-590910019 tempest-ServersTestManualDisk-590910019-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 823.878884] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4af6ab5e-181e-44a6-8340-44ecfd12c4ce {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.886914] env[61839]: DEBUG oslo_vmware.api [None req-0ff6597c-ab3c-4cf9-b3ae-5fc8fc4a3084 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Waiting for the task: (returnval){ [ 823.886914] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]528f5cef-a961-9117-92bd-4c6174337881" [ 823.886914] env[61839]: _type = "Task" [ 823.886914] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 823.895415] env[61839]: DEBUG oslo_vmware.api [None req-0ff6597c-ab3c-4cf9-b3ae-5fc8fc4a3084 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]528f5cef-a961-9117-92bd-4c6174337881, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.986074] env[61839]: DEBUG nova.network.neutron [-] [instance: a661cc10-5c4e-421b-b70b-189f0a613e8a] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 824.013754] env[61839]: DEBUG nova.network.neutron [None req-a242d3b0-5131-4888-830a-8c848380688b tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: 694a5d4b-3673-406b-a24a-d37fad33e549] Instance cache missing network info. 
{{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 824.215274] env[61839]: DEBUG nova.network.neutron [None req-a242d3b0-5131-4888-830a-8c848380688b tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: 694a5d4b-3673-406b-a24a-d37fad33e549] Updating instance_info_cache with network_info: [{"id": "4c2c0fb5-064b-4c53-9498-77b9ddc16884", "address": "fa:16:3e:00:cb:60", "network": {"id": "e012ca9f-236a-42fd-a444-759508fbba7b", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-567894073-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b73ee7e490914f54925597f38c8cc05b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9f3a2eb5-353f-45c5-a73b-869626f4bb13", "external-id": "nsx-vlan-transportzone-411", "segmentation_id": 411, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4c2c0fb5-06", "ovs_interfaceid": "4c2c0fb5-064b-4c53-9498-77b9ddc16884", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 824.266942] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314418, 'name': CreateVM_Task, 'duration_secs': 0.307055} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 824.267461] env[61839]: DEBUG oslo_vmware.api [None req-4445949b-c57d-48df-ad34-d533bc3c88d3 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1314412, 'name': PowerOnVM_Task, 'duration_secs': 1.351421} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 824.267619] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 406da948-71c7-4c28-9ee3-10af64b1ab51] Created VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 824.268800] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-4445949b-c57d-48df-ad34-d533bc3c88d3 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4] Powered on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 824.269035] env[61839]: INFO nova.compute.manager [None req-4445949b-c57d-48df-ad34-d533bc3c88d3 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4] Took 11.39 seconds to spawn the instance on the hypervisor. 
[ 824.270088] env[61839]: DEBUG nova.compute.manager [None req-4445949b-c57d-48df-ad34-d533bc3c88d3 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4] Checking state {{(pid=61839) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}}
[ 824.270088] env[61839]: DEBUG oslo_concurrency.lockutils [None req-6c4e691d-a55a-4273-a9e4-fb954224d381 tempest-ServerMetadataNegativeTestJSON-1868427009 tempest-ServerMetadataNegativeTestJSON-1868427009-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 824.270088] env[61839]: DEBUG oslo_concurrency.lockutils [None req-6c4e691d-a55a-4273-a9e4-fb954224d381 tempest-ServerMetadataNegativeTestJSON-1868427009 tempest-ServerMetadataNegativeTestJSON-1868427009-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 824.270410] env[61839]: DEBUG oslo_concurrency.lockutils [None req-6c4e691d-a55a-4273-a9e4-fb954224d381 tempest-ServerMetadataNegativeTestJSON-1868427009 tempest-ServerMetadataNegativeTestJSON-1868427009-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 824.271176] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8079786-1f45-4e7a-8254-ef93a71a9161 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 824.273706] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9abb9354-8f2c-4e65-84df-8bd8138cb6d4 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 824.286228] env[61839]: DEBUG oslo_vmware.api [None req-6c4e691d-a55a-4273-a9e4-fb954224d381 tempest-ServerMetadataNegativeTestJSON-1868427009 tempest-ServerMetadataNegativeTestJSON-1868427009-project-member] Waiting for the task: (returnval){
[ 824.286228] env[61839]:     value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52806196-69b9-7381-14f3-4ccb088dc0e3"
[ 824.286228] env[61839]:     _type = "Task"
[ 824.286228] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 824.298029] env[61839]: DEBUG oslo_vmware.api [None req-fb2077f5-f93b-4202-be51-2a1ff1444651 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': task-1314419, 'name': PowerOnVM_Task, 'duration_secs': 0.476675} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 824.300747] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-fb2077f5-f93b-4202-be51-2a1ff1444651 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 6f43abec-51e2-40e4-8a0f-5a8617a9a9f8] Powered on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}}
[ 824.301119] env[61839]: INFO nova.compute.manager [None req-fb2077f5-f93b-4202-be51-2a1ff1444651 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 6f43abec-51e2-40e4-8a0f-5a8617a9a9f8] Took 9.05 seconds to spawn the instance on the hypervisor.
[ 824.301187] env[61839]: DEBUG nova.compute.manager [None req-fb2077f5-f93b-4202-be51-2a1ff1444651 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 6f43abec-51e2-40e4-8a0f-5a8617a9a9f8] Checking state {{(pid=61839) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}}
[ 824.301425] env[61839]: DEBUG oslo_vmware.api [None req-6c4e691d-a55a-4273-a9e4-fb954224d381 tempest-ServerMetadataNegativeTestJSON-1868427009 tempest-ServerMetadataNegativeTestJSON-1868427009-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52806196-69b9-7381-14f3-4ccb088dc0e3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 824.304242] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0c54012-1a4e-49ec-be90-40fa222d9aec {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 824.382646] env[61839]: DEBUG nova.scheduler.client.report [None req-c09e423b-508c-42ac-8e81-438f1973d598 tempest-ServersTestManualDisk-590910019 tempest-ServersTestManualDisk-590910019-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 824.399625] env[61839]: DEBUG oslo_vmware.api [None req-0ff6597c-ab3c-4cf9-b3ae-5fc8fc4a3084 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]528f5cef-a961-9117-92bd-4c6174337881, 'name': SearchDatastore_Task, 'duration_secs': 0.009748} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 824.402157] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0ff6597c-ab3c-4cf9-b3ae-5fc8fc4a3084 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 824.402420] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ff6597c-ab3c-4cf9-b3ae-5fc8fc4a3084 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk to [datastore1] 047080fa-8781-47b1-89d8-2e4c8031b164/047080fa-8781-47b1-89d8-2e4c8031b164.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}}
[ 824.403248] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8c884384-ad9d-47ff-b171-b162f896593c {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 824.410422] env[61839]: DEBUG oslo_vmware.api [None req-0ff6597c-ab3c-4cf9-b3ae-5fc8fc4a3084 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Waiting for the task: (returnval){
[ 824.410422] env[61839]:     value = "task-1314420"
[ 824.410422] env[61839]:     _type = "Task"
[ 824.410422] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 824.418627] env[61839]: DEBUG oslo_vmware.api [None req-0ff6597c-ab3c-4cf9-b3ae-5fc8fc4a3084 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Task: {'id': task-1314420, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 824.489585] env[61839]: INFO nova.compute.manager [-] [instance: a661cc10-5c4e-421b-b70b-189f0a613e8a] Took 1.61 seconds to deallocate network for instance.
[ 824.529469] env[61839]: DEBUG nova.network.neutron [req-dacc26d6-f5af-47ef-88da-f54c79bd1295 req-1c4f4d7a-85e9-49d9-a34d-844dd4b6af65 service nova] [instance: 406da948-71c7-4c28-9ee3-10af64b1ab51] Updated VIF entry in instance network info cache for port bc314e6e-7005-4b41-8a6a-929b75782af8. {{(pid=61839) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}}
[ 824.529883] env[61839]: DEBUG nova.network.neutron [req-dacc26d6-f5af-47ef-88da-f54c79bd1295 req-1c4f4d7a-85e9-49d9-a34d-844dd4b6af65 service nova] [instance: 406da948-71c7-4c28-9ee3-10af64b1ab51] Updating instance_info_cache with network_info: [{"id": "bc314e6e-7005-4b41-8a6a-929b75782af8", "address": "fa:16:3e:79:80:f2", "network": {"id": "a12d9cf9-2f2f-4f36-bc04-476565e7aad9", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-126835777-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ebb68c2dc9904844a7549b63ac2cf0fc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4aa1eda7-48b9-4fa2-af0b-94c718313af2", "external-id": "nsx-vlan-transportzone-502", "segmentation_id": 502, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbc314e6e-70", "ovs_interfaceid": "bc314e6e-7005-4b41-8a6a-929b75782af8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 824.718604] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a242d3b0-5131-4888-830a-8c848380688b tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Releasing lock "refresh_cache-694a5d4b-3673-406b-a24a-d37fad33e549" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 824.722021] env[61839]: DEBUG nova.compute.manager [None req-a242d3b0-5131-4888-830a-8c848380688b tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: 694a5d4b-3673-406b-a24a-d37fad33e549] Instance network_info: |[{"id": "4c2c0fb5-064b-4c53-9498-77b9ddc16884", "address": "fa:16:3e:00:cb:60", "network": {"id": "e012ca9f-236a-42fd-a444-759508fbba7b", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-567894073-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b73ee7e490914f54925597f38c8cc05b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9f3a2eb5-353f-45c5-a73b-869626f4bb13", "external-id": "nsx-vlan-transportzone-411", "segmentation_id": 411, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4c2c0fb5-06", "ovs_interfaceid": "4c2c0fb5-064b-4c53-9498-77b9ddc16884", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}}
[ 824.722239] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-a242d3b0-5131-4888-830a-8c848380688b tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: 694a5d4b-3673-406b-a24a-d37fad33e549] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:00:cb:60', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9f3a2eb5-353f-45c5-a73b-869626f4bb13', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4c2c0fb5-064b-4c53-9498-77b9ddc16884', 'vif_model': 'vmxnet3'}] {{(pid=61839) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}}
[ 824.726988] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-a242d3b0-5131-4888-830a-8c848380688b tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Creating folder: Project (b73ee7e490914f54925597f38c8cc05b). Parent ref: group-v281288. {{(pid=61839) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}}
[ 824.727317] env[61839]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a4a5c654-dc29-4335-b248-ea9ef1af2df6 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 824.737548] env[61839]: INFO nova.virt.vmwareapi.vm_util [None req-a242d3b0-5131-4888-830a-8c848380688b tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Created folder: Project (b73ee7e490914f54925597f38c8cc05b) in parent group-v281288.
[ 824.737829] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-a242d3b0-5131-4888-830a-8c848380688b tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Creating folder: Instances. Parent ref: group-v281342. {{(pid=61839) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}}
[ 824.738144] env[61839]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-57909f6b-3094-4a6e-bf5f-bda52f97b13d {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 824.751272] env[61839]: INFO nova.virt.vmwareapi.vm_util [None req-a242d3b0-5131-4888-830a-8c848380688b tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Created folder: Instances in parent group-v281342.
[ 824.751586] env[61839]: DEBUG oslo.service.loopingcall [None req-a242d3b0-5131-4888-830a-8c848380688b tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 824.751876] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 694a5d4b-3673-406b-a24a-d37fad33e549] Creating VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}}
[ 824.752204] env[61839]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5d2e8ded-b77d-465f-bad6-50db76912138 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 824.782892] env[61839]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){
[ 824.782892] env[61839]:     value = "task-1314423"
[ 824.782892] env[61839]:     _type = "Task"
[ 824.782892] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 824.816149] env[61839]: INFO nova.compute.manager [None req-4445949b-c57d-48df-ad34-d533bc3c88d3 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4] Took 33.78 seconds to build instance.
[ 824.824405] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314423, 'name': CreateVM_Task} progress is 15%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 824.832634] env[61839]: DEBUG oslo_vmware.api [None req-6c4e691d-a55a-4273-a9e4-fb954224d381 tempest-ServerMetadataNegativeTestJSON-1868427009 tempest-ServerMetadataNegativeTestJSON-1868427009-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52806196-69b9-7381-14f3-4ccb088dc0e3, 'name': SearchDatastore_Task, 'duration_secs': 0.031226} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 824.833381] env[61839]: INFO nova.compute.manager [None req-fb2077f5-f93b-4202-be51-2a1ff1444651 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 6f43abec-51e2-40e4-8a0f-5a8617a9a9f8] Took 31.95 seconds to build instance.
[ 824.834870] env[61839]: DEBUG oslo_concurrency.lockutils [None req-6c4e691d-a55a-4273-a9e4-fb954224d381 tempest-ServerMetadataNegativeTestJSON-1868427009 tempest-ServerMetadataNegativeTestJSON-1868427009-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 824.835125] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-6c4e691d-a55a-4273-a9e4-fb954224d381 tempest-ServerMetadataNegativeTestJSON-1868427009 tempest-ServerMetadataNegativeTestJSON-1868427009-project-member] [instance: 406da948-71c7-4c28-9ee3-10af64b1ab51] Processing image e497cc62-282a-4a70-9770-22d80d8a1013 {{(pid=61839) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}}
[ 824.835719] env[61839]: DEBUG oslo_concurrency.lockutils [None req-6c4e691d-a55a-4273-a9e4-fb954224d381 tempest-ServerMetadataNegativeTestJSON-1868427009 tempest-ServerMetadataNegativeTestJSON-1868427009-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 824.835719] env[61839]: DEBUG oslo_concurrency.lockutils [None req-6c4e691d-a55a-4273-a9e4-fb954224d381 tempest-ServerMetadataNegativeTestJSON-1868427009 tempest-ServerMetadataNegativeTestJSON-1868427009-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 824.836033] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-6c4e691d-a55a-4273-a9e4-fb954224d381 tempest-ServerMetadataNegativeTestJSON-1868427009 tempest-ServerMetadataNegativeTestJSON-1868427009-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 824.836775] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6577eeda-92a3-42d3-a692-8192079706b5 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 824.846991] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-6c4e691d-a55a-4273-a9e4-fb954224d381 tempest-ServerMetadataNegativeTestJSON-1868427009 tempest-ServerMetadataNegativeTestJSON-1868427009-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 824.848109] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-6c4e691d-a55a-4273-a9e4-fb954224d381 tempest-ServerMetadataNegativeTestJSON-1868427009 tempest-ServerMetadataNegativeTestJSON-1868427009-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61839) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}}
[ 824.848516] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5415786e-cd30-424b-b124-c83ea60e157c {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 824.855087] env[61839]: DEBUG oslo_vmware.api [None req-6c4e691d-a55a-4273-a9e4-fb954224d381 tempest-ServerMetadataNegativeTestJSON-1868427009 tempest-ServerMetadataNegativeTestJSON-1868427009-project-member] Waiting for the task: (returnval){
[ 824.855087] env[61839]:     value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]524bf16d-f888-1ad3-e520-d1529b17ad1b"
[ 824.855087] env[61839]:     _type = "Task"
[ 824.855087] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 824.864968] env[61839]: DEBUG oslo_vmware.api [None req-6c4e691d-a55a-4273-a9e4-fb954224d381 tempest-ServerMetadataNegativeTestJSON-1868427009 tempest-ServerMetadataNegativeTestJSON-1868427009-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]524bf16d-f888-1ad3-e520-d1529b17ad1b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 824.888028] env[61839]: DEBUG oslo_concurrency.lockutils [None req-c09e423b-508c-42ac-8e81-438f1973d598 tempest-ServersTestManualDisk-590910019 tempest-ServersTestManualDisk-590910019-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.425s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 824.888028] env[61839]: DEBUG nova.compute.manager [None req-c09e423b-508c-42ac-8e81-438f1973d598 tempest-ServersTestManualDisk-590910019 tempest-ServersTestManualDisk-590910019-project-member] [instance: 39728872-2d30-48eb-90da-412f1e45971c] Start building networks asynchronously for instance. {{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}}
[ 824.891033] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3b56f212-0cbc-4766-86f9-61b8fddca264 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.206s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 824.894737] env[61839]: INFO nova.compute.claims [None req-3b56f212-0cbc-4766-86f9-61b8fddca264 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] [instance: 49d4720b-83e3-47d9-b727-5bb255de2e7c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 824.920197] env[61839]: DEBUG nova.compute.manager [req-f541fab9-ba89-4805-9bff-ff81f7a854a8 req-74efc6d6-b7c8-4d99-8ac5-f10906af3804 service nova] [instance: a661cc10-5c4e-421b-b70b-189f0a613e8a] Received event network-vif-deleted-2adc8fda-8606-45a2-95cb-f775d70870e8 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}}
[ 824.924220] env[61839]: DEBUG oslo_vmware.api [None req-0ff6597c-ab3c-4cf9-b3ae-5fc8fc4a3084 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Task: {'id': task-1314420, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 825.003011] env[61839]: DEBUG oslo_concurrency.lockutils [None req-6d3177cd-a4b0-4e7c-90c9-b61ffa1d799f tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 825.030408] env[61839]: DEBUG nova.compute.manager [req-f68b0b0b-c6d1-451b-96bc-80c85fc20514 req-d3adab6d-39cb-4425-98f3-e40a597cc837 service nova] [instance: 694a5d4b-3673-406b-a24a-d37fad33e549] Received event network-changed-4c2c0fb5-064b-4c53-9498-77b9ddc16884 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}}
[ 825.030690] env[61839]: DEBUG nova.compute.manager [req-f68b0b0b-c6d1-451b-96bc-80c85fc20514 req-d3adab6d-39cb-4425-98f3-e40a597cc837 service nova] [instance: 694a5d4b-3673-406b-a24a-d37fad33e549] Refreshing instance network info cache due to event network-changed-4c2c0fb5-064b-4c53-9498-77b9ddc16884. {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}}
[ 825.030829] env[61839]: DEBUG oslo_concurrency.lockutils [req-f68b0b0b-c6d1-451b-96bc-80c85fc20514 req-d3adab6d-39cb-4425-98f3-e40a597cc837 service nova] Acquiring lock "refresh_cache-694a5d4b-3673-406b-a24a-d37fad33e549" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 825.030980] env[61839]: DEBUG oslo_concurrency.lockutils [req-f68b0b0b-c6d1-451b-96bc-80c85fc20514 req-d3adab6d-39cb-4425-98f3-e40a597cc837 service nova] Acquired lock "refresh_cache-694a5d4b-3673-406b-a24a-d37fad33e549" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 825.031164] env[61839]: DEBUG nova.network.neutron [req-f68b0b0b-c6d1-451b-96bc-80c85fc20514 req-d3adab6d-39cb-4425-98f3-e40a597cc837 service nova] [instance: 694a5d4b-3673-406b-a24a-d37fad33e549] Refreshing network info cache for port 4c2c0fb5-064b-4c53-9498-77b9ddc16884 {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}}
[ 825.033013] env[61839]: DEBUG oslo_concurrency.lockutils [req-dacc26d6-f5af-47ef-88da-f54c79bd1295 req-1c4f4d7a-85e9-49d9-a34d-844dd4b6af65 service nova] Releasing lock "refresh_cache-406da948-71c7-4c28-9ee3-10af64b1ab51" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 825.296559] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314423, 'name': CreateVM_Task} progress is 25%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 825.329045] env[61839]: DEBUG oslo_concurrency.lockutils [None req-4445949b-c57d-48df-ad34-d533bc3c88d3 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Lock "fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 128.469s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 825.337802] env[61839]: DEBUG oslo_concurrency.lockutils [None req-fb2077f5-f93b-4202-be51-2a1ff1444651 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Lock "6f43abec-51e2-40e4-8a0f-5a8617a9a9f8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 109.497s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 825.367221] env[61839]: DEBUG oslo_vmware.api [None req-6c4e691d-a55a-4273-a9e4-fb954224d381 tempest-ServerMetadataNegativeTestJSON-1868427009 tempest-ServerMetadataNegativeTestJSON-1868427009-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]524bf16d-f888-1ad3-e520-d1529b17ad1b, 'name': SearchDatastore_Task, 'duration_secs': 0.01087} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 825.368089] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-99a270ed-5e7e-40c0-b531-54b85d7a0d64 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 825.374354] env[61839]: DEBUG oslo_vmware.api [None req-6c4e691d-a55a-4273-a9e4-fb954224d381 tempest-ServerMetadataNegativeTestJSON-1868427009 tempest-ServerMetadataNegativeTestJSON-1868427009-project-member] Waiting for the task: (returnval){
[ 825.374354] env[61839]:     value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52ae007b-9304-c478-a6cd-97d5b2621b8e"
[ 825.374354] env[61839]:     _type = "Task"
[ 825.374354] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 825.383252] env[61839]: DEBUG oslo_vmware.api [None req-6c4e691d-a55a-4273-a9e4-fb954224d381 tempest-ServerMetadataNegativeTestJSON-1868427009 tempest-ServerMetadataNegativeTestJSON-1868427009-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52ae007b-9304-c478-a6cd-97d5b2621b8e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 825.398166] env[61839]: DEBUG nova.compute.utils [None req-c09e423b-508c-42ac-8e81-438f1973d598 tempest-ServersTestManualDisk-590910019 tempest-ServersTestManualDisk-590910019-project-member] Using /dev/sd instead of None {{(pid=61839) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}}
[ 825.399499] env[61839]: DEBUG nova.compute.manager [None req-c09e423b-508c-42ac-8e81-438f1973d598 tempest-ServersTestManualDisk-590910019 tempest-ServersTestManualDisk-590910019-project-member] [instance: 39728872-2d30-48eb-90da-412f1e45971c] Allocating IP information in the background. {{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}}
[ 825.399819] env[61839]: DEBUG nova.network.neutron [None req-c09e423b-508c-42ac-8e81-438f1973d598 tempest-ServersTestManualDisk-590910019 tempest-ServersTestManualDisk-590910019-project-member] [instance: 39728872-2d30-48eb-90da-412f1e45971c] allocate_for_instance() {{(pid=61839) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}}
[ 825.425292] env[61839]: DEBUG oslo_vmware.api [None req-0ff6597c-ab3c-4cf9-b3ae-5fc8fc4a3084 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Task: {'id': task-1314420, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.847463} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 825.425515] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ff6597c-ab3c-4cf9-b3ae-5fc8fc4a3084 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk to [datastore1] 047080fa-8781-47b1-89d8-2e4c8031b164/047080fa-8781-47b1-89d8-2e4c8031b164.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}}
[ 825.425787] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-0ff6597c-ab3c-4cf9-b3ae-5fc8fc4a3084 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 047080fa-8781-47b1-89d8-2e4c8031b164] Extending root virtual disk to 1048576 {{(pid=61839) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}}
[ 825.426139] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-fc606a8b-2731-4c11-a0a2-b638aa6b5edc {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 825.433719] env[61839]: DEBUG oslo_vmware.api [None req-0ff6597c-ab3c-4cf9-b3ae-5fc8fc4a3084 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Waiting for the task: (returnval){
[ 825.433719] env[61839]:     value = "task-1314424"
[ 825.433719] env[61839]:     _type = "Task"
[ 825.433719] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 825.443675] env[61839]: DEBUG oslo_vmware.api [None req-0ff6597c-ab3c-4cf9-b3ae-5fc8fc4a3084 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Task: {'id': task-1314424, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 825.544700] env[61839]: DEBUG nova.policy [None req-c09e423b-508c-42ac-8e81-438f1973d598 tempest-ServersTestManualDisk-590910019 tempest-ServersTestManualDisk-590910019-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '31150683ebde44ad8f7d22d416f6dd65', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd9a08aa6b2064b06874f5a2a3063d8b2', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61839) authorize /opt/stack/nova/nova/policy.py:201}}
[ 825.800356] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314423, 'name': CreateVM_Task, 'duration_secs': 0.681329} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 825.800356] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 694a5d4b-3673-406b-a24a-d37fad33e549] Created VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}}
[ 825.801520] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a242d3b0-5131-4888-830a-8c848380688b tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 825.801520] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a242d3b0-5131-4888-830a-8c848380688b tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 825.801687] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a242d3b0-5131-4888-830a-8c848380688b tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 825.802015] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5885682b-b47c-4c49-a061-977ae1e425bd {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 825.807483] env[61839]: DEBUG oslo_vmware.api [None req-a242d3b0-5131-4888-830a-8c848380688b tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Waiting for the task: (returnval){
[ 825.807483] env[61839]:     value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]524f692d-f89b-36e1-468c-35a0036e9b05"
[ 825.807483] env[61839]:     _type = "Task"
[ 825.807483] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 825.823029] env[61839]: DEBUG oslo_vmware.api [None req-a242d3b0-5131-4888-830a-8c848380688b tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]524f692d-f89b-36e1-468c-35a0036e9b05, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 825.835022] env[61839]: DEBUG nova.compute.manager [None req-0e959dc4-0080-49d1-93fb-cad2bd59d9d7 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] [instance: 2cb53e37-8b0b-48b7-a973-061b91df46df] Starting instance... {{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}}
[ 825.842608] env[61839]: DEBUG nova.compute.manager [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] [instance: ce59c937-fc0b-464f-baaa-461c6f6c2d57] Starting instance... {{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}}
[ 825.884400] env[61839]: DEBUG oslo_vmware.api [None req-6c4e691d-a55a-4273-a9e4-fb954224d381 tempest-ServerMetadataNegativeTestJSON-1868427009 tempest-ServerMetadataNegativeTestJSON-1868427009-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52ae007b-9304-c478-a6cd-97d5b2621b8e, 'name': SearchDatastore_Task, 'duration_secs': 0.009818} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 825.885422] env[61839]: DEBUG oslo_concurrency.lockutils [None req-6c4e691d-a55a-4273-a9e4-fb954224d381 tempest-ServerMetadataNegativeTestJSON-1868427009 tempest-ServerMetadataNegativeTestJSON-1868427009-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 825.885815] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c4e691d-a55a-4273-a9e4-fb954224d381 tempest-ServerMetadataNegativeTestJSON-1868427009 tempest-ServerMetadataNegativeTestJSON-1868427009-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk to [datastore1] 406da948-71c7-4c28-9ee3-10af64b1ab51/406da948-71c7-4c28-9ee3-10af64b1ab51.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}}
[ 825.886217] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fed18e68-90d5-4bda-9623-aa9833f1b800 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 825.893380] env[61839]: DEBUG oslo_vmware.api [None req-6c4e691d-a55a-4273-a9e4-fb954224d381 tempest-ServerMetadataNegativeTestJSON-1868427009 tempest-ServerMetadataNegativeTestJSON-1868427009-project-member] Waiting for the task: (returnval){
[ 825.893380] env[61839]:     value = "task-1314425"
[ 825.893380] env[61839]:     _type = "Task"
[ 825.893380] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 825.903731] env[61839]: DEBUG oslo_vmware.api [None req-6c4e691d-a55a-4273-a9e4-fb954224d381 tempest-ServerMetadataNegativeTestJSON-1868427009 tempest-ServerMetadataNegativeTestJSON-1868427009-project-member] Task: {'id': task-1314425, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 825.907706] env[61839]: DEBUG nova.compute.manager [None req-c09e423b-508c-42ac-8e81-438f1973d598 tempest-ServersTestManualDisk-590910019 tempest-ServersTestManualDisk-590910019-project-member] [instance: 39728872-2d30-48eb-90da-412f1e45971c] Start building block device mappings for instance. {{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}}
[ 825.950348] env[61839]: DEBUG oslo_vmware.api [None req-0ff6597c-ab3c-4cf9-b3ae-5fc8fc4a3084 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Task: {'id': task-1314424, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.092087} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 825.950642] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-0ff6597c-ab3c-4cf9-b3ae-5fc8fc4a3084 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 047080fa-8781-47b1-89d8-2e4c8031b164] Extended root virtual disk {{(pid=61839) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}}
[ 825.951477] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35b980aa-31b3-4c7d-aa63-482f8cbbbef4 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 825.985183] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-0ff6597c-ab3c-4cf9-b3ae-5fc8fc4a3084 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 047080fa-8781-47b1-89d8-2e4c8031b164] Reconfiguring VM instance instance-0000003e to attach disk [datastore1] 047080fa-8781-47b1-89d8-2e4c8031b164/047080fa-8781-47b1-89d8-2e4c8031b164.vmdk or device None with type sparse {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}}
[ 825.988464] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-79cfbe6c-7050-4a43-b73e-7e47da769148 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 826.008455] env[61839]: DEBUG oslo_vmware.api [None req-0ff6597c-ab3c-4cf9-b3ae-5fc8fc4a3084 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Waiting for the task: (returnval){
[ 826.008455] env[61839]:     value = "task-1314426"
[ 826.008455] env[61839]:     _type = "Task"
[ 826.008455] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 826.020283] env[61839]: DEBUG oslo_vmware.api [None req-0ff6597c-ab3c-4cf9-b3ae-5fc8fc4a3084 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Task: {'id': task-1314426, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 826.265600] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bdab361-b121-4f87-84d6-a433d81fe7d1 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 826.273953] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98be5e9a-2a97-4435-a899-db62f589f78d {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 826.313322] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d061cbc-f393-4cca-b8d1-82bd9eb27bfa {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 826.323563] env[61839]: DEBUG oslo_vmware.api [None req-a242d3b0-5131-4888-830a-8c848380688b tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]524f692d-f89b-36e1-468c-35a0036e9b05, 'name': SearchDatastore_Task, 'duration_secs': 0.013839} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 826.324267] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a242d3b0-5131-4888-830a-8c848380688b tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 826.324513] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-a242d3b0-5131-4888-830a-8c848380688b tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: 694a5d4b-3673-406b-a24a-d37fad33e549] Processing image e497cc62-282a-4a70-9770-22d80d8a1013 {{(pid=61839) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}}
[ 826.325566] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a242d3b0-5131-4888-830a-8c848380688b tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 826.325566] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a242d3b0-5131-4888-830a-8c848380688b tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 826.325566] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-a242d3b0-5131-4888-830a-8c848380688b tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 826.325566] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3d15c4c6-b8ed-4d51-a2bb-2dbe51b88724 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 826.328149] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bbd10e5-40f8-4953-988c-9f386d00627a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 826.353601] env[61839]: DEBUG nova.compute.provider_tree [None req-3b56f212-0cbc-4766-86f9-61b8fddca264 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] Updating inventory in ProviderTree for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}}
[ 826.362313] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-a242d3b0-5131-4888-830a-8c848380688b tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 826.362499] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-a242d3b0-5131-4888-830a-8c848380688b tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61839) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}}
[ 826.364123] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-368519d0-f74e-4f82-9e8f-1ac6a7b9a70e {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 826.374022] env[61839]: DEBUG oslo_vmware.api [None req-a242d3b0-5131-4888-830a-8c848380688b tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Waiting for the task: (returnval){
[ 826.374022] env[61839]:     value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52ef4e4c-1fb2-bfe0-c408-428e9f6c4134"
[ 826.374022] env[61839]:     _type = "Task"
[ 826.374022] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 826.382193] env[61839]: DEBUG oslo_vmware.api [None req-a242d3b0-5131-4888-830a-8c848380688b tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52ef4e4c-1fb2-bfe0-c408-428e9f6c4134, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 826.387152] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 826.388130] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0e959dc4-0080-49d1-93fb-cad2bd59d9d7 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 826.406022] env[61839]: DEBUG oslo_vmware.api [None req-6c4e691d-a55a-4273-a9e4-fb954224d381 tempest-ServerMetadataNegativeTestJSON-1868427009 tempest-ServerMetadataNegativeTestJSON-1868427009-project-member] Task: {'id': task-1314425, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 826.518595] env[61839]: DEBUG oslo_vmware.api [None req-0ff6597c-ab3c-4cf9-b3ae-5fc8fc4a3084 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Task: {'id': task-1314426, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 826.626771] env[61839]: DEBUG nova.network.neutron [req-f68b0b0b-c6d1-451b-96bc-80c85fc20514 req-d3adab6d-39cb-4425-98f3-e40a597cc837 service nova] [instance: 694a5d4b-3673-406b-a24a-d37fad33e549] Updated VIF entry in instance network info cache for port 4c2c0fb5-064b-4c53-9498-77b9ddc16884. {{(pid=61839) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}}
[ 826.627227] env[61839]: DEBUG nova.network.neutron [req-f68b0b0b-c6d1-451b-96bc-80c85fc20514 req-d3adab6d-39cb-4425-98f3-e40a597cc837 service nova] [instance: 694a5d4b-3673-406b-a24a-d37fad33e549] Updating instance_info_cache with network_info: [{"id": "4c2c0fb5-064b-4c53-9498-77b9ddc16884", "address": "fa:16:3e:00:cb:60", "network": {"id": "e012ca9f-236a-42fd-a444-759508fbba7b", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-567894073-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b73ee7e490914f54925597f38c8cc05b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9f3a2eb5-353f-45c5-a73b-869626f4bb13", "external-id": "nsx-vlan-transportzone-411", "segmentation_id": 411, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4c2c0fb5-06", "ovs_interfaceid": "4c2c0fb5-064b-4c53-9498-77b9ddc16884", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 826.884315] env[61839]: DEBUG oslo_vmware.api [None req-a242d3b0-5131-4888-830a-8c848380688b tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52ef4e4c-1fb2-bfe0-c408-428e9f6c4134, 'name': SearchDatastore_Task, 'duration_secs': 0.033796} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 826.885220] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0037f99e-aef1-4c2f-ae3b-496feaf9f2b4 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 826.890853] env[61839]: ERROR nova.scheduler.client.report [None req-3b56f212-0cbc-4766-86f9-61b8fddca264 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] [req-61ae48fa-29e0-437c-8598-a2590ca365ef] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID cef329e6-1ccd-42a8-bbc4-109a06d1c908. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-61ae48fa-29e0-437c-8598-a2590ca365ef"}]}
[ 826.903094] env[61839]: DEBUG nova.network.neutron [None req-c09e423b-508c-42ac-8e81-438f1973d598 tempest-ServersTestManualDisk-590910019 tempest-ServersTestManualDisk-590910019-project-member] [instance: 39728872-2d30-48eb-90da-412f1e45971c] Successfully created port: 2615f303-83dd-4c9f-b09e-7cfb5da3276f {{(pid=61839) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}}
[ 826.905993] env[61839]: DEBUG oslo_vmware.api [None req-a242d3b0-5131-4888-830a-8c848380688b tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Waiting for the task: (returnval){
[ 826.905993] env[61839]:     value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52576148-b1de-3078-32cb-a5762b28f7e5"
[ 826.905993] env[61839]:     _type = "Task"
[ 826.905993] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 826.914683] env[61839]: DEBUG oslo_vmware.api [None req-6c4e691d-a55a-4273-a9e4-fb954224d381 tempest-ServerMetadataNegativeTestJSON-1868427009 tempest-ServerMetadataNegativeTestJSON-1868427009-project-member] Task: {'id': task-1314425, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 826.915866] env[61839]: DEBUG nova.scheduler.client.report [None req-3b56f212-0cbc-4766-86f9-61b8fddca264 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] Refreshing inventories for resource provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}}
[ 826.921057] env[61839]: DEBUG nova.compute.manager [None req-c09e423b-508c-42ac-8e81-438f1973d598 tempest-ServersTestManualDisk-590910019 tempest-ServersTestManualDisk-590910019-project-member] [instance: 39728872-2d30-48eb-90da-412f1e45971c] Start spawning the instance on the hypervisor. {{(pid=61839) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}}
[ 826.926956] env[61839]: DEBUG oslo_vmware.api [None req-a242d3b0-5131-4888-830a-8c848380688b tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52576148-b1de-3078-32cb-a5762b28f7e5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 826.934751] env[61839]: DEBUG nova.scheduler.client.report [None req-3b56f212-0cbc-4766-86f9-61b8fddca264 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] Updating ProviderTree inventory for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}}
[ 826.934751] env[61839]: DEBUG nova.compute.provider_tree [None req-3b56f212-0cbc-4766-86f9-61b8fddca264 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] Updating inventory in ProviderTree for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}}
[ 826.956980] env[61839]: DEBUG nova.scheduler.client.report [None req-3b56f212-0cbc-4766-86f9-61b8fddca264 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] Refreshing aggregate associations for resource provider cef329e6-1ccd-42a8-bbc4-109a06d1c908, aggregates: None {{(pid=61839) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}}
[ 826.963900] env[61839]: DEBUG nova.virt.hardware [None req-c09e423b-508c-42ac-8e81-438f1973d598 tempest-ServersTestManualDisk-590910019 tempest-ServersTestManualDisk-590910019-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-18T16:51:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-18T16:50:57Z,direct_url=<?>,disk_format='vmdk',id=e497cc62-282a-4a70-9770-22d80d8a1013,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a98e034276e44534a9feace637762da7',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-10-18T16:50:58Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}}
[ 826.964262] env[61839]: DEBUG nova.virt.hardware [None req-c09e423b-508c-42ac-8e81-438f1973d598 tempest-ServersTestManualDisk-590910019 tempest-ServersTestManualDisk-590910019-project-member] Flavor limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}}
[ 826.964355] env[61839]: DEBUG nova.virt.hardware [None req-c09e423b-508c-42ac-8e81-438f1973d598 tempest-ServersTestManualDisk-590910019 tempest-ServersTestManualDisk-590910019-project-member] Image limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 826.964545] env[61839]: DEBUG nova.virt.hardware [None req-c09e423b-508c-42ac-8e81-438f1973d598 tempest-ServersTestManualDisk-590910019 tempest-ServersTestManualDisk-590910019-project-member] Flavor pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}}
[ 826.964696] env[61839]: DEBUG nova.virt.hardware [None req-c09e423b-508c-42ac-8e81-438f1973d598 tempest-ServersTestManualDisk-590910019 tempest-ServersTestManualDisk-590910019-project-member] Image pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 826.964845] env[61839]: DEBUG nova.virt.hardware [None req-c09e423b-508c-42ac-8e81-438f1973d598 tempest-ServersTestManualDisk-590910019 tempest-ServersTestManualDisk-590910019-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}}
[ 826.965114] env[61839]: DEBUG nova.virt.hardware [None req-c09e423b-508c-42ac-8e81-438f1973d598 tempest-ServersTestManualDisk-590910019 tempest-ServersTestManualDisk-590910019-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}}
[ 826.965370] env[61839]: DEBUG nova.virt.hardware [None req-c09e423b-508c-42ac-8e81-438f1973d598 tempest-ServersTestManualDisk-590910019 tempest-ServersTestManualDisk-590910019-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}}
[ 826.965458] env[61839]: DEBUG nova.virt.hardware [None req-c09e423b-508c-42ac-8e81-438f1973d598 tempest-ServersTestManualDisk-590910019 tempest-ServersTestManualDisk-590910019-project-member] Got 1 possible topologies {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}}
[ 826.965629] env[61839]: DEBUG nova.virt.hardware [None req-c09e423b-508c-42ac-8e81-438f1973d598 tempest-ServersTestManualDisk-590910019 tempest-ServersTestManualDisk-590910019-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}}
[ 826.965808] env[61839]: DEBUG nova.virt.hardware [None req-c09e423b-508c-42ac-8e81-438f1973d598 tempest-ServersTestManualDisk-590910019 tempest-ServersTestManualDisk-590910019-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
[ 826.966753] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c4b41f1-04d9-400c-a802-fd4d1f21488d {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 826.979421] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c92ac9da-8342-41a8-b83d-2669cfc47033 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 826.989277] env[61839]: DEBUG nova.scheduler.client.report [None req-3b56f212-0cbc-4766-86f9-61b8fddca264 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] Refreshing trait associations for resource provider cef329e6-1ccd-42a8-bbc4-109a06d1c908, traits: COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=61839) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}}
[ 827.025757] env[61839]: DEBUG oslo_vmware.api [None req-0ff6597c-ab3c-4cf9-b3ae-5fc8fc4a3084 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Task: {'id': task-1314426, 'name': ReconfigVM_Task, 'duration_secs': 0.912296} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 827.026268] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-0ff6597c-ab3c-4cf9-b3ae-5fc8fc4a3084 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 047080fa-8781-47b1-89d8-2e4c8031b164] Reconfigured VM instance instance-0000003e to attach disk [datastore1] 047080fa-8781-47b1-89d8-2e4c8031b164/047080fa-8781-47b1-89d8-2e4c8031b164.vmdk or device None with type sparse {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}}
[ 827.027123] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-726f2d9a-af80-45d0-b29e-118c8b2e86d8 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 827.034072] env[61839]: DEBUG oslo_vmware.api [None req-0ff6597c-ab3c-4cf9-b3ae-5fc8fc4a3084 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Waiting for the task: (returnval){
[ 827.034072] env[61839]:     value = "task-1314427"
[ 827.034072] env[61839]:     _type = "Task"
[ 827.034072] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 827.046902] env[61839]: DEBUG oslo_vmware.api [None req-0ff6597c-ab3c-4cf9-b3ae-5fc8fc4a3084 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Task: {'id': task-1314427, 'name': Rename_Task} progress is 5%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 827.130277] env[61839]: DEBUG oslo_concurrency.lockutils [req-f68b0b0b-c6d1-451b-96bc-80c85fc20514 req-d3adab6d-39cb-4425-98f3-e40a597cc837 service nova] Releasing lock "refresh_cache-694a5d4b-3673-406b-a24a-d37fad33e549" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 827.246742] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a774af97-4099-489d-9f9d-978b25469ec3 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Acquiring lock "36485aa7-51c9-4cab-bfbd-739fdc09af17" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 827.246993] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a774af97-4099-489d-9f9d-978b25469ec3 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Lock "36485aa7-51c9-4cab-bfbd-739fdc09af17" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 827.363926] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1648630-b229-4967-8804-1592d0901ec9 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 827.374522] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88d59f6b-cbb9-45bc-8a4e-fb8f555a60e3 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 827.416603] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0fc4717-b683-4ab0-9757-acc4f6a5bd55 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 827.429532] env[61839]: DEBUG oslo_vmware.api [None req-a242d3b0-5131-4888-830a-8c848380688b tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52576148-b1de-3078-32cb-a5762b28f7e5, 'name': SearchDatastore_Task, 'duration_secs': 0.063886} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 827.429890] env[61839]: DEBUG oslo_vmware.api [None req-6c4e691d-a55a-4273-a9e4-fb954224d381 tempest-ServerMetadataNegativeTestJSON-1868427009 tempest-ServerMetadataNegativeTestJSON-1868427009-project-member] Task: {'id': task-1314425, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.147942} completed successfully.
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 827.430298] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a242d3b0-5131-4888-830a-8c848380688b tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 827.430620] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-a242d3b0-5131-4888-830a-8c848380688b tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk to [datastore1] 694a5d4b-3673-406b-a24a-d37fad33e549/694a5d4b-3673-406b-a24a-d37fad33e549.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 827.430969] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c4e691d-a55a-4273-a9e4-fb954224d381 tempest-ServerMetadataNegativeTestJSON-1868427009 tempest-ServerMetadataNegativeTestJSON-1868427009-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk to [datastore1] 406da948-71c7-4c28-9ee3-10af64b1ab51/406da948-71c7-4c28-9ee3-10af64b1ab51.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 827.431238] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-6c4e691d-a55a-4273-a9e4-fb954224d381 tempest-ServerMetadataNegativeTestJSON-1868427009 tempest-ServerMetadataNegativeTestJSON-1868427009-project-member] [instance: 406da948-71c7-4c28-9ee3-10af64b1ab51] Extending root virtual disk to 1048576 {{(pid=61839) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 827.432577] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e48a110b-5a81-4352-9c77-d11903e3694c {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.437348] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-89d53f1f-1b8c-46c5-979c-306cd7115f4f {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.439435] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d77515a5-dfca-48fd-8957-d6273de02def {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.451821] env[61839]: DEBUG nova.compute.provider_tree [None req-3b56f212-0cbc-4766-86f9-61b8fddca264 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 827.457266] env[61839]: DEBUG oslo_vmware.api [None req-6c4e691d-a55a-4273-a9e4-fb954224d381 tempest-ServerMetadataNegativeTestJSON-1868427009 tempest-ServerMetadataNegativeTestJSON-1868427009-project-member] Waiting for the task: (returnval){ [ 827.457266] env[61839]: value = "task-1314429" [ 827.457266] env[61839]: 
_type = "Task" [ 827.457266] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 827.457266] env[61839]: DEBUG oslo_vmware.api [None req-a242d3b0-5131-4888-830a-8c848380688b tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Waiting for the task: (returnval){ [ 827.457266] env[61839]: value = "task-1314428" [ 827.457266] env[61839]: _type = "Task" [ 827.457266] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 827.468591] env[61839]: DEBUG oslo_vmware.api [None req-6c4e691d-a55a-4273-a9e4-fb954224d381 tempest-ServerMetadataNegativeTestJSON-1868427009 tempest-ServerMetadataNegativeTestJSON-1868427009-project-member] Task: {'id': task-1314429, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 827.474103] env[61839]: DEBUG oslo_vmware.api [None req-a242d3b0-5131-4888-830a-8c848380688b tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Task: {'id': task-1314428, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 827.548947] env[61839]: DEBUG oslo_vmware.api [None req-0ff6597c-ab3c-4cf9-b3ae-5fc8fc4a3084 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Task: {'id': task-1314427, 'name': Rename_Task, 'duration_secs': 0.167787} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 827.552200] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ff6597c-ab3c-4cf9-b3ae-5fc8fc4a3084 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 047080fa-8781-47b1-89d8-2e4c8031b164] Powering on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 827.552200] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a7a06838-2b77-4636-abf7-4d6b97e137fd {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.560552] env[61839]: DEBUG oslo_vmware.api [None req-0ff6597c-ab3c-4cf9-b3ae-5fc8fc4a3084 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Waiting for the task: (returnval){ [ 827.560552] env[61839]: value = "task-1314430" [ 827.560552] env[61839]: _type = "Task" [ 827.560552] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 827.570513] env[61839]: DEBUG oslo_vmware.api [None req-0ff6597c-ab3c-4cf9-b3ae-5fc8fc4a3084 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Task: {'id': task-1314430, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 827.637959] env[61839]: DEBUG nova.compute.manager [req-0247a74d-a8c0-4f13-8c25-80eae9a19ca8 req-13843861-4b50-413a-beda-7107e4bd4363 service nova] [instance: fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4] Received event network-changed-bc315481-8651-4be3-bdd5-269b569b2817 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 827.638079] env[61839]: DEBUG nova.compute.manager [req-0247a74d-a8c0-4f13-8c25-80eae9a19ca8 req-13843861-4b50-413a-beda-7107e4bd4363 service nova] [instance: fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4] Refreshing instance network info cache due to event network-changed-bc315481-8651-4be3-bdd5-269b569b2817. {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 827.638350] env[61839]: DEBUG oslo_concurrency.lockutils [req-0247a74d-a8c0-4f13-8c25-80eae9a19ca8 req-13843861-4b50-413a-beda-7107e4bd4363 service nova] Acquiring lock "refresh_cache-fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 827.638495] env[61839]: DEBUG oslo_concurrency.lockutils [req-0247a74d-a8c0-4f13-8c25-80eae9a19ca8 req-13843861-4b50-413a-beda-7107e4bd4363 service nova] Acquired lock "refresh_cache-fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 827.638743] env[61839]: DEBUG nova.network.neutron [req-0247a74d-a8c0-4f13-8c25-80eae9a19ca8 req-13843861-4b50-413a-beda-7107e4bd4363 service nova] [instance: fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4] Refreshing network info cache for port bc315481-8651-4be3-bdd5-269b569b2817 {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 827.958072] env[61839]: DEBUG nova.scheduler.client.report [None req-3b56f212-0cbc-4766-86f9-61b8fddca264 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 827.987524] env[61839]: DEBUG oslo_vmware.api [None req-6c4e691d-a55a-4273-a9e4-fb954224d381 tempest-ServerMetadataNegativeTestJSON-1868427009 tempest-ServerMetadataNegativeTestJSON-1868427009-project-member] Task: {'id': task-1314429, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.078117} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 827.989487] env[61839]: DEBUG oslo_vmware.api [None req-a242d3b0-5131-4888-830a-8c848380688b tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Task: {'id': task-1314428, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 827.990682] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-6c4e691d-a55a-4273-a9e4-fb954224d381 tempest-ServerMetadataNegativeTestJSON-1868427009 tempest-ServerMetadataNegativeTestJSON-1868427009-project-member] [instance: 406da948-71c7-4c28-9ee3-10af64b1ab51] Extended root virtual disk {{(pid=61839) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 827.992103] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af9bd75a-717b-4a83-b6bd-0ce16936695f {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.017903] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-6c4e691d-a55a-4273-a9e4-fb954224d381 tempest-ServerMetadataNegativeTestJSON-1868427009 tempest-ServerMetadataNegativeTestJSON-1868427009-project-member] [instance: 406da948-71c7-4c28-9ee3-10af64b1ab51] Reconfiguring VM instance instance-0000003f to attach disk [datastore1] 406da948-71c7-4c28-9ee3-10af64b1ab51/406da948-71c7-4c28-9ee3-10af64b1ab51.vmdk or device None with type sparse {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 828.019625] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-19edbdd8-8551-46f8-b9ce-31c0979eb072 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.041991] env[61839]: DEBUG oslo_vmware.api [None req-6c4e691d-a55a-4273-a9e4-fb954224d381 tempest-ServerMetadataNegativeTestJSON-1868427009 tempest-ServerMetadataNegativeTestJSON-1868427009-project-member] Waiting for the task: (returnval){ [ 828.041991] env[61839]: value = "task-1314431" [ 828.041991] env[61839]: _type = "Task" [ 828.041991] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 828.058920] env[61839]: DEBUG oslo_vmware.api [None req-6c4e691d-a55a-4273-a9e4-fb954224d381 tempest-ServerMetadataNegativeTestJSON-1868427009 tempest-ServerMetadataNegativeTestJSON-1868427009-project-member] Task: {'id': task-1314431, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 828.071473] env[61839]: DEBUG oslo_vmware.api [None req-0ff6597c-ab3c-4cf9-b3ae-5fc8fc4a3084 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Task: {'id': task-1314430, 'name': PowerOnVM_Task} progress is 1%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 828.244441] env[61839]: DEBUG oslo_concurrency.lockutils [None req-e98b0865-779a-457f-bdd0-ebf545e9b50c tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Acquiring lock "56369316-a445-4a2a-a0a6-967074104e19" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 828.244441] env[61839]: DEBUG oslo_concurrency.lockutils [None req-e98b0865-779a-457f-bdd0-ebf545e9b50c tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Lock "56369316-a445-4a2a-a0a6-967074104e19" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 828.471996] env[61839]: DEBUG oslo_vmware.api [None req-a242d3b0-5131-4888-830a-8c848380688b tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Task: {'id': task-1314428, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.719391} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 828.479453] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3b56f212-0cbc-4766-86f9-61b8fddca264 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.588s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 828.479994] env[61839]: DEBUG nova.compute.manager [None req-3b56f212-0cbc-4766-86f9-61b8fddca264 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] [instance: 49d4720b-83e3-47d9-b727-5bb255de2e7c] Start building networks asynchronously for instance. 
{{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 828.483227] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-a242d3b0-5131-4888-830a-8c848380688b tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk to [datastore1] 694a5d4b-3673-406b-a24a-d37fad33e549/694a5d4b-3673-406b-a24a-d37fad33e549.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 828.484099] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-a242d3b0-5131-4888-830a-8c848380688b tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: 694a5d4b-3673-406b-a24a-d37fad33e549] Extending root virtual disk to 1048576 {{(pid=61839) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 828.484885] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3697bd91-57ab-4763-82ae-06bd5d1d7501 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.992s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 828.487104] env[61839]: INFO nova.compute.claims [None req-3697bd91-57ab-4763-82ae-06bd5d1d7501 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] [instance: 62959833-5834-4c0a-bf4e-3ac1157b3b0c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 828.494318] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-eb9021cb-5234-4ea9-8368-0c4c4ffe4eb1 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.508016] env[61839]: DEBUG oslo_vmware.api [None req-a242d3b0-5131-4888-830a-8c848380688b tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Waiting for the task: (returnval){ [ 828.508016] env[61839]: value = "task-1314432" [ 828.508016] env[61839]: _type = "Task" [ 828.508016] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 828.514486] env[61839]: DEBUG oslo_vmware.api [None req-a242d3b0-5131-4888-830a-8c848380688b tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Task: {'id': task-1314432, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 828.553547] env[61839]: DEBUG oslo_vmware.api [None req-6c4e691d-a55a-4273-a9e4-fb954224d381 tempest-ServerMetadataNegativeTestJSON-1868427009 tempest-ServerMetadataNegativeTestJSON-1868427009-project-member] Task: {'id': task-1314431, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 828.573118] env[61839]: DEBUG oslo_vmware.api [None req-0ff6597c-ab3c-4cf9-b3ae-5fc8fc4a3084 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Task: {'id': task-1314430, 'name': PowerOnVM_Task} progress is 37%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 828.597163] env[61839]: DEBUG nova.network.neutron [req-0247a74d-a8c0-4f13-8c25-80eae9a19ca8 req-13843861-4b50-413a-beda-7107e4bd4363 service nova] [instance: fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4] Updated VIF entry in instance network info cache for port bc315481-8651-4be3-bdd5-269b569b2817. {{(pid=61839) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 828.597581] env[61839]: DEBUG nova.network.neutron [req-0247a74d-a8c0-4f13-8c25-80eae9a19ca8 req-13843861-4b50-413a-beda-7107e4bd4363 service nova] [instance: fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4] Updating instance_info_cache with network_info: [{"id": "bc315481-8651-4be3-bdd5-269b569b2817", "address": "fa:16:3e:ce:61:f0", "network": {"id": "54f10e74-ee7b-499c-a6b6-a27d337719cd", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-79581761-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.143", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5f789f3900a347b59c491e9d141fb9e7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe20ef0e-0991-44d7-887d-08dddac0b56b", "external-id": "nsx-vlan-transportzone-991", "segmentation_id": 991, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbc315481-86", "ovs_interfaceid": "bc315481-8651-4be3-bdd5-269b569b2817", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 828.885335] env[61839]: DEBUG nova.network.neutron [None req-c09e423b-508c-42ac-8e81-438f1973d598 tempest-ServersTestManualDisk-590910019 tempest-ServersTestManualDisk-590910019-project-member] [instance: 39728872-2d30-48eb-90da-412f1e45971c] Successfully updated port: 2615f303-83dd-4c9f-b09e-7cfb5da3276f {{(pid=61839) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 828.986577] env[61839]: DEBUG nova.compute.utils [None req-3b56f212-0cbc-4766-86f9-61b8fddca264 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] Using /dev/sd instead of None {{(pid=61839) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 828.988705] env[61839]: DEBUG nova.compute.manager [None req-3b56f212-0cbc-4766-86f9-61b8fddca264 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] [instance: 49d4720b-83e3-47d9-b727-5bb255de2e7c] Allocating IP information in the background. 
{{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 828.988705] env[61839]: DEBUG nova.network.neutron [None req-3b56f212-0cbc-4766-86f9-61b8fddca264 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] [instance: 49d4720b-83e3-47d9-b727-5bb255de2e7c] allocate_for_instance() {{(pid=61839) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 829.016518] env[61839]: DEBUG oslo_vmware.api [None req-a242d3b0-5131-4888-830a-8c848380688b tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Task: {'id': task-1314432, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.18192} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 829.016795] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-a242d3b0-5131-4888-830a-8c848380688b tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: 694a5d4b-3673-406b-a24a-d37fad33e549] Extended root virtual disk {{(pid=61839) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 829.017594] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4715cda-6691-4865-92e6-814a57304156 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.040960] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-a242d3b0-5131-4888-830a-8c848380688b tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: 694a5d4b-3673-406b-a24a-d37fad33e549] Reconfiguring VM instance instance-00000040 to attach disk [datastore1] 694a5d4b-3673-406b-a24a-d37fad33e549/694a5d4b-3673-406b-a24a-d37fad33e549.vmdk or device None with type sparse {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 829.041650] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b53a09a1-7f5b-4b49-8c60-cf8e94539311 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.069100] env[61839]: DEBUG oslo_vmware.api [None req-a242d3b0-5131-4888-830a-8c848380688b tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Waiting for the task: (returnval){ [ 829.069100] env[61839]: value = "task-1314433" [ 829.069100] env[61839]: _type = "Task" [ 829.069100] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 829.069420] env[61839]: DEBUG oslo_vmware.api [None req-6c4e691d-a55a-4273-a9e4-fb954224d381 tempest-ServerMetadataNegativeTestJSON-1868427009 tempest-ServerMetadataNegativeTestJSON-1868427009-project-member] Task: {'id': task-1314431, 'name': ReconfigVM_Task, 'duration_secs': 0.616526} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 829.070196] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-6c4e691d-a55a-4273-a9e4-fb954224d381 tempest-ServerMetadataNegativeTestJSON-1868427009 tempest-ServerMetadataNegativeTestJSON-1868427009-project-member] [instance: 406da948-71c7-4c28-9ee3-10af64b1ab51] Reconfigured VM instance instance-0000003f to attach disk [datastore1] 406da948-71c7-4c28-9ee3-10af64b1ab51/406da948-71c7-4c28-9ee3-10af64b1ab51.vmdk or device None with type sparse {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 829.073447] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-734dd25a-a05c-4fbd-8c46-ba89d8dba153 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.078226] env[61839]: DEBUG oslo_vmware.api [None req-0ff6597c-ab3c-4cf9-b3ae-5fc8fc4a3084 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Task: {'id': task-1314430, 'name': PowerOnVM_Task} progress is 91%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.083940] env[61839]: DEBUG oslo_vmware.api [None req-a242d3b0-5131-4888-830a-8c848380688b tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Task: {'id': task-1314433, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.085308] env[61839]: DEBUG oslo_vmware.api [None req-6c4e691d-a55a-4273-a9e4-fb954224d381 tempest-ServerMetadataNegativeTestJSON-1868427009 tempest-ServerMetadataNegativeTestJSON-1868427009-project-member] Waiting for the task: (returnval){ [ 829.085308] env[61839]: value = "task-1314434" [ 829.085308] env[61839]: _type = "Task" [ 829.085308] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 829.093221] env[61839]: DEBUG oslo_vmware.api [None req-6c4e691d-a55a-4273-a9e4-fb954224d381 tempest-ServerMetadataNegativeTestJSON-1868427009 tempest-ServerMetadataNegativeTestJSON-1868427009-project-member] Task: {'id': task-1314434, 'name': Rename_Task} progress is 5%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.100280] env[61839]: DEBUG oslo_concurrency.lockutils [req-0247a74d-a8c0-4f13-8c25-80eae9a19ca8 req-13843861-4b50-413a-beda-7107e4bd4363 service nova] Releasing lock "refresh_cache-fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 829.101872] env[61839]: DEBUG nova.policy [None req-3b56f212-0cbc-4766-86f9-61b8fddca264 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2bcde59cc33d41fc9f68c19d6f247fff', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '766b763d07a649a3aab0e3299625641a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61839) authorize /opt/stack/nova/nova/policy.py:201}} [ 829.112261] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a6fa6e9f-3ab4-4eb2-8dbd-9e4dcff4ce69 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Acquiring lock "e47c08c6-5de3-48b0-8327-57ddb273555f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 829.112356] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a6fa6e9f-3ab4-4eb2-8dbd-9e4dcff4ce69 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Lock "e47c08c6-5de3-48b0-8327-57ddb273555f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 829.389396] env[61839]: DEBUG oslo_concurrency.lockutils [None req-c09e423b-508c-42ac-8e81-438f1973d598 tempest-ServersTestManualDisk-590910019 tempest-ServersTestManualDisk-590910019-project-member] Acquiring lock "refresh_cache-39728872-2d30-48eb-90da-412f1e45971c" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 829.389591] env[61839]: DEBUG oslo_concurrency.lockutils [None req-c09e423b-508c-42ac-8e81-438f1973d598 tempest-ServersTestManualDisk-590910019 tempest-ServersTestManualDisk-590910019-project-member] Acquired lock "refresh_cache-39728872-2d30-48eb-90da-412f1e45971c" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 829.389695] env[61839]: DEBUG nova.network.neutron [None req-c09e423b-508c-42ac-8e81-438f1973d598 tempest-ServersTestManualDisk-590910019 tempest-ServersTestManualDisk-590910019-project-member] [instance: 39728872-2d30-48eb-90da-412f1e45971c] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 829.492451] env[61839]: DEBUG nova.compute.manager [None req-3b56f212-0cbc-4766-86f9-61b8fddca264 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] [instance: 49d4720b-83e3-47d9-b727-5bb255de2e7c] Start building block device mappings for instance. 
{{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 829.506713] env[61839]: DEBUG nova.network.neutron [None req-3b56f212-0cbc-4766-86f9-61b8fddca264 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] [instance: 49d4720b-83e3-47d9-b727-5bb255de2e7c] Successfully created port: 32fd36fe-b511-4c88-9578-3d72ebf35d22 {{(pid=61839) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 829.578277] env[61839]: DEBUG oslo_vmware.api [None req-0ff6597c-ab3c-4cf9-b3ae-5fc8fc4a3084 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Task: {'id': task-1314430, 'name': PowerOnVM_Task, 'duration_secs': 1.521916} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 829.580652] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ff6597c-ab3c-4cf9-b3ae-5fc8fc4a3084 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 047080fa-8781-47b1-89d8-2e4c8031b164] Powered on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 829.580652] env[61839]: INFO nova.compute.manager [None req-0ff6597c-ab3c-4cf9-b3ae-5fc8fc4a3084 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 047080fa-8781-47b1-89d8-2e4c8031b164] Took 11.92 seconds to spawn the instance on the hypervisor. [ 829.580652] env[61839]: DEBUG nova.compute.manager [None req-0ff6597c-ab3c-4cf9-b3ae-5fc8fc4a3084 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 047080fa-8781-47b1-89d8-2e4c8031b164] Checking state {{(pid=61839) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 829.580652] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53ce251a-b142-407d-b95a-05d1fcac7992 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.589098] env[61839]: DEBUG oslo_vmware.api [None req-a242d3b0-5131-4888-830a-8c848380688b tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Task: {'id': task-1314433, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.603209] env[61839]: DEBUG oslo_vmware.api [None req-6c4e691d-a55a-4273-a9e4-fb954224d381 tempest-ServerMetadataNegativeTestJSON-1868427009 tempest-ServerMetadataNegativeTestJSON-1868427009-project-member] Task: {'id': task-1314434, 'name': Rename_Task, 'duration_secs': 0.291262} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 829.603489] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c4e691d-a55a-4273-a9e4-fb954224d381 tempest-ServerMetadataNegativeTestJSON-1868427009 tempest-ServerMetadataNegativeTestJSON-1868427009-project-member] [instance: 406da948-71c7-4c28-9ee3-10af64b1ab51] Powering on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 829.604066] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f12d50ad-ba66-4249-8d72-63826c16dbcf {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.610073] env[61839]: DEBUG oslo_vmware.api [None req-6c4e691d-a55a-4273-a9e4-fb954224d381 tempest-ServerMetadataNegativeTestJSON-1868427009 tempest-ServerMetadataNegativeTestJSON-1868427009-project-member] Waiting for the task: (returnval){ [ 829.610073] env[61839]: value = "task-1314435" [ 829.610073] env[61839]: _type = "Task" [ 829.610073] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 829.623998] env[61839]: DEBUG oslo_vmware.api [None req-6c4e691d-a55a-4273-a9e4-fb954224d381 tempest-ServerMetadataNegativeTestJSON-1868427009 tempest-ServerMetadataNegativeTestJSON-1868427009-project-member] Task: {'id': task-1314435, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.669254] env[61839]: DEBUG nova.compute.manager [req-16ad573b-e9ee-40a5-abe6-3025c918bf39 req-79f8866e-1798-47f4-83e5-ed0210fe7817 service nova] [instance: 39728872-2d30-48eb-90da-412f1e45971c] Received event network-vif-plugged-2615f303-83dd-4c9f-b09e-7cfb5da3276f {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 829.669466] env[61839]: DEBUG oslo_concurrency.lockutils [req-16ad573b-e9ee-40a5-abe6-3025c918bf39 req-79f8866e-1798-47f4-83e5-ed0210fe7817 service nova] Acquiring lock "39728872-2d30-48eb-90da-412f1e45971c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 829.669671] env[61839]: DEBUG oslo_concurrency.lockutils [req-16ad573b-e9ee-40a5-abe6-3025c918bf39 req-79f8866e-1798-47f4-83e5-ed0210fe7817 service nova] Lock "39728872-2d30-48eb-90da-412f1e45971c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 829.669837] env[61839]: DEBUG oslo_concurrency.lockutils [req-16ad573b-e9ee-40a5-abe6-3025c918bf39 req-79f8866e-1798-47f4-83e5-ed0210fe7817 service nova] Lock "39728872-2d30-48eb-90da-412f1e45971c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 829.670100] env[61839]: DEBUG nova.compute.manager [req-16ad573b-e9ee-40a5-abe6-3025c918bf39 req-79f8866e-1798-47f4-83e5-ed0210fe7817 service nova] [instance: 39728872-2d30-48eb-90da-412f1e45971c] No waiting events found dispatching network-vif-plugged-2615f303-83dd-4c9f-b09e-7cfb5da3276f {{(pid=61839) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 
829.670320] env[61839]: WARNING nova.compute.manager [req-16ad573b-e9ee-40a5-abe6-3025c918bf39 req-79f8866e-1798-47f4-83e5-ed0210fe7817 service nova] [instance: 39728872-2d30-48eb-90da-412f1e45971c] Received unexpected event network-vif-plugged-2615f303-83dd-4c9f-b09e-7cfb5da3276f for instance with vm_state building and task_state spawning. [ 829.670486] env[61839]: DEBUG nova.compute.manager [req-16ad573b-e9ee-40a5-abe6-3025c918bf39 req-79f8866e-1798-47f4-83e5-ed0210fe7817 service nova] [instance: 39728872-2d30-48eb-90da-412f1e45971c] Received event network-changed-2615f303-83dd-4c9f-b09e-7cfb5da3276f {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 829.670655] env[61839]: DEBUG nova.compute.manager [req-16ad573b-e9ee-40a5-abe6-3025c918bf39 req-79f8866e-1798-47f4-83e5-ed0210fe7817 service nova] [instance: 39728872-2d30-48eb-90da-412f1e45971c] Refreshing instance network info cache due to event network-changed-2615f303-83dd-4c9f-b09e-7cfb5da3276f. {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 829.670831] env[61839]: DEBUG oslo_concurrency.lockutils [req-16ad573b-e9ee-40a5-abe6-3025c918bf39 req-79f8866e-1798-47f4-83e5-ed0210fe7817 service nova] Acquiring lock "refresh_cache-39728872-2d30-48eb-90da-412f1e45971c" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 829.860642] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20f36d48-4283-4deb-a31e-86013f964619 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.868437] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75f7373c-46e1-4bb0-9a6e-f95510c66638 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.904022] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d82005a1-69e8-434e-a67a-0b018e14c1e3 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.912285] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30ac8468-3e50-4463-9e9a-e77ca31094ad {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.926130] env[61839]: DEBUG nova.compute.provider_tree [None req-3697bd91-57ab-4763-82ae-06bd5d1d7501 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 829.954597] env[61839]: DEBUG nova.network.neutron [None req-c09e423b-508c-42ac-8e81-438f1973d598 tempest-ServersTestManualDisk-590910019 tempest-ServersTestManualDisk-590910019-project-member] [instance: 39728872-2d30-48eb-90da-412f1e45971c] Instance cache missing network info. 
{{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 830.088946] env[61839]: DEBUG oslo_vmware.api [None req-a242d3b0-5131-4888-830a-8c848380688b tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Task: {'id': task-1314433, 'name': ReconfigVM_Task, 'duration_secs': 0.917161} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 830.088946] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-a242d3b0-5131-4888-830a-8c848380688b tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: 694a5d4b-3673-406b-a24a-d37fad33e549] Reconfigured VM instance instance-00000040 to attach disk [datastore1] 694a5d4b-3673-406b-a24a-d37fad33e549/694a5d4b-3673-406b-a24a-d37fad33e549.vmdk or device None with type sparse {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 830.088946] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-13a6c660-a686-4e1f-8170-ed5e8f9551e9 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.111375] env[61839]: DEBUG oslo_vmware.api [None req-a242d3b0-5131-4888-830a-8c848380688b tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Waiting for the task: (returnval){ [ 830.111375] env[61839]: value = "task-1314436" [ 830.111375] env[61839]: _type = "Task" [ 830.111375] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 830.121979] env[61839]: INFO nova.compute.manager [None req-0ff6597c-ab3c-4cf9-b3ae-5fc8fc4a3084 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 047080fa-8781-47b1-89d8-2e4c8031b164] Took 36.22 seconds to build instance. [ 830.129752] env[61839]: DEBUG oslo_vmware.api [None req-6c4e691d-a55a-4273-a9e4-fb954224d381 tempest-ServerMetadataNegativeTestJSON-1868427009 tempest-ServerMetadataNegativeTestJSON-1868427009-project-member] Task: {'id': task-1314435, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.134888] env[61839]: DEBUG oslo_vmware.api [None req-a242d3b0-5131-4888-830a-8c848380688b tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Task: {'id': task-1314436, 'name': Rename_Task} progress is 14%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.432019] env[61839]: DEBUG nova.scheduler.client.report [None req-3697bd91-57ab-4763-82ae-06bd5d1d7501 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 830.435161] env[61839]: DEBUG nova.network.neutron [None req-c09e423b-508c-42ac-8e81-438f1973d598 tempest-ServersTestManualDisk-590910019 tempest-ServersTestManualDisk-590910019-project-member] [instance: 39728872-2d30-48eb-90da-412f1e45971c] Updating instance_info_cache with network_info: [{"id": "2615f303-83dd-4c9f-b09e-7cfb5da3276f", "address": "fa:16:3e:33:57:53", "network": {"id": "4f5987f4-a100-468f-86db-010c27144844", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-185915171-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d9a08aa6b2064b06874f5a2a3063d8b2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f0c0b05e-6d10-474c-9173-4c8f1dacac9f", "external-id": "nsx-vlan-transportzone-693", "segmentation_id": 693, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2615f303-83", "ovs_interfaceid": "2615f303-83dd-4c9f-b09e-7cfb5da3276f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 830.501207] env[61839]: DEBUG nova.compute.manager [None req-3b56f212-0cbc-4766-86f9-61b8fddca264 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] [instance: 49d4720b-83e3-47d9-b727-5bb255de2e7c] Start spawning the instance on the hypervisor. 
{{(pid=61839) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 830.530716] env[61839]: DEBUG nova.virt.hardware [None req-3b56f212-0cbc-4766-86f9-61b8fddca264 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-18T16:51:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-18T16:50:57Z,direct_url=,disk_format='vmdk',id=e497cc62-282a-4a70-9770-22d80d8a1013,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a98e034276e44534a9feace637762da7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-18T16:50:58Z,virtual_size=,visibility=), allow threads: False {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 830.530716] env[61839]: DEBUG nova.virt.hardware [None req-3b56f212-0cbc-4766-86f9-61b8fddca264 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] Flavor limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 830.530716] env[61839]: DEBUG nova.virt.hardware [None req-3b56f212-0cbc-4766-86f9-61b8fddca264 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] Image limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 830.531045] env[61839]: DEBUG nova.virt.hardware [None req-3b56f212-0cbc-4766-86f9-61b8fddca264 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] Flavor pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 830.531045] env[61839]: DEBUG nova.virt.hardware [None req-3b56f212-0cbc-4766-86f9-61b8fddca264 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] Image pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 830.531281] env[61839]: DEBUG nova.virt.hardware [None req-3b56f212-0cbc-4766-86f9-61b8fddca264 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 830.531592] env[61839]: DEBUG nova.virt.hardware [None req-3b56f212-0cbc-4766-86f9-61b8fddca264 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 830.531823] env[61839]: DEBUG nova.virt.hardware [None req-3b56f212-0cbc-4766-86f9-61b8fddca264 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 
830.532068] env[61839]: DEBUG nova.virt.hardware [None req-3b56f212-0cbc-4766-86f9-61b8fddca264 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] Got 1 possible topologies {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 830.533393] env[61839]: DEBUG nova.virt.hardware [None req-3b56f212-0cbc-4766-86f9-61b8fddca264 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 830.533393] env[61839]: DEBUG nova.virt.hardware [None req-3b56f212-0cbc-4766-86f9-61b8fddca264 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 830.533513] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82c60c48-fbf4-4eb9-a79b-64e1b5c67246 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.542468] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bac2cd0f-82c9-4105-8986-968408453690 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.621660] env[61839]: DEBUG oslo_vmware.api [None req-6c4e691d-a55a-4273-a9e4-fb954224d381 tempest-ServerMetadataNegativeTestJSON-1868427009 tempest-ServerMetadataNegativeTestJSON-1868427009-project-member] Task: {'id': task-1314435, 'name': PowerOnVM_Task} progress is 100%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.626918] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0ff6597c-ab3c-4cf9-b3ae-5fc8fc4a3084 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Lock "047080fa-8781-47b1-89d8-2e4c8031b164" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 112.223s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 830.637194] env[61839]: DEBUG oslo_vmware.api [None req-a242d3b0-5131-4888-830a-8c848380688b tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Task: {'id': task-1314436, 'name': Rename_Task, 'duration_secs': 0.186528} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 830.637501] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-a242d3b0-5131-4888-830a-8c848380688b tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: 694a5d4b-3673-406b-a24a-d37fad33e549] Powering on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 830.637763] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-dd1e81d9-153d-4ba3-a777-1baaed7e5c23 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.644675] env[61839]: DEBUG oslo_vmware.api [None req-a242d3b0-5131-4888-830a-8c848380688b tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Waiting for the task: (returnval){ [ 830.644675] env[61839]: value = "task-1314437" [ 830.644675] env[61839]: _type = "Task" [ 830.644675] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 830.653887] env[61839]: DEBUG oslo_vmware.api [None req-a242d3b0-5131-4888-830a-8c848380688b tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Task: {'id': task-1314437, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.938534] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3697bd91-57ab-4763-82ae-06bd5d1d7501 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.454s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 830.939191] env[61839]: DEBUG nova.compute.manager [None req-3697bd91-57ab-4763-82ae-06bd5d1d7501 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] [instance: 62959833-5834-4c0a-bf4e-3ac1157b3b0c] Start building networks asynchronously for instance. 
{{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 830.942304] env[61839]: DEBUG oslo_concurrency.lockutils [None req-cabdb6a6-9cdb-4077-8178-485aa41e5aff tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.222s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 830.948085] env[61839]: INFO nova.compute.claims [None req-cabdb6a6-9cdb-4077-8178-485aa41e5aff tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: 40c54d84-8e50-483a-b4e0-5f1cc72b0880] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 830.956398] env[61839]: DEBUG oslo_concurrency.lockutils [None req-c09e423b-508c-42ac-8e81-438f1973d598 tempest-ServersTestManualDisk-590910019 tempest-ServersTestManualDisk-590910019-project-member] Releasing lock "refresh_cache-39728872-2d30-48eb-90da-412f1e45971c" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 830.957076] env[61839]: DEBUG nova.compute.manager [None req-c09e423b-508c-42ac-8e81-438f1973d598 tempest-ServersTestManualDisk-590910019 tempest-ServersTestManualDisk-590910019-project-member] [instance: 39728872-2d30-48eb-90da-412f1e45971c] Instance network_info: |[{"id": "2615f303-83dd-4c9f-b09e-7cfb5da3276f", "address": "fa:16:3e:33:57:53", "network": {"id": "4f5987f4-a100-468f-86db-010c27144844", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-185915171-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d9a08aa6b2064b06874f5a2a3063d8b2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f0c0b05e-6d10-474c-9173-4c8f1dacac9f", "external-id": "nsx-vlan-transportzone-693", "segmentation_id": 693, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2615f303-83", "ovs_interfaceid": "2615f303-83dd-4c9f-b09e-7cfb5da3276f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 830.961638] env[61839]: DEBUG oslo_concurrency.lockutils [req-16ad573b-e9ee-40a5-abe6-3025c918bf39 req-79f8866e-1798-47f4-83e5-ed0210fe7817 service nova] Acquired lock "refresh_cache-39728872-2d30-48eb-90da-412f1e45971c" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 830.961638] env[61839]: DEBUG nova.network.neutron [req-16ad573b-e9ee-40a5-abe6-3025c918bf39 req-79f8866e-1798-47f4-83e5-ed0210fe7817 service nova] [instance: 39728872-2d30-48eb-90da-412f1e45971c] Refreshing network info cache for port 2615f303-83dd-4c9f-b09e-7cfb5da3276f {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 830.961638] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-c09e423b-508c-42ac-8e81-438f1973d598 
tempest-ServersTestManualDisk-590910019 tempest-ServersTestManualDisk-590910019-project-member] [instance: 39728872-2d30-48eb-90da-412f1e45971c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:33:57:53', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f0c0b05e-6d10-474c-9173-4c8f1dacac9f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2615f303-83dd-4c9f-b09e-7cfb5da3276f', 'vif_model': 'vmxnet3'}] {{(pid=61839) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 830.968644] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-c09e423b-508c-42ac-8e81-438f1973d598 tempest-ServersTestManualDisk-590910019 tempest-ServersTestManualDisk-590910019-project-member] Creating folder: Project (d9a08aa6b2064b06874f5a2a3063d8b2). Parent ref: group-v281288. {{(pid=61839) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 830.969816] env[61839]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c5f40c83-7c2a-461d-ae7c-e752ea0d58b0 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.981426] env[61839]: INFO nova.virt.vmwareapi.vm_util [None req-c09e423b-508c-42ac-8e81-438f1973d598 tempest-ServersTestManualDisk-590910019 tempest-ServersTestManualDisk-590910019-project-member] Created folder: Project (d9a08aa6b2064b06874f5a2a3063d8b2) in parent group-v281288. [ 830.982120] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-c09e423b-508c-42ac-8e81-438f1973d598 tempest-ServersTestManualDisk-590910019 tempest-ServersTestManualDisk-590910019-project-member] Creating folder: Instances. Parent ref: group-v281345. {{(pid=61839) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 830.982120] env[61839]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a87feb6c-e481-4932-aca0-eadfa1a31ae0 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.991691] env[61839]: INFO nova.virt.vmwareapi.vm_util [None req-c09e423b-508c-42ac-8e81-438f1973d598 tempest-ServersTestManualDisk-590910019 tempest-ServersTestManualDisk-590910019-project-member] Created folder: Instances in parent group-v281345. [ 830.992455] env[61839]: DEBUG oslo.service.loopingcall [None req-c09e423b-508c-42ac-8e81-438f1973d598 tempest-ServersTestManualDisk-590910019 tempest-ServersTestManualDisk-590910019-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 830.992455] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 39728872-2d30-48eb-90da-412f1e45971c] Creating VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 830.992694] env[61839]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ebe1ae08-f54c-476b-8dfe-5b924d51940d {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.013912] env[61839]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 831.013912] env[61839]: value = "task-1314440" [ 831.013912] env[61839]: _type = "Task" [ 831.013912] env[61839]: } to complete. 
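
The "Instance network_info" dump above and the "Instance VIF info" record that opens this stretch are the input and output of one translation step: the driver reduces each Neutron port to the few fields vCenter needs, keying NSX-backed ports by their nsx-logical-switch-id as an OpaqueNetwork reference. A hedged sketch of that mapping, covering only the case visible here:

```python
def vif_info_from_network_info(network_info):
    """Map a Neutron network_info entry (the JSON dumped above) to the dict
    the driver logs as 'Instance VIF info'.

    A sketch covering only the NSX/OpaqueNetwork case in this log; the input
    field names are exactly those in the dump, the function name is not Nova's.
    """
    out = []
    for vif in network_info:
        details = vif["details"]
        out.append({
            "network_name": vif["network"]["bridge"],        # 'br-int'
            "mac_address": vif["address"],                   # 'fa:16:3e:33:57:53'
            "network_ref": {
                "type": "OpaqueNetwork",
                "network-id": details["nsx-logical-switch-id"],
                "network-type": "nsx.LogicalSwitch",
                "use-external-id": True,
            },
            "iface_id": vif["id"],                           # Neutron port UUID
            "vif_model": "vmxnet3",
        })
    return out
```

Note how the network-id in the output ('f0c0b05e-6d10-474c-9173-4c8f1dacac9f') is taken from the port's nsx-logical-switch-id rather than from the Neutron network UUID.
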
{{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 831.022956] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314440, 'name': CreateVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.123368] env[61839]: DEBUG oslo_vmware.api [None req-6c4e691d-a55a-4273-a9e4-fb954224d381 tempest-ServerMetadataNegativeTestJSON-1868427009 tempest-ServerMetadataNegativeTestJSON-1868427009-project-member] Task: {'id': task-1314435, 'name': PowerOnVM_Task, 'duration_secs': 1.035247} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 831.123732] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c4e691d-a55a-4273-a9e4-fb954224d381 tempest-ServerMetadataNegativeTestJSON-1868427009 tempest-ServerMetadataNegativeTestJSON-1868427009-project-member] [instance: 406da948-71c7-4c28-9ee3-10af64b1ab51] Powered on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 831.124050] env[61839]: INFO nova.compute.manager [None req-6c4e691d-a55a-4273-a9e4-fb954224d381 tempest-ServerMetadataNegativeTestJSON-1868427009 tempest-ServerMetadataNegativeTestJSON-1868427009-project-member] [instance: 406da948-71c7-4c28-9ee3-10af64b1ab51] Took 11.04 seconds to spawn the instance on the hypervisor. [ 831.124271] env[61839]: DEBUG nova.compute.manager [None req-6c4e691d-a55a-4273-a9e4-fb954224d381 tempest-ServerMetadataNegativeTestJSON-1868427009 tempest-ServerMetadataNegativeTestJSON-1868427009-project-member] [instance: 406da948-71c7-4c28-9ee3-10af64b1ab51] Checking state {{(pid=61839) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 831.125161] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7ecbbba-8087-427e-abfc-1faaa543d8a9 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.135505] env[61839]: DEBUG nova.compute.manager [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] [instance: c180cc04-79da-4529-a905-1985a85b7cf5] Starting instance... {{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 831.155233] env[61839]: DEBUG oslo_vmware.api [None req-a242d3b0-5131-4888-830a-8c848380688b tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Task: {'id': task-1314437, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.259293] env[61839]: DEBUG nova.network.neutron [None req-3b56f212-0cbc-4766-86f9-61b8fddca264 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] [instance: 49d4720b-83e3-47d9-b727-5bb255de2e7c] Successfully updated port: 32fd36fe-b511-4c88-9578-3d72ebf35d22 {{(pid=61839) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 831.459927] env[61839]: DEBUG nova.compute.utils [None req-3697bd91-57ab-4763-82ae-06bd5d1d7501 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] Using /dev/sd instead of None {{(pid=61839) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 831.461978] env[61839]: DEBUG nova.compute.manager [None req-3697bd91-57ab-4763-82ae-06bd5d1d7501 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] [instance: 62959833-5834-4c0a-bf4e-3ac1157b3b0c] Allocating IP information in the background. {{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 831.462261] env[61839]: DEBUG nova.network.neutron [None req-3697bd91-57ab-4763-82ae-06bd5d1d7501 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] [instance: 62959833-5834-4c0a-bf4e-3ac1157b3b0c] allocate_for_instance() {{(pid=61839) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 831.532239] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314440, 'name': CreateVM_Task, 'duration_secs': 0.445492} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 831.532555] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 39728872-2d30-48eb-90da-412f1e45971c] Created VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 831.533118] env[61839]: DEBUG oslo_concurrency.lockutils [None req-c09e423b-508c-42ac-8e81-438f1973d598 tempest-ServersTestManualDisk-590910019 tempest-ServersTestManualDisk-590910019-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 831.533296] env[61839]: DEBUG oslo_concurrency.lockutils [None req-c09e423b-508c-42ac-8e81-438f1973d598 tempest-ServersTestManualDisk-590910019 tempest-ServersTestManualDisk-590910019-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 831.535901] env[61839]: DEBUG oslo_concurrency.lockutils [None req-c09e423b-508c-42ac-8e81-438f1973d598 tempest-ServersTestManualDisk-590910019 tempest-ServersTestManualDisk-590910019-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 831.536218] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f7b90871-ce06-4d24-8b4a-fceb656ee151 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.550217] env[61839]: DEBUG oslo_vmware.api [None 
req-c09e423b-508c-42ac-8e81-438f1973d598 tempest-ServersTestManualDisk-590910019 tempest-ServersTestManualDisk-590910019-project-member] Waiting for the task: (returnval){ [ 831.550217] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52de3d42-78a4-4e82-4502-a0b0970bcbfd" [ 831.550217] env[61839]: _type = "Task" [ 831.550217] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 831.559914] env[61839]: DEBUG oslo_vmware.api [None req-c09e423b-508c-42ac-8e81-438f1973d598 tempest-ServersTestManualDisk-590910019 tempest-ServersTestManualDisk-590910019-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52de3d42-78a4-4e82-4502-a0b0970bcbfd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.573402] env[61839]: DEBUG nova.policy [None req-3697bd91-57ab-4763-82ae-06bd5d1d7501 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c65b833e17ab49d6894b76d8e22ccc64', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '434f932a2a9e448d99fac449918affe9', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61839) authorize /opt/stack/nova/nova/policy.py:201}} [ 831.651419] env[61839]: INFO nova.compute.manager [None req-6c4e691d-a55a-4273-a9e4-fb954224d381 tempest-ServerMetadataNegativeTestJSON-1868427009 tempest-ServerMetadataNegativeTestJSON-1868427009-project-member] [instance: 406da948-71c7-4c28-9ee3-10af64b1ab51] Took 35.65 seconds to build instance. [ 831.668807] env[61839]: DEBUG oslo_vmware.api [None req-a242d3b0-5131-4888-830a-8c848380688b tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Task: {'id': task-1314437, 'name': PowerOnVM_Task, 'duration_secs': 0.730398} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 831.669101] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-a242d3b0-5131-4888-830a-8c848380688b tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: 694a5d4b-3673-406b-a24a-d37fad33e549] Powered on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 831.669391] env[61839]: INFO nova.compute.manager [None req-a242d3b0-5131-4888-830a-8c848380688b tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: 694a5d4b-3673-406b-a24a-d37fad33e549] Took 9.18 seconds to spawn the instance on the hypervisor. 
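
Every "Waiting for the task" / "progress is N%" / "completed successfully" triplet in this log is one run of the same poll loop in oslo.vmware's api module. A sketch of the pattern; poll_task_info and the 0.5 s interval are illustrative stand-ins, not oslo.vmware's real internals:

```python
import time

def wait_for_task(poll_task_info, task_id, interval=0.5):
    """Block until a vCenter task leaves its running states.

    poll_task_info stands in for the RetrievePropertiesEx round trip behind
    each _poll_task record; assume it returns dicts such as
    {'state': 'running', 'progress': 88} or {'state': 'success'}.
    """
    started = time.monotonic()
    while True:
        info = poll_task_info(task_id)
        if info["state"] == "success":
            # Matches "'duration_secs': 0.730398} completed successfully."
            return time.monotonic() - started
        if info["state"] == "error":
            raise RuntimeError(f"task {task_id} failed: {info.get('error')}")
        # Each sleep/poll pair produces one 'progress is N%' line.
        time.sleep(interval)
```

This explains the shape of the trace for task-1314437 above: one submission, a 0% poll, an 88% poll, then completion with a recorded duration.
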
[ 831.669551] env[61839]: DEBUG nova.compute.manager [None req-a242d3b0-5131-4888-830a-8c848380688b tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: 694a5d4b-3673-406b-a24a-d37fad33e549] Checking state {{(pid=61839) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 831.670580] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6eb494de-0bec-47b8-970a-e4be1d68bf77 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.685029] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 831.762294] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3b56f212-0cbc-4766-86f9-61b8fddca264 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] Acquiring lock "refresh_cache-49d4720b-83e3-47d9-b727-5bb255de2e7c" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 831.762442] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3b56f212-0cbc-4766-86f9-61b8fddca264 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] Acquired lock "refresh_cache-49d4720b-83e3-47d9-b727-5bb255de2e7c" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 831.762615] env[61839]: DEBUG nova.network.neutron [None req-3b56f212-0cbc-4766-86f9-61b8fddca264 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] [instance: 49d4720b-83e3-47d9-b727-5bb255de2e7c] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 831.838256] env[61839]: INFO nova.compute.manager [None req-29672a53-c7e3-4fb5-8183-aa50247b3c95 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 047080fa-8781-47b1-89d8-2e4c8031b164] Rebuilding instance [ 831.897366] env[61839]: DEBUG nova.compute.manager [None req-29672a53-c7e3-4fb5-8183-aa50247b3c95 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 047080fa-8781-47b1-89d8-2e4c8031b164] Checking state {{(pid=61839) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 831.898443] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-831c7261-9272-4f0c-a0fe-f911ff189e2c {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.953535] env[61839]: DEBUG nova.compute.manager [req-720232b6-763e-4500-b5f2-1c18c749490e req-18eeef58-7975-4664-8462-6a0b0448d0c9 service nova] [instance: 49d4720b-83e3-47d9-b727-5bb255de2e7c] Received event network-vif-plugged-32fd36fe-b511-4c88-9578-3d72ebf35d22 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 831.953812] env[61839]: DEBUG oslo_concurrency.lockutils [req-720232b6-763e-4500-b5f2-1c18c749490e 
req-18eeef58-7975-4664-8462-6a0b0448d0c9 service nova] Acquiring lock "49d4720b-83e3-47d9-b727-5bb255de2e7c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 831.954064] env[61839]: DEBUG oslo_concurrency.lockutils [req-720232b6-763e-4500-b5f2-1c18c749490e req-18eeef58-7975-4664-8462-6a0b0448d0c9 service nova] Lock "49d4720b-83e3-47d9-b727-5bb255de2e7c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 831.954283] env[61839]: DEBUG oslo_concurrency.lockutils [req-720232b6-763e-4500-b5f2-1c18c749490e req-18eeef58-7975-4664-8462-6a0b0448d0c9 service nova] Lock "49d4720b-83e3-47d9-b727-5bb255de2e7c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 831.954499] env[61839]: DEBUG nova.compute.manager [req-720232b6-763e-4500-b5f2-1c18c749490e req-18eeef58-7975-4664-8462-6a0b0448d0c9 service nova] [instance: 49d4720b-83e3-47d9-b727-5bb255de2e7c] No waiting events found dispatching network-vif-plugged-32fd36fe-b511-4c88-9578-3d72ebf35d22 {{(pid=61839) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 831.955298] env[61839]: WARNING nova.compute.manager [req-720232b6-763e-4500-b5f2-1c18c749490e req-18eeef58-7975-4664-8462-6a0b0448d0c9 service nova] [instance: 49d4720b-83e3-47d9-b727-5bb255de2e7c] Received unexpected event network-vif-plugged-32fd36fe-b511-4c88-9578-3d72ebf35d22 for instance with vm_state building and task_state spawning. [ 831.955298] env[61839]: DEBUG nova.compute.manager [req-720232b6-763e-4500-b5f2-1c18c749490e req-18eeef58-7975-4664-8462-6a0b0448d0c9 service nova] [instance: 49d4720b-83e3-47d9-b727-5bb255de2e7c] Received event network-changed-32fd36fe-b511-4c88-9578-3d72ebf35d22 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 831.955298] env[61839]: DEBUG nova.compute.manager [req-720232b6-763e-4500-b5f2-1c18c749490e req-18eeef58-7975-4664-8462-6a0b0448d0c9 service nova] [instance: 49d4720b-83e3-47d9-b727-5bb255de2e7c] Refreshing instance network info cache due to event network-changed-32fd36fe-b511-4c88-9578-3d72ebf35d22. {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 831.955458] env[61839]: DEBUG oslo_concurrency.lockutils [req-720232b6-763e-4500-b5f2-1c18c749490e req-18eeef58-7975-4664-8462-6a0b0448d0c9 service nova] Acquiring lock "refresh_cache-49d4720b-83e3-47d9-b727-5bb255de2e7c" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 831.965713] env[61839]: DEBUG nova.compute.manager [None req-3697bd91-57ab-4763-82ae-06bd5d1d7501 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] [instance: 62959833-5834-4c0a-bf4e-3ac1157b3b0c] Start building block device mappings for instance. 
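
The records above show Neutron's network-vif-plugged event arriving before any compute thread has registered a waiter for it, which is exactly why a WARNING about an "unexpected event" follows a clean acquire/release of the "<uuid>-events" lock. A sketch of that pop-or-warn dispatch, with hypothetical names rather than Nova's actual internals:

```python
import logging
from collections import defaultdict
from threading import Event, Lock

LOG = logging.getLogger(__name__)

class InstanceEvents:
    """Waiter table guarded by the per-instance '<uuid>-events' lock."""

    def __init__(self):
        self._lock = Lock()
        self._waiters = defaultdict(dict)  # instance uuid -> {event name: Event}

    def prepare_for_event(self, instance_uuid, event_name):
        with self._lock:
            ev = Event()
            self._waiters[instance_uuid][event_name] = ev
            return ev

    def pop_instance_event(self, instance_uuid, event_name):
        with self._lock:  # the "acquired ... held 0.000s" pair in the log
            return self._waiters[instance_uuid].pop(event_name, None)

def external_instance_event(events, instance_uuid, event_name):
    waiter = events.pop_instance_event(instance_uuid, event_name)
    if waiter is None:
        # "No waiting events found dispatching ..." plus the WARNING above
        LOG.warning("Received unexpected event %s for instance %s",
                    event_name, instance_uuid)
    else:
        waiter.set()  # unblocks a spawn thread waiting on the plug event
```

The warning is benign here: the instance is still in task_state spawning, so the event simply beat the waiter registration.
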
{{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 832.057121] env[61839]: DEBUG nova.network.neutron [req-16ad573b-e9ee-40a5-abe6-3025c918bf39 req-79f8866e-1798-47f4-83e5-ed0210fe7817 service nova] [instance: 39728872-2d30-48eb-90da-412f1e45971c] Updated VIF entry in instance network info cache for port 2615f303-83dd-4c9f-b09e-7cfb5da3276f. {{(pid=61839) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 832.057121] env[61839]: DEBUG nova.network.neutron [req-16ad573b-e9ee-40a5-abe6-3025c918bf39 req-79f8866e-1798-47f4-83e5-ed0210fe7817 service nova] [instance: 39728872-2d30-48eb-90da-412f1e45971c] Updating instance_info_cache with network_info: [{"id": "2615f303-83dd-4c9f-b09e-7cfb5da3276f", "address": "fa:16:3e:33:57:53", "network": {"id": "4f5987f4-a100-468f-86db-010c27144844", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-185915171-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d9a08aa6b2064b06874f5a2a3063d8b2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f0c0b05e-6d10-474c-9173-4c8f1dacac9f", "external-id": "nsx-vlan-transportzone-693", "segmentation_id": 693, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2615f303-83", "ovs_interfaceid": "2615f303-83dd-4c9f-b09e-7cfb5da3276f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 832.069950] env[61839]: DEBUG oslo_vmware.api [None req-c09e423b-508c-42ac-8e81-438f1973d598 tempest-ServersTestManualDisk-590910019 tempest-ServersTestManualDisk-590910019-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52de3d42-78a4-4e82-4502-a0b0970bcbfd, 'name': SearchDatastore_Task, 'duration_secs': 0.018447} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 832.073217] env[61839]: DEBUG oslo_concurrency.lockutils [None req-c09e423b-508c-42ac-8e81-438f1973d598 tempest-ServersTestManualDisk-590910019 tempest-ServersTestManualDisk-590910019-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 832.073484] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-c09e423b-508c-42ac-8e81-438f1973d598 tempest-ServersTestManualDisk-590910019 tempest-ServersTestManualDisk-590910019-project-member] [instance: 39728872-2d30-48eb-90da-412f1e45971c] Processing image e497cc62-282a-4a70-9770-22d80d8a1013 {{(pid=61839) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 832.073804] env[61839]: DEBUG oslo_concurrency.lockutils [None req-c09e423b-508c-42ac-8e81-438f1973d598 tempest-ServersTestManualDisk-590910019 tempest-ServersTestManualDisk-590910019-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 832.074039] env[61839]: DEBUG oslo_concurrency.lockutils [None req-c09e423b-508c-42ac-8e81-438f1973d598 tempest-ServersTestManualDisk-590910019 tempest-ServersTestManualDisk-590910019-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 832.076408] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-c09e423b-508c-42ac-8e81-438f1973d598 tempest-ServersTestManualDisk-590910019 tempest-ServersTestManualDisk-590910019-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 832.082144] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e573097b-fc28-4d61-92be-5b0e261d7dc7 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.092960] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-c09e423b-508c-42ac-8e81-438f1973d598 tempest-ServersTestManualDisk-590910019 tempest-ServersTestManualDisk-590910019-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 832.093203] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-c09e423b-508c-42ac-8e81-438f1973d598 tempest-ServersTestManualDisk-590910019 tempest-ServersTestManualDisk-590910019-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61839) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 832.094289] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c4d1d6e6-e952-4d97-bfd5-7d178a263b65 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.099911] env[61839]: DEBUG oslo_vmware.api [None req-c09e423b-508c-42ac-8e81-438f1973d598 tempest-ServersTestManualDisk-590910019 tempest-ServersTestManualDisk-590910019-project-member] Waiting for the task: (returnval){ [ 832.099911] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]528aaccc-25e1-9d94-6b8b-274cae01cbcd" [ 832.099911] env[61839]: _type = "Task" [ 832.099911] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 832.118627] env[61839]: DEBUG oslo_vmware.api [None req-c09e423b-508c-42ac-8e81-438f1973d598 tempest-ServersTestManualDisk-590910019 tempest-ServersTestManualDisk-590910019-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]528aaccc-25e1-9d94-6b8b-274cae01cbcd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.165699] env[61839]: DEBUG oslo_concurrency.lockutils [None req-6c4e691d-a55a-4273-a9e4-fb954224d381 tempest-ServerMetadataNegativeTestJSON-1868427009 tempest-ServerMetadataNegativeTestJSON-1868427009-project-member] Lock "406da948-71c7-4c28-9ee3-10af64b1ab51" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 110.466s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 832.191387] env[61839]: INFO nova.compute.manager [None req-a242d3b0-5131-4888-830a-8c848380688b tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: 694a5d4b-3673-406b-a24a-d37fad33e549] Took 35.10 seconds to build instance. [ 832.263766] env[61839]: DEBUG nova.network.neutron [None req-3697bd91-57ab-4763-82ae-06bd5d1d7501 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] [instance: 62959833-5834-4c0a-bf4e-3ac1157b3b0c] Successfully created port: 8d3be36a-f8da-4212-9f18-edefa1681f82 {{(pid=61839) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 832.318596] env[61839]: DEBUG nova.network.neutron [None req-3b56f212-0cbc-4766-86f9-61b8fddca264 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] [instance: 49d4720b-83e3-47d9-b727-5bb255de2e7c] Instance cache missing network info. 
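
The repeated acquire/release of the "[datastore1] devstack-image-cache_base/<image-uuid>" lock is a check-then-fetch guard around the shared image cache: whoever holds the lock runs SearchDatastore_Task and only downloads the image on a miss. A sketch under those assumptions; lockutils.lock() is real oslo.concurrency API, while the two callables are placeholders for SearchDatastore_Task and the Glance download:

```python
from oslo_concurrency import lockutils

def fetch_image_if_missing(image_id, search_datastore, fetch_from_glance,
                           cache="[datastore1] devstack-image-cache_base"):
    """Ensure exactly one cached copy of the image VMDK exists.

    The lock name mirrors the log, serialising concurrent spawns of the
    same image so only one request ever downloads it.
    """
    vmdk = f"{cache}/{image_id}/{image_id}.vmdk"
    with lockutils.lock(f"{cache}/{image_id}"):
        if not search_datastore(vmdk):      # SearchDatastore_Task in the log
            fetch_from_glance(image_id, vmdk)  # cache miss: download once
    return vmdk
```

The log's "Acquired external semaphore" line shows Nova additionally takes a cross-worker semaphore on the same name; the sketch keeps only the in-process lock.
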
{{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 832.384626] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9c58a79-be32-40a3-b1a2-526713ff6c5f {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.398108] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb43b612-813a-4d43-b847-507c9addb589 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.443623] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-29672a53-c7e3-4fb5-8183-aa50247b3c95 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 047080fa-8781-47b1-89d8-2e4c8031b164] Powering off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 832.444825] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c196afa3-803a-45bc-bdf6-fbc4f7504222 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.447990] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62ab35d1-68dc-491b-bcca-4c1cf152301b {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.464117] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02424945-a51e-4903-b917-8a63f9589e23 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.469100] env[61839]: DEBUG oslo_vmware.api [None req-29672a53-c7e3-4fb5-8183-aa50247b3c95 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Waiting for the task: (returnval){ [ 832.469100] env[61839]: value = "task-1314441" [ 832.469100] env[61839]: _type = "Task" [ 832.469100] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 832.486512] env[61839]: DEBUG nova.compute.provider_tree [None req-cabdb6a6-9cdb-4077-8178-485aa41e5aff tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 832.493384] env[61839]: DEBUG oslo_vmware.api [None req-29672a53-c7e3-4fb5-8183-aa50247b3c95 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Task: {'id': task-1314441, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.566486] env[61839]: DEBUG oslo_concurrency.lockutils [req-16ad573b-e9ee-40a5-abe6-3025c918bf39 req-79f8866e-1798-47f4-83e5-ed0210fe7817 service nova] Releasing lock "refresh_cache-39728872-2d30-48eb-90da-412f1e45971c" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 832.613028] env[61839]: DEBUG oslo_vmware.api [None req-c09e423b-508c-42ac-8e81-438f1973d598 tempest-ServersTestManualDisk-590910019 tempest-ServersTestManualDisk-590910019-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]528aaccc-25e1-9d94-6b8b-274cae01cbcd, 'name': SearchDatastore_Task, 'duration_secs': 0.015529} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 832.613986] env[61839]: DEBUG nova.network.neutron [None req-3b56f212-0cbc-4766-86f9-61b8fddca264 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] [instance: 49d4720b-83e3-47d9-b727-5bb255de2e7c] Updating instance_info_cache with network_info: [{"id": "32fd36fe-b511-4c88-9578-3d72ebf35d22", "address": "fa:16:3e:d8:66:37", "network": {"id": "cfa7171d-5d3c-4388-b20d-8960aa44c02b", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.163", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a98e034276e44534a9feace637762da7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "073f8535-6b3a-4d21-a754-4c975554dcbf", "external-id": "nsx-vlan-transportzone-111", "segmentation_id": 111, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap32fd36fe-b5", "ovs_interfaceid": "32fd36fe-b511-4c88-9578-3d72ebf35d22", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 832.620681] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0a1037e7-e548-45db-a1e3-c27dfc0c005f {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.630979] env[61839]: DEBUG oslo_vmware.api [None req-c09e423b-508c-42ac-8e81-438f1973d598 tempest-ServersTestManualDisk-590910019 tempest-ServersTestManualDisk-590910019-project-member] Waiting for the task: (returnval){ [ 832.630979] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52dd9003-26c4-064a-50e3-4e918e260a96" [ 832.630979] env[61839]: _type = "Task" [ 832.630979] env[61839]: } to complete. 
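
Once the SearchDatastore_Task pending here confirms the cache is warm, the records that follow consume it: CopyVirtualDisk_Task clones the cached VMDK into the new instance's directory, so vCenter only pays for the small per-instance copy. A sketch of that step, reusing the hypothetical blocking task helper from the polling sketch above:

```python
def copy_cached_disk(run_task, image_id, instance_uuid, datastore="datastore1"):
    """Clone the cached VMDK into the instance directory, matching the
    'Copying Virtual Disk ... to ...' record below. run_task is assumed to
    submit a vCenter task and block until it completes.
    """
    src = f"[{datastore}] devstack-image-cache_base/{image_id}/{image_id}.vmdk"
    dst = f"[{datastore}] {instance_uuid}/{instance_uuid}.vmdk"
    run_task("VirtualDiskManager.CopyVirtualDisk_Task", source=src, dest=dst)
    return dst
```
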
{{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 832.644203] env[61839]: DEBUG oslo_vmware.api [None req-c09e423b-508c-42ac-8e81-438f1973d598 tempest-ServersTestManualDisk-590910019 tempest-ServersTestManualDisk-590910019-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52dd9003-26c4-064a-50e3-4e918e260a96, 'name': SearchDatastore_Task, 'duration_secs': 0.012057} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 832.644451] env[61839]: DEBUG oslo_concurrency.lockutils [None req-c09e423b-508c-42ac-8e81-438f1973d598 tempest-ServersTestManualDisk-590910019 tempest-ServersTestManualDisk-590910019-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 832.645103] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-c09e423b-508c-42ac-8e81-438f1973d598 tempest-ServersTestManualDisk-590910019 tempest-ServersTestManualDisk-590910019-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk to [datastore1] 39728872-2d30-48eb-90da-412f1e45971c/39728872-2d30-48eb-90da-412f1e45971c.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 832.645816] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-972bf636-acf5-4d05-9645-30699eb2840c {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.653602] env[61839]: DEBUG oslo_vmware.api [None req-c09e423b-508c-42ac-8e81-438f1973d598 tempest-ServersTestManualDisk-590910019 tempest-ServersTestManualDisk-590910019-project-member] Waiting for the task: (returnval){ [ 832.653602] env[61839]: value = "task-1314442" [ 832.653602] env[61839]: _type = "Task" [ 832.653602] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 832.666213] env[61839]: DEBUG oslo_vmware.api [None req-c09e423b-508c-42ac-8e81-438f1973d598 tempest-ServersTestManualDisk-590910019 tempest-ServersTestManualDisk-590910019-project-member] Task: {'id': task-1314442, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.672742] env[61839]: DEBUG nova.compute.manager [None req-50defe20-1e8f-44c8-9564-69613a54695f tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] [instance: c996d7db-4b73-4445-9989-4efb2cd852e8] Starting instance... 
{{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 832.694272] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a242d3b0-5131-4888-830a-8c848380688b tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Lock "694a5d4b-3673-406b-a24a-d37fad33e549" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 90.978s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 832.978902] env[61839]: DEBUG nova.compute.manager [None req-3697bd91-57ab-4763-82ae-06bd5d1d7501 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] [instance: 62959833-5834-4c0a-bf4e-3ac1157b3b0c] Start spawning the instance on the hypervisor. {{(pid=61839) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 832.986847] env[61839]: DEBUG oslo_vmware.api [None req-29672a53-c7e3-4fb5-8183-aa50247b3c95 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Task: {'id': task-1314441, 'name': PowerOffVM_Task, 'duration_secs': 0.411353} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 832.988520] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-29672a53-c7e3-4fb5-8183-aa50247b3c95 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 047080fa-8781-47b1-89d8-2e4c8031b164] Powered off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 832.988799] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-29672a53-c7e3-4fb5-8183-aa50247b3c95 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 047080fa-8781-47b1-89d8-2e4c8031b164] Destroying instance {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 832.990484] env[61839]: DEBUG nova.scheduler.client.report [None req-cabdb6a6-9cdb-4077-8178-485aa41e5aff tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 832.993479] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-359934c0-4352-4d87-a23d-3c494c461e39 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.003584] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-29672a53-c7e3-4fb5-8183-aa50247b3c95 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 047080fa-8781-47b1-89d8-2e4c8031b164] Unregistering the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 833.003584] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM 
with opID=oslo.vmware-7aadf441-9bdb-4686-9835-a5b47f7ccc6b {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.016705] env[61839]: DEBUG nova.virt.hardware [None req-3697bd91-57ab-4763-82ae-06bd5d1d7501 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-18T16:51:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-18T16:50:57Z,direct_url=,disk_format='vmdk',id=e497cc62-282a-4a70-9770-22d80d8a1013,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a98e034276e44534a9feace637762da7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-18T16:50:58Z,virtual_size=,visibility=), allow threads: False {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 833.016705] env[61839]: DEBUG nova.virt.hardware [None req-3697bd91-57ab-4763-82ae-06bd5d1d7501 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] Flavor limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 833.016705] env[61839]: DEBUG nova.virt.hardware [None req-3697bd91-57ab-4763-82ae-06bd5d1d7501 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] Image limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 833.017030] env[61839]: DEBUG nova.virt.hardware [None req-3697bd91-57ab-4763-82ae-06bd5d1d7501 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] Flavor pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 833.017248] env[61839]: DEBUG nova.virt.hardware [None req-3697bd91-57ab-4763-82ae-06bd5d1d7501 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] Image pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 833.017451] env[61839]: DEBUG nova.virt.hardware [None req-3697bd91-57ab-4763-82ae-06bd5d1d7501 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 833.019524] env[61839]: DEBUG nova.virt.hardware [None req-3697bd91-57ab-4763-82ae-06bd5d1d7501 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 833.019699] env[61839]: DEBUG nova.virt.hardware [None req-3697bd91-57ab-4763-82ae-06bd5d1d7501 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61839) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 833.019879] env[61839]: DEBUG nova.virt.hardware [None req-3697bd91-57ab-4763-82ae-06bd5d1d7501 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] Got 1 possible topologies {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 833.020056] env[61839]: DEBUG nova.virt.hardware [None req-3697bd91-57ab-4763-82ae-06bd5d1d7501 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 833.020234] env[61839]: DEBUG nova.virt.hardware [None req-3697bd91-57ab-4763-82ae-06bd5d1d7501 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 833.021409] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac659ece-afab-4d8b-93f3-1045a24fa4fc {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.029404] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d51d8370-584c-4635-9ef4-bc9b4b0e55dd {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.070039] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-29672a53-c7e3-4fb5-8183-aa50247b3c95 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 047080fa-8781-47b1-89d8-2e4c8031b164] Unregistered the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 833.070039] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-29672a53-c7e3-4fb5-8183-aa50247b3c95 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 047080fa-8781-47b1-89d8-2e4c8031b164] Deleting contents of the VM from datastore datastore1 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 833.070039] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-29672a53-c7e3-4fb5-8183-aa50247b3c95 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Deleting the datastore file [datastore1] 047080fa-8781-47b1-89d8-2e4c8031b164 {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 833.070594] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7c505c00-9cd9-4785-845c-d3efd866fa83 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.080713] env[61839]: DEBUG oslo_vmware.api [None req-29672a53-c7e3-4fb5-8183-aa50247b3c95 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Waiting for the task: (returnval){ [ 833.080713] env[61839]: value = "task-1314444" [ 833.080713] env[61839]: _type = "Task" [ 833.080713] env[61839]: } to complete. 
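
The rebuild of 047080fa-8781-47b1-89d8-2e4c8031b164 walks the driver's destroy path in strict order: power off, unregister from vCenter inventory, then delete the instance's datastore directory, since unregistering alone frees no storage. Condensed into a sketch, with each call named after the vCenter method in the records above; `call` is a hypothetical helper that invokes a method and, for *_Task methods, waits for completion:

```python
def destroy_instance(call, instance_uuid, datastore="datastore1"):
    """Tear-down order taken from the records above."""
    call("VirtualMachine.PowerOffVM_Task")   # "Powering off the VM"
    call("VirtualMachine.UnregisterVM")      # synchronous: no task wait logged
    # Unregistering frees no storage, hence the explicit directory delete:
    call("FileManager.DeleteDatastoreFile_Task",
         name=f"[{datastore}] {instance_uuid}")
```
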
{{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 833.091073] env[61839]: DEBUG oslo_vmware.api [None req-29672a53-c7e3-4fb5-8183-aa50247b3c95 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Task: {'id': task-1314444, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.121500] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3b56f212-0cbc-4766-86f9-61b8fddca264 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] Releasing lock "refresh_cache-49d4720b-83e3-47d9-b727-5bb255de2e7c" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 833.121886] env[61839]: DEBUG nova.compute.manager [None req-3b56f212-0cbc-4766-86f9-61b8fddca264 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] [instance: 49d4720b-83e3-47d9-b727-5bb255de2e7c] Instance network_info: |[{"id": "32fd36fe-b511-4c88-9578-3d72ebf35d22", "address": "fa:16:3e:d8:66:37", "network": {"id": "cfa7171d-5d3c-4388-b20d-8960aa44c02b", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.163", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a98e034276e44534a9feace637762da7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "073f8535-6b3a-4d21-a754-4c975554dcbf", "external-id": "nsx-vlan-transportzone-111", "segmentation_id": 111, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap32fd36fe-b5", "ovs_interfaceid": "32fd36fe-b511-4c88-9578-3d72ebf35d22", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 833.122282] env[61839]: DEBUG oslo_concurrency.lockutils [req-720232b6-763e-4500-b5f2-1c18c749490e req-18eeef58-7975-4664-8462-6a0b0448d0c9 service nova] Acquired lock "refresh_cache-49d4720b-83e3-47d9-b727-5bb255de2e7c" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 833.123332] env[61839]: DEBUG nova.network.neutron [req-720232b6-763e-4500-b5f2-1c18c749490e req-18eeef58-7975-4664-8462-6a0b0448d0c9 service nova] [instance: 49d4720b-83e3-47d9-b727-5bb255de2e7c] Refreshing network info cache for port 32fd36fe-b511-4c88-9578-3d72ebf35d22 {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 833.123951] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-3b56f212-0cbc-4766-86f9-61b8fddca264 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] [instance: 49d4720b-83e3-47d9-b727-5bb255de2e7c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d8:66:37', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '073f8535-6b3a-4d21-a754-4c975554dcbf', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 
'iface_id': '32fd36fe-b511-4c88-9578-3d72ebf35d22', 'vif_model': 'vmxnet3'}] {{(pid=61839) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 833.132035] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-3b56f212-0cbc-4766-86f9-61b8fddca264 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] Creating folder: Project (766b763d07a649a3aab0e3299625641a). Parent ref: group-v281288. {{(pid=61839) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 833.134095] env[61839]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8cdc668e-d3fb-42ee-9df8-fb9675ef65d7 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.145377] env[61839]: INFO nova.virt.vmwareapi.vm_util [None req-3b56f212-0cbc-4766-86f9-61b8fddca264 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] Created folder: Project (766b763d07a649a3aab0e3299625641a) in parent group-v281288. [ 833.145602] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-3b56f212-0cbc-4766-86f9-61b8fddca264 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] Creating folder: Instances. Parent ref: group-v281348. {{(pid=61839) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 833.145852] env[61839]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9dc5407e-fe86-4c17-9f80-843c90824dbb {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.159804] env[61839]: INFO nova.virt.vmwareapi.vm_util [None req-3b56f212-0cbc-4766-86f9-61b8fddca264 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] Created folder: Instances in parent group-v281348. [ 833.160127] env[61839]: DEBUG oslo.service.loopingcall [None req-3b56f212-0cbc-4766-86f9-61b8fddca264 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 833.161102] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 49d4720b-83e3-47d9-b727-5bb255de2e7c] Creating VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 833.161702] env[61839]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b319d3db-52b3-4a41-959c-d2753aad47c4 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.186772] env[61839]: DEBUG oslo_vmware.api [None req-c09e423b-508c-42ac-8e81-438f1973d598 tempest-ServersTestManualDisk-590910019 tempest-ServersTestManualDisk-590910019-project-member] Task: {'id': task-1314442, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.193416] env[61839]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 833.193416] env[61839]: value = "task-1314447" [ 833.193416] env[61839]: _type = "Task" [ 833.193416] env[61839]: } to complete. 
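
The inventory report a few records back for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 fixes this node's placement capacity; the schedulable amount per resource class follows from (total - reserved) * allocation_ratio. Worked out with the exact figures from the log:

```python
# Inventory as reported by the scheduler report client above:
inventory = {
    "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
    "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
    print(f"{rc}: {capacity:g} schedulable")
# VCPU: 192 schedulable       (48 host vCPUs oversubscribed 4x)
# MEMORY_MB: 196078 schedulable
# DISK_GB: 400 schedulable
```

The 4x VCPU oversubscription is what lets the many concurrent tempest instances in this run land on a single node.
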
{{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 833.204862] env[61839]: DEBUG nova.compute.manager [None req-6de79a71-94ce-468a-ba01-6c81e70cb056 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: ef49a6f5-27c3-4595-af65-d6a5aa47d4e4] Starting instance... {{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 833.209222] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314447, 'name': CreateVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.217256] env[61839]: DEBUG oslo_concurrency.lockutils [None req-50defe20-1e8f-44c8-9564-69613a54695f tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 833.365198] env[61839]: DEBUG nova.compute.manager [req-20046d13-99cf-415f-9f32-f6a5fa2655ad req-b8012a94-649a-40e2-b61f-5dfb26fd2a94 service nova] [instance: 694a5d4b-3673-406b-a24a-d37fad33e549] Received event network-changed-4c2c0fb5-064b-4c53-9498-77b9ddc16884 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 833.365411] env[61839]: DEBUG nova.compute.manager [req-20046d13-99cf-415f-9f32-f6a5fa2655ad req-b8012a94-649a-40e2-b61f-5dfb26fd2a94 service nova] [instance: 694a5d4b-3673-406b-a24a-d37fad33e549] Refreshing instance network info cache due to event network-changed-4c2c0fb5-064b-4c53-9498-77b9ddc16884. {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 833.365631] env[61839]: DEBUG oslo_concurrency.lockutils [req-20046d13-99cf-415f-9f32-f6a5fa2655ad req-b8012a94-649a-40e2-b61f-5dfb26fd2a94 service nova] Acquiring lock "refresh_cache-694a5d4b-3673-406b-a24a-d37fad33e549" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 833.365780] env[61839]: DEBUG oslo_concurrency.lockutils [req-20046d13-99cf-415f-9f32-f6a5fa2655ad req-b8012a94-649a-40e2-b61f-5dfb26fd2a94 service nova] Acquired lock "refresh_cache-694a5d4b-3673-406b-a24a-d37fad33e549" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 833.365948] env[61839]: DEBUG nova.network.neutron [req-20046d13-99cf-415f-9f32-f6a5fa2655ad req-b8012a94-649a-40e2-b61f-5dfb26fd2a94 service nova] [instance: 694a5d4b-3673-406b-a24a-d37fad33e549] Refreshing network info cache for port 4c2c0fb5-064b-4c53-9498-77b9ddc16884 {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 833.497931] env[61839]: DEBUG oslo_concurrency.lockutils [None req-cabdb6a6-9cdb-4077-8178-485aa41e5aff tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.556s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 833.498452] env[61839]: DEBUG nova.compute.manager [None req-cabdb6a6-9cdb-4077-8178-485aa41e5aff tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: 40c54d84-8e50-483a-b4e0-5f1cc72b0880] Start building networks 
asynchronously for instance. {{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 833.501519] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a5856ade-37cd-4042-9d02-1ead51c7ffbc tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 12.501s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 833.504523] env[61839]: DEBUG nova.objects.instance [None req-a5856ade-37cd-4042-9d02-1ead51c7ffbc tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Lazy-loading 'resources' on Instance uuid e65da0fd-e877-4b25-a319-e4d65397056a {{(pid=61839) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 833.596542] env[61839]: DEBUG oslo_vmware.api [None req-29672a53-c7e3-4fb5-8183-aa50247b3c95 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Task: {'id': task-1314444, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.452801} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 833.596542] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-29672a53-c7e3-4fb5-8183-aa50247b3c95 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Deleted the datastore file {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 833.597046] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-29672a53-c7e3-4fb5-8183-aa50247b3c95 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 047080fa-8781-47b1-89d8-2e4c8031b164] Deleted contents of the VM from datastore datastore1 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 833.597046] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-29672a53-c7e3-4fb5-8183-aa50247b3c95 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 047080fa-8781-47b1-89d8-2e4c8031b164] Instance destroyed {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 833.619205] env[61839]: DEBUG oslo_concurrency.lockutils [None req-eaf3cde2-930c-48e4-9dfc-5ed5eef56c0b tempest-ServerMetadataNegativeTestJSON-1868427009 tempest-ServerMetadataNegativeTestJSON-1868427009-project-member] Acquiring lock "406da948-71c7-4c28-9ee3-10af64b1ab51" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 833.619205] env[61839]: DEBUG oslo_concurrency.lockutils [None req-eaf3cde2-930c-48e4-9dfc-5ed5eef56c0b tempest-ServerMetadataNegativeTestJSON-1868427009 tempest-ServerMetadataNegativeTestJSON-1868427009-project-member] Lock "406da948-71c7-4c28-9ee3-10af64b1ab51" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 833.619783] env[61839]: DEBUG oslo_concurrency.lockutils [None req-eaf3cde2-930c-48e4-9dfc-5ed5eef56c0b tempest-ServerMetadataNegativeTestJSON-1868427009 tempest-ServerMetadataNegativeTestJSON-1868427009-project-member] Acquiring lock "406da948-71c7-4c28-9ee3-10af64b1ab51-events" by
"nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 833.619783] env[61839]: DEBUG oslo_concurrency.lockutils [None req-eaf3cde2-930c-48e4-9dfc-5ed5eef56c0b tempest-ServerMetadataNegativeTestJSON-1868427009 tempest-ServerMetadataNegativeTestJSON-1868427009-project-member] Lock "406da948-71c7-4c28-9ee3-10af64b1ab51-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 833.619783] env[61839]: DEBUG oslo_concurrency.lockutils [None req-eaf3cde2-930c-48e4-9dfc-5ed5eef56c0b tempest-ServerMetadataNegativeTestJSON-1868427009 tempest-ServerMetadataNegativeTestJSON-1868427009-project-member] Lock "406da948-71c7-4c28-9ee3-10af64b1ab51-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 833.623030] env[61839]: INFO nova.compute.manager [None req-eaf3cde2-930c-48e4-9dfc-5ed5eef56c0b tempest-ServerMetadataNegativeTestJSON-1868427009 tempest-ServerMetadataNegativeTestJSON-1868427009-project-member] [instance: 406da948-71c7-4c28-9ee3-10af64b1ab51] Terminating instance [ 833.624844] env[61839]: DEBUG nova.compute.manager [None req-eaf3cde2-930c-48e4-9dfc-5ed5eef56c0b tempest-ServerMetadataNegativeTestJSON-1868427009 tempest-ServerMetadataNegativeTestJSON-1868427009-project-member] [instance: 406da948-71c7-4c28-9ee3-10af64b1ab51] Start destroying the instance on the hypervisor. {{(pid=61839) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 833.625076] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-eaf3cde2-930c-48e4-9dfc-5ed5eef56c0b tempest-ServerMetadataNegativeTestJSON-1868427009 tempest-ServerMetadataNegativeTestJSON-1868427009-project-member] [instance: 406da948-71c7-4c28-9ee3-10af64b1ab51] Destroying instance {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 833.625906] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d992a1c-d512-4ac1-af69-b1b475ed03b9 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.635922] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-eaf3cde2-930c-48e4-9dfc-5ed5eef56c0b tempest-ServerMetadataNegativeTestJSON-1868427009 tempest-ServerMetadataNegativeTestJSON-1868427009-project-member] [instance: 406da948-71c7-4c28-9ee3-10af64b1ab51] Powering off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 833.637088] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ded3fd90-b8e5-406a-a278-e1a55e1f32c0 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.643940] env[61839]: DEBUG oslo_vmware.api [None req-eaf3cde2-930c-48e4-9dfc-5ed5eef56c0b tempest-ServerMetadataNegativeTestJSON-1868427009 tempest-ServerMetadataNegativeTestJSON-1868427009-project-member] Waiting for the task: (returnval){ [ 833.643940] env[61839]: value = "task-1314448" [ 833.643940] env[61839]: _type = "Task" [ 833.643940] env[61839]: } to complete.
{{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 833.652514] env[61839]: DEBUG oslo_vmware.api [None req-eaf3cde2-930c-48e4-9dfc-5ed5eef56c0b tempest-ServerMetadataNegativeTestJSON-1868427009 tempest-ServerMetadataNegativeTestJSON-1868427009-project-member] Task: {'id': task-1314448, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.663805] env[61839]: DEBUG oslo_vmware.api [None req-c09e423b-508c-42ac-8e81-438f1973d598 tempest-ServersTestManualDisk-590910019 tempest-ServersTestManualDisk-590910019-project-member] Task: {'id': task-1314442, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.730687} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 833.664109] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-c09e423b-508c-42ac-8e81-438f1973d598 tempest-ServersTestManualDisk-590910019 tempest-ServersTestManualDisk-590910019-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk to [datastore1] 39728872-2d30-48eb-90da-412f1e45971c/39728872-2d30-48eb-90da-412f1e45971c.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 833.664338] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-c09e423b-508c-42ac-8e81-438f1973d598 tempest-ServersTestManualDisk-590910019 tempest-ServersTestManualDisk-590910019-project-member] [instance: 39728872-2d30-48eb-90da-412f1e45971c] Extending root virtual disk to 1048576 {{(pid=61839) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 833.664599] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-109bc3a6-952b-4493-b856-a1f427b2c844 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.671547] env[61839]: DEBUG oslo_vmware.api [None req-c09e423b-508c-42ac-8e81-438f1973d598 tempest-ServersTestManualDisk-590910019 tempest-ServersTestManualDisk-590910019-project-member] Waiting for the task: (returnval){ [ 833.671547] env[61839]: value = "task-1314449" [ 833.671547] env[61839]: _type = "Task" [ 833.671547] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 833.683586] env[61839]: DEBUG oslo_vmware.api [None req-c09e423b-508c-42ac-8e81-438f1973d598 tempest-ServersTestManualDisk-590910019 tempest-ServersTestManualDisk-590910019-project-member] Task: {'id': task-1314449, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.703538] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314447, 'name': CreateVM_Task} progress is 99%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.728529] env[61839]: DEBUG oslo_concurrency.lockutils [None req-6de79a71-94ce-468a-ba01-6c81e70cb056 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 834.007665] env[61839]: DEBUG nova.compute.utils [None req-cabdb6a6-9cdb-4077-8178-485aa41e5aff tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Using /dev/sd instead of None {{(pid=61839) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 834.014245] env[61839]: DEBUG nova.compute.manager [None req-cabdb6a6-9cdb-4077-8178-485aa41e5aff tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: 40c54d84-8e50-483a-b4e0-5f1cc72b0880] Allocating IP information in the background. {{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 834.014472] env[61839]: DEBUG nova.network.neutron [None req-cabdb6a6-9cdb-4077-8178-485aa41e5aff tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: 40c54d84-8e50-483a-b4e0-5f1cc72b0880] allocate_for_instance() {{(pid=61839) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 834.155668] env[61839]: DEBUG nova.network.neutron [req-720232b6-763e-4500-b5f2-1c18c749490e req-18eeef58-7975-4664-8462-6a0b0448d0c9 service nova] [instance: 49d4720b-83e3-47d9-b727-5bb255de2e7c] Updated VIF entry in instance network info cache for port 32fd36fe-b511-4c88-9578-3d72ebf35d22. 
{{(pid=61839) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 834.156180] env[61839]: DEBUG nova.network.neutron [req-720232b6-763e-4500-b5f2-1c18c749490e req-18eeef58-7975-4664-8462-6a0b0448d0c9 service nova] [instance: 49d4720b-83e3-47d9-b727-5bb255de2e7c] Updating instance_info_cache with network_info: [{"id": "32fd36fe-b511-4c88-9578-3d72ebf35d22", "address": "fa:16:3e:d8:66:37", "network": {"id": "cfa7171d-5d3c-4388-b20d-8960aa44c02b", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.163", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a98e034276e44534a9feace637762da7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "073f8535-6b3a-4d21-a754-4c975554dcbf", "external-id": "nsx-vlan-transportzone-111", "segmentation_id": 111, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap32fd36fe-b5", "ovs_interfaceid": "32fd36fe-b511-4c88-9578-3d72ebf35d22", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 834.163937] env[61839]: DEBUG oslo_vmware.api [None req-eaf3cde2-930c-48e4-9dfc-5ed5eef56c0b tempest-ServerMetadataNegativeTestJSON-1868427009 tempest-ServerMetadataNegativeTestJSON-1868427009-project-member] Task: {'id': task-1314448, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.171921] env[61839]: DEBUG nova.policy [None req-cabdb6a6-9cdb-4077-8178-485aa41e5aff tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '76a29e808031416ab8895e89c337be6e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7e03467b7fba46a9aac1562a1cb8368e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61839) authorize /opt/stack/nova/nova/policy.py:201}} [ 834.187482] env[61839]: DEBUG oslo_vmware.api [None req-c09e423b-508c-42ac-8e81-438f1973d598 tempest-ServersTestManualDisk-590910019 tempest-ServersTestManualDisk-590910019-project-member] Task: {'id': task-1314449, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.083479} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 834.190103] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-c09e423b-508c-42ac-8e81-438f1973d598 tempest-ServersTestManualDisk-590910019 tempest-ServersTestManualDisk-590910019-project-member] [instance: 39728872-2d30-48eb-90da-412f1e45971c] Extended root virtual disk {{(pid=61839) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 834.191141] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25e8c5f5-acf4-4541-8fec-d2d04ae6bd19 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.202400] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314447, 'name': CreateVM_Task} progress is 99%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.225448] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-c09e423b-508c-42ac-8e81-438f1973d598 tempest-ServersTestManualDisk-590910019 tempest-ServersTestManualDisk-590910019-project-member] [instance: 39728872-2d30-48eb-90da-412f1e45971c] Reconfiguring VM instance instance-00000041 to attach disk [datastore1] 39728872-2d30-48eb-90da-412f1e45971c/39728872-2d30-48eb-90da-412f1e45971c.vmdk or device None with type sparse {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 834.227742] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9ba0cb36-013c-44e6-b05b-e5866d749b95 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.246087] env[61839]: DEBUG nova.network.neutron [req-20046d13-99cf-415f-9f32-f6a5fa2655ad req-b8012a94-649a-40e2-b61f-5dfb26fd2a94 service nova] [instance: 694a5d4b-3673-406b-a24a-d37fad33e549] Updated VIF entry in instance network info cache for port 4c2c0fb5-064b-4c53-9498-77b9ddc16884. 
{{(pid=61839) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 834.246443] env[61839]: DEBUG nova.network.neutron [req-20046d13-99cf-415f-9f32-f6a5fa2655ad req-b8012a94-649a-40e2-b61f-5dfb26fd2a94 service nova] [instance: 694a5d4b-3673-406b-a24a-d37fad33e549] Updating instance_info_cache with network_info: [{"id": "4c2c0fb5-064b-4c53-9498-77b9ddc16884", "address": "fa:16:3e:00:cb:60", "network": {"id": "e012ca9f-236a-42fd-a444-759508fbba7b", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-567894073-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.242", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b73ee7e490914f54925597f38c8cc05b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9f3a2eb5-353f-45c5-a73b-869626f4bb13", "external-id": "nsx-vlan-transportzone-411", "segmentation_id": 411, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4c2c0fb5-06", "ovs_interfaceid": "4c2c0fb5-064b-4c53-9498-77b9ddc16884", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 834.253375] env[61839]: DEBUG oslo_vmware.api [None req-c09e423b-508c-42ac-8e81-438f1973d598 tempest-ServersTestManualDisk-590910019 tempest-ServersTestManualDisk-590910019-project-member] Waiting for the task: (returnval){ [ 834.253375] env[61839]: value = "task-1314450" [ 834.253375] env[61839]: _type = "Task" [ 834.253375] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.264964] env[61839]: DEBUG oslo_vmware.api [None req-c09e423b-508c-42ac-8e81-438f1973d598 tempest-ServersTestManualDisk-590910019 tempest-ServersTestManualDisk-590910019-project-member] Task: {'id': task-1314450, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.356249] env[61839]: DEBUG nova.compute.manager [req-eed9c2cc-6f21-46b8-8b19-f08f6def3b04 req-f297f72c-6229-411c-8032-8215b2c54173 service nova] [instance: 62959833-5834-4c0a-bf4e-3ac1157b3b0c] Received event network-vif-plugged-8d3be36a-f8da-4212-9f18-edefa1681f82 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 834.356374] env[61839]: DEBUG oslo_concurrency.lockutils [req-eed9c2cc-6f21-46b8-8b19-f08f6def3b04 req-f297f72c-6229-411c-8032-8215b2c54173 service nova] Acquiring lock "62959833-5834-4c0a-bf4e-3ac1157b3b0c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 834.356656] env[61839]: DEBUG oslo_concurrency.lockutils [req-eed9c2cc-6f21-46b8-8b19-f08f6def3b04 req-f297f72c-6229-411c-8032-8215b2c54173 service nova] Lock "62959833-5834-4c0a-bf4e-3ac1157b3b0c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 834.357295] env[61839]: DEBUG oslo_concurrency.lockutils [req-eed9c2cc-6f21-46b8-8b19-f08f6def3b04 req-f297f72c-6229-411c-8032-8215b2c54173 service nova] Lock "62959833-5834-4c0a-bf4e-3ac1157b3b0c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 834.357295] env[61839]: DEBUG nova.compute.manager [req-eed9c2cc-6f21-46b8-8b19-f08f6def3b04 req-f297f72c-6229-411c-8032-8215b2c54173 service nova] [instance: 62959833-5834-4c0a-bf4e-3ac1157b3b0c] No waiting events found dispatching network-vif-plugged-8d3be36a-f8da-4212-9f18-edefa1681f82 {{(pid=61839) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 834.357295] env[61839]: WARNING nova.compute.manager [req-eed9c2cc-6f21-46b8-8b19-f08f6def3b04 req-f297f72c-6229-411c-8032-8215b2c54173 service nova] [instance: 62959833-5834-4c0a-bf4e-3ac1157b3b0c] Received unexpected event network-vif-plugged-8d3be36a-f8da-4212-9f18-edefa1681f82 for instance with vm_state building and task_state spawning.
[ 834.418444] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8898f5a-b0c4-4cc5-aa28-1f5c49f6ee31 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.427150] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc872690-033e-4875-89a0-4d3dd0890517 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.459682] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84907268-db32-4423-9080-7f2f4cd5cc74 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.468751] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27913b7e-fb15-4d3e-aee8-6cc3a788cb81 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.484028] env[61839]: DEBUG nova.compute.provider_tree [None req-a5856ade-37cd-4042-9d02-1ead51c7ffbc tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 834.514904] env[61839]: DEBUG nova.compute.manager [None req-cabdb6a6-9cdb-4077-8178-485aa41e5aff tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: 40c54d84-8e50-483a-b4e0-5f1cc72b0880] Start building block device mappings for instance. {{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 834.630934] env[61839]: DEBUG nova.virt.hardware [None req-29672a53-c7e3-4fb5-8183-aa50247b3c95 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-18T16:51:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-18T16:50:57Z,direct_url=,disk_format='vmdk',id=e497cc62-282a-4a70-9770-22d80d8a1013,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a98e034276e44534a9feace637762da7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-18T16:50:58Z,virtual_size=,visibility=), allow threads: False {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 834.631440] env[61839]: DEBUG nova.virt.hardware [None req-29672a53-c7e3-4fb5-8183-aa50247b3c95 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Flavor limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 834.631440] env[61839]: DEBUG nova.virt.hardware [None req-29672a53-c7e3-4fb5-8183-aa50247b3c95 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Image limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 834.631599] 
env[61839]: DEBUG nova.virt.hardware [None req-29672a53-c7e3-4fb5-8183-aa50247b3c95 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Flavor pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 834.631775] env[61839]: DEBUG nova.virt.hardware [None req-29672a53-c7e3-4fb5-8183-aa50247b3c95 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Image pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 834.632084] env[61839]: DEBUG nova.virt.hardware [None req-29672a53-c7e3-4fb5-8183-aa50247b3c95 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 834.632409] env[61839]: DEBUG nova.virt.hardware [None req-29672a53-c7e3-4fb5-8183-aa50247b3c95 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 834.632543] env[61839]: DEBUG nova.virt.hardware [None req-29672a53-c7e3-4fb5-8183-aa50247b3c95 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 834.632716] env[61839]: DEBUG nova.virt.hardware [None req-29672a53-c7e3-4fb5-8183-aa50247b3c95 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Got 1 possible topologies {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 834.632887] env[61839]: DEBUG nova.virt.hardware [None req-29672a53-c7e3-4fb5-8183-aa50247b3c95 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 834.633130] env[61839]: DEBUG nova.virt.hardware [None req-29672a53-c7e3-4fb5-8183-aa50247b3c95 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 834.634186] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ac4fbc5-0383-45f1-b9ef-b1ef3baa3921 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.646075] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bb37237-7465-470e-95e1-c45d0c17fb13 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.664164] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-29672a53-c7e3-4fb5-8183-aa50247b3c95 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 047080fa-8781-47b1-89d8-2e4c8031b164] Instance 
VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2e:3e:05', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '37434b93-dfdc-4a3f-bf5a-9f2cbe25a754', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6abe31af-55d3-43dc-b16e-b1e5d4c6f7dc', 'vif_model': 'vmxnet3'}] {{(pid=61839) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 834.671056] env[61839]: DEBUG oslo.service.loopingcall [None req-29672a53-c7e3-4fb5-8183-aa50247b3c95 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 834.673895] env[61839]: DEBUG oslo_concurrency.lockutils [req-720232b6-763e-4500-b5f2-1c18c749490e req-18eeef58-7975-4664-8462-6a0b0448d0c9 service nova] Releasing lock "refresh_cache-49d4720b-83e3-47d9-b727-5bb255de2e7c" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 834.674324] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 047080fa-8781-47b1-89d8-2e4c8031b164] Creating VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 834.674612] env[61839]: DEBUG oslo_vmware.api [None req-eaf3cde2-930c-48e4-9dfc-5ed5eef56c0b tempest-ServerMetadataNegativeTestJSON-1868427009 tempest-ServerMetadataNegativeTestJSON-1868427009-project-member] Task: {'id': task-1314448, 'name': PowerOffVM_Task} progress is 100%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.674822] env[61839]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-82059eb1-5895-4d18-8c65-c18f019a32cd {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.690719] env[61839]: DEBUG nova.network.neutron [None req-3697bd91-57ab-4763-82ae-06bd5d1d7501 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] [instance: 62959833-5834-4c0a-bf4e-3ac1157b3b0c] Successfully updated port: 8d3be36a-f8da-4212-9f18-edefa1681f82 {{(pid=61839) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 834.700626] env[61839]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 834.700626] env[61839]: value = "task-1314451" [ 834.700626] env[61839]: _type = "Task" [ 834.700626] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.710855] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314447, 'name': CreateVM_Task, 'duration_secs': 1.385205} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 834.711373] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 49d4720b-83e3-47d9-b727-5bb255de2e7c] Created VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 834.712236] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3b56f212-0cbc-4766-86f9-61b8fddca264 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 834.712331] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3b56f212-0cbc-4766-86f9-61b8fddca264 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 834.712756] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3b56f212-0cbc-4766-86f9-61b8fddca264 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 834.719422] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-478395a6-8042-4a26-86f2-da01f133f0e7 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.721935] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314451, 'name': CreateVM_Task} progress is 6%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.727072] env[61839]: DEBUG oslo_vmware.api [None req-3b56f212-0cbc-4766-86f9-61b8fddca264 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] Waiting for the task: (returnval){ [ 834.727072] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52805aea-531e-215e-32de-d19dc27858e7" [ 834.727072] env[61839]: _type = "Task" [ 834.727072] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.738902] env[61839]: DEBUG oslo_vmware.api [None req-3b56f212-0cbc-4766-86f9-61b8fddca264 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52805aea-531e-215e-32de-d19dc27858e7, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.749891] env[61839]: DEBUG oslo_concurrency.lockutils [req-20046d13-99cf-415f-9f32-f6a5fa2655ad req-b8012a94-649a-40e2-b61f-5dfb26fd2a94 service nova] Releasing lock "refresh_cache-694a5d4b-3673-406b-a24a-d37fad33e549" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 834.763847] env[61839]: DEBUG oslo_vmware.api [None req-c09e423b-508c-42ac-8e81-438f1973d598 tempest-ServersTestManualDisk-590910019 tempest-ServersTestManualDisk-590910019-project-member] Task: {'id': task-1314450, 'name': ReconfigVM_Task, 'duration_secs': 0.403624} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 834.764204] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-c09e423b-508c-42ac-8e81-438f1973d598 tempest-ServersTestManualDisk-590910019 tempest-ServersTestManualDisk-590910019-project-member] [instance: 39728872-2d30-48eb-90da-412f1e45971c] Reconfigured VM instance instance-00000041 to attach disk [datastore1] 39728872-2d30-48eb-90da-412f1e45971c/39728872-2d30-48eb-90da-412f1e45971c.vmdk or device None with type sparse {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 834.764880] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-cccc2c81-2b3f-4f52-a4a9-ae1423f52e37 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.771278] env[61839]: DEBUG oslo_vmware.api [None req-c09e423b-508c-42ac-8e81-438f1973d598 tempest-ServersTestManualDisk-590910019 tempest-ServersTestManualDisk-590910019-project-member] Waiting for the task: (returnval){ [ 834.771278] env[61839]: value = "task-1314452" [ 834.771278] env[61839]: _type = "Task" [ 834.771278] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.780508] env[61839]: DEBUG oslo_vmware.api [None req-c09e423b-508c-42ac-8e81-438f1973d598 tempest-ServersTestManualDisk-590910019 tempest-ServersTestManualDisk-590910019-project-member] Task: {'id': task-1314452, 'name': Rename_Task} progress is 5%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.839041] env[61839]: DEBUG nova.network.neutron [None req-cabdb6a6-9cdb-4077-8178-485aa41e5aff tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: 40c54d84-8e50-483a-b4e0-5f1cc72b0880] Successfully created port: 4373753c-2ab4-4f61-8117-89f623225621 {{(pid=61839) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 834.989418] env[61839]: DEBUG nova.scheduler.client.report [None req-a5856ade-37cd-4042-9d02-1ead51c7ffbc tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 835.159745] env[61839]: DEBUG oslo_vmware.api [None req-eaf3cde2-930c-48e4-9dfc-5ed5eef56c0b tempest-ServerMetadataNegativeTestJSON-1868427009 tempest-ServerMetadataNegativeTestJSON-1868427009-project-member] Task: {'id': task-1314448, 'name': PowerOffVM_Task, 'duration_secs': 1.052688} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.160163] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-eaf3cde2-930c-48e4-9dfc-5ed5eef56c0b tempest-ServerMetadataNegativeTestJSON-1868427009 tempest-ServerMetadataNegativeTestJSON-1868427009-project-member] [instance: 406da948-71c7-4c28-9ee3-10af64b1ab51] Powered off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 835.160532] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-eaf3cde2-930c-48e4-9dfc-5ed5eef56c0b tempest-ServerMetadataNegativeTestJSON-1868427009 tempest-ServerMetadataNegativeTestJSON-1868427009-project-member] [instance: 406da948-71c7-4c28-9ee3-10af64b1ab51] Unregistering the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 835.160845] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a219c167-6919-4099-96f6-165af88ce7cc {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.193758] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3697bd91-57ab-4763-82ae-06bd5d1d7501 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] Acquiring lock "refresh_cache-62959833-5834-4c0a-bf4e-3ac1157b3b0c" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 835.194108] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3697bd91-57ab-4763-82ae-06bd5d1d7501 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] Acquired lock "refresh_cache-62959833-5834-4c0a-bf4e-3ac1157b3b0c" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 835.194322] env[61839]: DEBUG nova.network.neutron [None req-3697bd91-57ab-4763-82ae-06bd5d1d7501 
tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] [instance: 62959833-5834-4c0a-bf4e-3ac1157b3b0c] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 835.212805] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314451, 'name': CreateVM_Task, 'duration_secs': 0.506162} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.213031] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 047080fa-8781-47b1-89d8-2e4c8031b164] Created VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 835.213703] env[61839]: DEBUG oslo_concurrency.lockutils [None req-29672a53-c7e3-4fb5-8183-aa50247b3c95 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 835.226915] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-eaf3cde2-930c-48e4-9dfc-5ed5eef56c0b tempest-ServerMetadataNegativeTestJSON-1868427009 tempest-ServerMetadataNegativeTestJSON-1868427009-project-member] [instance: 406da948-71c7-4c28-9ee3-10af64b1ab51] Unregistered the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 835.227253] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-eaf3cde2-930c-48e4-9dfc-5ed5eef56c0b tempest-ServerMetadataNegativeTestJSON-1868427009 tempest-ServerMetadataNegativeTestJSON-1868427009-project-member] [instance: 406da948-71c7-4c28-9ee3-10af64b1ab51] Deleting contents of the VM from datastore datastore1 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 835.227496] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-eaf3cde2-930c-48e4-9dfc-5ed5eef56c0b tempest-ServerMetadataNegativeTestJSON-1868427009 tempest-ServerMetadataNegativeTestJSON-1868427009-project-member] Deleting the datastore file [datastore1] 406da948-71c7-4c28-9ee3-10af64b1ab51 {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 835.227878] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e0b341b8-46ce-47fd-9bb5-e0736c0011b0 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.243302] env[61839]: DEBUG oslo_vmware.api [None req-3b56f212-0cbc-4766-86f9-61b8fddca264 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52805aea-531e-215e-32de-d19dc27858e7, 'name': SearchDatastore_Task, 'duration_secs': 0.025864} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.244697] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3b56f212-0cbc-4766-86f9-61b8fddca264 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 835.244946] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-3b56f212-0cbc-4766-86f9-61b8fddca264 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] [instance: 49d4720b-83e3-47d9-b727-5bb255de2e7c] Processing image e497cc62-282a-4a70-9770-22d80d8a1013 {{(pid=61839) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 835.245240] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3b56f212-0cbc-4766-86f9-61b8fddca264 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 835.245400] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3b56f212-0cbc-4766-86f9-61b8fddca264 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 835.245587] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-3b56f212-0cbc-4766-86f9-61b8fddca264 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 835.245913] env[61839]: DEBUG oslo_vmware.api [None req-eaf3cde2-930c-48e4-9dfc-5ed5eef56c0b tempest-ServerMetadataNegativeTestJSON-1868427009 tempest-ServerMetadataNegativeTestJSON-1868427009-project-member] Waiting for the task: (returnval){ [ 835.245913] env[61839]: value = "task-1314457" [ 835.245913] env[61839]: _type = "Task" [ 835.245913] env[61839]: } to complete. 
{{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.246151] env[61839]: DEBUG oslo_concurrency.lockutils [None req-29672a53-c7e3-4fb5-8183-aa50247b3c95 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 835.246469] env[61839]: DEBUG oslo_concurrency.lockutils [None req-29672a53-c7e3-4fb5-8183-aa50247b3c95 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 835.246726] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-34da71e6-b78e-4038-9d2a-9e0f9dacb216 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.248947] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-29919a31-bcd5-4190-b6dc-de3c88787b47 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.256556] env[61839]: DEBUG oslo_vmware.api [None req-29672a53-c7e3-4fb5-8183-aa50247b3c95 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Waiting for the task: (returnval){ [ 835.256556] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52b681b8-2d37-beaf-4125-70d2e51fd532" [ 835.256556] env[61839]: _type = "Task" [ 835.256556] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.259595] env[61839]: DEBUG oslo_vmware.api [None req-eaf3cde2-930c-48e4-9dfc-5ed5eef56c0b tempest-ServerMetadataNegativeTestJSON-1868427009 tempest-ServerMetadataNegativeTestJSON-1868427009-project-member] Task: {'id': task-1314457, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.263616] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-3b56f212-0cbc-4766-86f9-61b8fddca264 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 835.264295] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-3b56f212-0cbc-4766-86f9-61b8fddca264 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61839) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 835.264775] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-43c44af9-1f92-47f0-b9f5-8bc2913f3dd8 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.271818] env[61839]: DEBUG oslo_vmware.api [None req-3b56f212-0cbc-4766-86f9-61b8fddca264 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] Waiting for the task: (returnval){ [ 835.271818] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]527f00e2-a1d6-7b86-42ca-142198cff8a5" [ 835.271818] env[61839]: _type = "Task" [ 835.271818] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.277477] env[61839]: DEBUG oslo_vmware.api [None req-29672a53-c7e3-4fb5-8183-aa50247b3c95 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52b681b8-2d37-beaf-4125-70d2e51fd532, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.285981] env[61839]: DEBUG oslo_vmware.api [None req-c09e423b-508c-42ac-8e81-438f1973d598 tempest-ServersTestManualDisk-590910019 tempest-ServersTestManualDisk-590910019-project-member] Task: {'id': task-1314452, 'name': Rename_Task, 'duration_secs': 0.177652} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.289295] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-c09e423b-508c-42ac-8e81-438f1973d598 tempest-ServersTestManualDisk-590910019 tempest-ServersTestManualDisk-590910019-project-member] [instance: 39728872-2d30-48eb-90da-412f1e45971c] Powering on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 835.289569] env[61839]: DEBUG oslo_vmware.api [None req-3b56f212-0cbc-4766-86f9-61b8fddca264 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]527f00e2-a1d6-7b86-42ca-142198cff8a5, 'name': SearchDatastore_Task, 'duration_secs': 0.009494} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.289730] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9a90b6e9-7376-4e07-8e90-3f9448f4f531 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.291614] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9c00a983-ad89-49e1-8134-09b07f7e486d {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.296369] env[61839]: DEBUG oslo_vmware.api [None req-3b56f212-0cbc-4766-86f9-61b8fddca264 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] Waiting for the task: (returnval){ [ 835.296369] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52ef3071-b1bc-af83-580d-28450806b93a" [ 835.296369] env[61839]: _type = "Task" [ 835.296369] env[61839]: } to complete. 
{{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.300297] env[61839]: DEBUG oslo_vmware.api [None req-c09e423b-508c-42ac-8e81-438f1973d598 tempest-ServersTestManualDisk-590910019 tempest-ServersTestManualDisk-590910019-project-member] Waiting for the task: (returnval){ [ 835.300297] env[61839]: value = "task-1314458" [ 835.300297] env[61839]: _type = "Task" [ 835.300297] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.307631] env[61839]: DEBUG oslo_vmware.api [None req-3b56f212-0cbc-4766-86f9-61b8fddca264 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52ef3071-b1bc-af83-580d-28450806b93a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.311723] env[61839]: DEBUG oslo_vmware.api [None req-c09e423b-508c-42ac-8e81-438f1973d598 tempest-ServersTestManualDisk-590910019 tempest-ServersTestManualDisk-590910019-project-member] Task: {'id': task-1314458, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.500136] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a5856ade-37cd-4042-9d02-1ead51c7ffbc tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.998s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 835.503235] env[61839]: DEBUG oslo_concurrency.lockutils [None req-4312ca92-2e7a-4f1e-9c69-7a382d955d89 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.830s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 835.505339] env[61839]: INFO nova.compute.claims [None req-4312ca92-2e7a-4f1e-9c69-7a382d955d89 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] [instance: 3f86a0d5-30fd-42cc-bd40-14bce9d0e56f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 835.528820] env[61839]: DEBUG nova.compute.manager [None req-cabdb6a6-9cdb-4077-8178-485aa41e5aff tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: 40c54d84-8e50-483a-b4e0-5f1cc72b0880] Start spawning the instance on the hypervisor. 
[ 835.542108] env[61839]: INFO nova.scheduler.client.report [None req-a5856ade-37cd-4042-9d02-1ead51c7ffbc tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Deleted allocations for instance e65da0fd-e877-4b25-a319-e4d65397056a
[ 835.559156] env[61839]: DEBUG nova.virt.hardware [None req-cabdb6a6-9cdb-4077-8178-485aa41e5aff tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-18T16:51:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-18T16:50:57Z,direct_url=,disk_format='vmdk',id=e497cc62-282a-4a70-9770-22d80d8a1013,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a98e034276e44534a9feace637762da7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-18T16:50:58Z,virtual_size=,visibility=), allow threads: False {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}}
[ 835.559503] env[61839]: DEBUG nova.virt.hardware [None req-cabdb6a6-9cdb-4077-8178-485aa41e5aff tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Flavor limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}}
[ 835.559890] env[61839]: DEBUG nova.virt.hardware [None req-cabdb6a6-9cdb-4077-8178-485aa41e5aff tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Image limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 835.563408] env[61839]: DEBUG nova.virt.hardware [None req-cabdb6a6-9cdb-4077-8178-485aa41e5aff tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Flavor pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}}
[ 835.563408] env[61839]: DEBUG nova.virt.hardware [None req-cabdb6a6-9cdb-4077-8178-485aa41e5aff tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Image pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 835.563408] env[61839]: DEBUG nova.virt.hardware [None req-cabdb6a6-9cdb-4077-8178-485aa41e5aff tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}}
[ 835.563408] env[61839]: DEBUG nova.virt.hardware [None req-cabdb6a6-9cdb-4077-8178-485aa41e5aff tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}}
[ 835.563408] env[61839]: DEBUG nova.virt.hardware [None req-cabdb6a6-9cdb-4077-8178-485aa41e5aff tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}}
[ 835.563627] env[61839]: DEBUG nova.virt.hardware [None req-cabdb6a6-9cdb-4077-8178-485aa41e5aff tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Got 1 possible topologies {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}}
[ 835.563627] env[61839]: DEBUG nova.virt.hardware [None req-cabdb6a6-9cdb-4077-8178-485aa41e5aff tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}}
[ 835.563627] env[61839]: DEBUG nova.virt.hardware [None req-cabdb6a6-9cdb-4077-8178-485aa41e5aff tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
[ 835.568949] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89220983-5d18-4110-a20c-6cfda74188b7 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 835.576021] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc45ca39-ec68-4bda-b9a1-231461051d6d {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 835.733613] env[61839]: DEBUG nova.network.neutron [None req-3697bd91-57ab-4763-82ae-06bd5d1d7501 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] [instance: 62959833-5834-4c0a-bf4e-3ac1157b3b0c] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 835.762237] env[61839]: DEBUG oslo_vmware.api [None req-eaf3cde2-930c-48e4-9dfc-5ed5eef56c0b tempest-ServerMetadataNegativeTestJSON-1868427009 tempest-ServerMetadataNegativeTestJSON-1868427009-project-member] Task: {'id': task-1314457, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.19409} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
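The nova.virt.hardware records above walk a topology search: with no flavor or image constraints the limits default to 65536 sockets/cores/threads each, and for the 1-vCPU m1.nano flavor the only factorization is 1:1:1. A toy re-derivation of that enumeration (an illustrative sketch, not Nova's actual implementation):

def possible_topologies(vcpus, max_sockets, max_cores, max_threads):
    # Enumerate (sockets, cores, threads) triples whose product is vcpus.
    # Bounding each axis by vcpus keeps the search finite even with the
    # 65536 default limits seen in the log.
    for s in range(1, min(max_sockets, vcpus) + 1):
        for c in range(1, min(max_cores, vcpus) + 1):
            for t in range(1, min(max_threads, vcpus) + 1):
                if s * c * t == vcpus:
                    yield (s, c, t)

print(list(possible_topologies(1, 65536, 65536, 65536)))  # [(1, 1, 1)]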
[ 835.766104] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-eaf3cde2-930c-48e4-9dfc-5ed5eef56c0b tempest-ServerMetadataNegativeTestJSON-1868427009 tempest-ServerMetadataNegativeTestJSON-1868427009-project-member] Deleted the datastore file {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}}
[ 835.766472] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-eaf3cde2-930c-48e4-9dfc-5ed5eef56c0b tempest-ServerMetadataNegativeTestJSON-1868427009 tempest-ServerMetadataNegativeTestJSON-1868427009-project-member] [instance: 406da948-71c7-4c28-9ee3-10af64b1ab51] Deleted contents of the VM from datastore datastore1 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}}
[ 835.768020] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-eaf3cde2-930c-48e4-9dfc-5ed5eef56c0b tempest-ServerMetadataNegativeTestJSON-1868427009 tempest-ServerMetadataNegativeTestJSON-1868427009-project-member] [instance: 406da948-71c7-4c28-9ee3-10af64b1ab51] Instance destroyed {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}}
[ 835.768020] env[61839]: INFO nova.compute.manager [None req-eaf3cde2-930c-48e4-9dfc-5ed5eef56c0b tempest-ServerMetadataNegativeTestJSON-1868427009 tempest-ServerMetadataNegativeTestJSON-1868427009-project-member] [instance: 406da948-71c7-4c28-9ee3-10af64b1ab51] Took 2.14 seconds to destroy the instance on the hypervisor.
[ 835.768020] env[61839]: DEBUG oslo.service.loopingcall [None req-eaf3cde2-930c-48e4-9dfc-5ed5eef56c0b tempest-ServerMetadataNegativeTestJSON-1868427009 tempest-ServerMetadataNegativeTestJSON-1868427009-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 835.768020] env[61839]: DEBUG nova.compute.manager [-] [instance: 406da948-71c7-4c28-9ee3-10af64b1ab51] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}}
[ 835.768020] env[61839]: DEBUG nova.network.neutron [-] [instance: 406da948-71c7-4c28-9ee3-10af64b1ab51] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}}
[ 835.775930] env[61839]: DEBUG oslo_vmware.api [None req-29672a53-c7e3-4fb5-8183-aa50247b3c95 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52b681b8-2d37-beaf-4125-70d2e51fd532, 'name': SearchDatastore_Task, 'duration_secs': 0.017727} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 835.776244] env[61839]: DEBUG oslo_concurrency.lockutils [None req-29672a53-c7e3-4fb5-8183-aa50247b3c95 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 835.776466] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-29672a53-c7e3-4fb5-8183-aa50247b3c95 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 047080fa-8781-47b1-89d8-2e4c8031b164] Processing image e497cc62-282a-4a70-9770-22d80d8a1013 {{(pid=61839) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}}
[ 835.776717] env[61839]: DEBUG oslo_concurrency.lockutils [None req-29672a53-c7e3-4fb5-8183-aa50247b3c95 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 835.810630] env[61839]: DEBUG oslo_vmware.api [None req-3b56f212-0cbc-4766-86f9-61b8fddca264 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52ef3071-b1bc-af83-580d-28450806b93a, 'name': SearchDatastore_Task, 'duration_secs': 0.011224} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 835.813866] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3b56f212-0cbc-4766-86f9-61b8fddca264 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 835.814261] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-3b56f212-0cbc-4766-86f9-61b8fddca264 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk to [datastore1] 49d4720b-83e3-47d9-b727-5bb255de2e7c/49d4720b-83e3-47d9-b727-5bb255de2e7c.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}}
[ 835.814561] env[61839]: DEBUG oslo_vmware.api [None req-c09e423b-508c-42ac-8e81-438f1973d598 tempest-ServersTestManualDisk-590910019 tempest-ServersTestManualDisk-590910019-project-member] Task: {'id': task-1314458, 'name': PowerOnVM_Task} progress is 64%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
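The lockutils records around the image cache show how concurrent builds serialize on the cached VMDK's path before copying it. A sketch of that pattern with oslo.concurrency; fetch_image_if_missing() below is a hypothetical stand-in for the cache-miss download, not the driver's real helper:

from oslo_concurrency import lockutils

def fetch_image_if_missing():
    # Hypothetical: download the image into the cache if it is not there yet.
    pass

cache_vmdk = ("[datastore1] devstack-image-cache_base/"
              "e497cc62-282a-4a70-9770-22d80d8a1013/"
              "e497cc62-282a-4a70-9770-22d80d8a1013.vmdk")

# Entering and leaving this block is what the "Acquiring lock ..." /
# "Releasing lock ..." records above correspond to.
with lockutils.lock(cache_vmdk):
    fetch_image_if_missing()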
[ 835.814812] env[61839]: DEBUG oslo_concurrency.lockutils [None req-29672a53-c7e3-4fb5-8183-aa50247b3c95 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 835.815046] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-29672a53-c7e3-4fb5-8183-aa50247b3c95 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 835.815274] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-eab9e04b-8ce5-47a7-b2be-5636553ccae5 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 835.817248] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-79e55f88-6ca1-4096-ade0-8f921fd408de {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 835.824144] env[61839]: DEBUG oslo_vmware.api [None req-3b56f212-0cbc-4766-86f9-61b8fddca264 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] Waiting for the task: (returnval){
[ 835.824144] env[61839]: value = "task-1314459"
[ 835.824144] env[61839]: _type = "Task"
[ 835.824144] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 835.828354] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-29672a53-c7e3-4fb5-8183-aa50247b3c95 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 835.828554] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-29672a53-c7e3-4fb5-8183-aa50247b3c95 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61839) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}}
[ 835.830570] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eb093d93-2994-4195-92dc-dd9feaf17892 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 835.834961] env[61839]: DEBUG oslo_vmware.api [None req-3b56f212-0cbc-4766-86f9-61b8fddca264 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] Task: {'id': task-1314459, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 835.837770] env[61839]: DEBUG oslo_vmware.api [None req-29672a53-c7e3-4fb5-8183-aa50247b3c95 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Waiting for the task: (returnval){
[ 835.837770] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52fa4977-6879-804c-423c-fd5c5e6ebb8c"
[ 835.837770] env[61839]: _type = "Task"
[ 835.837770] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 835.847385] env[61839]: DEBUG oslo_vmware.api [None req-29672a53-c7e3-4fb5-8183-aa50247b3c95 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52fa4977-6879-804c-423c-fd5c5e6ebb8c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 835.940073] env[61839]: DEBUG nova.network.neutron [None req-3697bd91-57ab-4763-82ae-06bd5d1d7501 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] [instance: 62959833-5834-4c0a-bf4e-3ac1157b3b0c] Updating instance_info_cache with network_info: [{"id": "8d3be36a-f8da-4212-9f18-edefa1681f82", "address": "fa:16:3e:b8:82:1d", "network": {"id": "04db4c3e-de70-4119-8a0c-0b090344b55a", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-422735768-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "434f932a2a9e448d99fac449918affe9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca99f7a1-6365-4d3c-af16-1b1c1288091e", "external-id": "cl2-zone-334", "segmentation_id": 334, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8d3be36a-f8", "ovs_interfaceid": "8d3be36a-f8da-4212-9f18-edefa1681f82", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 836.050353] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a5856ade-37cd-4042-9d02-1ead51c7ffbc tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Lock "e65da0fd-e877-4b25-a319-e4d65397056a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 17.969s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 836.098168] env[61839]: DEBUG nova.compute.manager [req-97dd7338-b4b6-485c-b9e3-600ffd6d3146 req-9f4b6519-51db-4990-90a5-4e66085a39c0 service nova] [instance: 406da948-71c7-4c28-9ee3-10af64b1ab51] Received event network-vif-deleted-bc314e6e-7005-4b41-8a6a-929b75782af8 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}}
[ 836.098380] env[61839]: INFO nova.compute.manager [req-97dd7338-b4b6-485c-b9e3-600ffd6d3146 req-9f4b6519-51db-4990-90a5-4e66085a39c0 service nova] [instance: 406da948-71c7-4c28-9ee3-10af64b1ab51] Neutron deleted interface bc314e6e-7005-4b41-8a6a-929b75782af8; detaching it from the instance and deleting it from the info cache
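The instance_info_cache entries above are lists of VIF dicts. A short sketch of pulling the useful fields (port ID, MAC, device name, fixed IPs) out of one such entry, with the structure abbreviated from the record above:

# Abbreviated from the instance_info_cache record logged above.
network_info = [{
    "id": "8d3be36a-f8da-4212-9f18-edefa1681f82",
    "address": "fa:16:3e:b8:82:1d",
    "network": {"subnets": [{"cidr": "192.168.128.0/28",
                             "ips": [{"address": "192.168.128.10"}]}]},
    "devname": "tap8d3be36a-f8",
}]

for vif in network_info:
    # Collect every fixed IP across all subnets of this VIF.
    ips = [ip["address"]
           for subnet in vif["network"]["subnets"]
           for ip in subnet["ips"]]
    print(vif["id"], vif["address"], vif["devname"], ips)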
[ 836.098559] env[61839]: DEBUG nova.network.neutron [req-97dd7338-b4b6-485c-b9e3-600ffd6d3146 req-9f4b6519-51db-4990-90a5-4e66085a39c0 service nova] [instance: 406da948-71c7-4c28-9ee3-10af64b1ab51] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 836.311345] env[61839]: DEBUG oslo_vmware.api [None req-c09e423b-508c-42ac-8e81-438f1973d598 tempest-ServersTestManualDisk-590910019 tempest-ServersTestManualDisk-590910019-project-member] Task: {'id': task-1314458, 'name': PowerOnVM_Task, 'duration_secs': 0.870277} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 836.311635] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-c09e423b-508c-42ac-8e81-438f1973d598 tempest-ServersTestManualDisk-590910019 tempest-ServersTestManualDisk-590910019-project-member] [instance: 39728872-2d30-48eb-90da-412f1e45971c] Powered on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}}
[ 836.311864] env[61839]: INFO nova.compute.manager [None req-c09e423b-508c-42ac-8e81-438f1973d598 tempest-ServersTestManualDisk-590910019 tempest-ServersTestManualDisk-590910019-project-member] [instance: 39728872-2d30-48eb-90da-412f1e45971c] Took 9.39 seconds to spawn the instance on the hypervisor.
[ 836.312077] env[61839]: DEBUG nova.compute.manager [None req-c09e423b-508c-42ac-8e81-438f1973d598 tempest-ServersTestManualDisk-590910019 tempest-ServersTestManualDisk-590910019-project-member] [instance: 39728872-2d30-48eb-90da-412f1e45971c] Checking state {{(pid=61839) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}}
[ 836.312858] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ff02af0-f76b-4c92-8647-b8dfcef56d54 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 836.332728] env[61839]: DEBUG oslo_vmware.api [None req-3b56f212-0cbc-4766-86f9-61b8fddca264 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] Task: {'id': task-1314459, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 836.347486] env[61839]: DEBUG oslo_vmware.api [None req-29672a53-c7e3-4fb5-8183-aa50247b3c95 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52fa4977-6879-804c-423c-fd5c5e6ebb8c, 'name': SearchDatastore_Task, 'duration_secs': 0.00943} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 836.348304] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-519e3005-a84c-471b-a8c0-a2b66633f6b7 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 836.354767] env[61839]: DEBUG oslo_vmware.api [None req-29672a53-c7e3-4fb5-8183-aa50247b3c95 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Waiting for the task: (returnval){
[ 836.354767] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]5263888c-6564-8186-b057-20f27a99f6a3"
[ 836.354767] env[61839]: _type = "Task"
[ 836.354767] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 836.363622] env[61839]: DEBUG oslo_vmware.api [None req-29672a53-c7e3-4fb5-8183-aa50247b3c95 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]5263888c-6564-8186-b057-20f27a99f6a3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 836.387244] env[61839]: DEBUG nova.compute.manager [req-7441b539-9a8c-417e-a819-0dd4484965de req-ba2acb55-32f0-427a-9533-17df51904d96 service nova] [instance: 62959833-5834-4c0a-bf4e-3ac1157b3b0c] Received event network-changed-8d3be36a-f8da-4212-9f18-edefa1681f82 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}}
[ 836.387442] env[61839]: DEBUG nova.compute.manager [req-7441b539-9a8c-417e-a819-0dd4484965de req-ba2acb55-32f0-427a-9533-17df51904d96 service nova] [instance: 62959833-5834-4c0a-bf4e-3ac1157b3b0c] Refreshing instance network info cache due to event network-changed-8d3be36a-f8da-4212-9f18-edefa1681f82. {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}}
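"Checking state" above corresponds to reading the VM's runtime.powerState through the property collector (the RetrievePropertiesEx invocations). A hedged sketch using oslo.vmware's vim_util helper; session and vm_ref are assumed, as in the earlier session sketch:

from oslo_vmware import vim_util

# Read a single property of a managed object via the property collector.
# This is the kind of call that produces the RetrievePropertiesEx records.
power_state = session.invoke_api(
    vim_util, 'get_object_property',
    session.vim,            # the Vim proxy the SOAP requests go through
    vm_ref,                 # VirtualMachine moref (assumed)
    'runtime.powerState')   # e.g. 'poweredOn' after PowerOnVM_Task succeeds
print(power_state)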
[ 836.387638] env[61839]: DEBUG oslo_concurrency.lockutils [req-7441b539-9a8c-417e-a819-0dd4484965de req-ba2acb55-32f0-427a-9533-17df51904d96 service nova] Acquiring lock "refresh_cache-62959833-5834-4c0a-bf4e-3ac1157b3b0c" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 836.442851] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3697bd91-57ab-4763-82ae-06bd5d1d7501 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] Releasing lock "refresh_cache-62959833-5834-4c0a-bf4e-3ac1157b3b0c" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 836.443200] env[61839]: DEBUG nova.compute.manager [None req-3697bd91-57ab-4763-82ae-06bd5d1d7501 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] [instance: 62959833-5834-4c0a-bf4e-3ac1157b3b0c] Instance network_info: |[{"id": "8d3be36a-f8da-4212-9f18-edefa1681f82", "address": "fa:16:3e:b8:82:1d", "network": {"id": "04db4c3e-de70-4119-8a0c-0b090344b55a", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-422735768-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "434f932a2a9e448d99fac449918affe9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca99f7a1-6365-4d3c-af16-1b1c1288091e", "external-id": "cl2-zone-334", "segmentation_id": 334, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8d3be36a-f8", "ovs_interfaceid": "8d3be36a-f8da-4212-9f18-edefa1681f82", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}}
[ 836.443518] env[61839]: DEBUG oslo_concurrency.lockutils [req-7441b539-9a8c-417e-a819-0dd4484965de req-ba2acb55-32f0-427a-9533-17df51904d96 service nova] Acquired lock "refresh_cache-62959833-5834-4c0a-bf4e-3ac1157b3b0c" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 836.443701] env[61839]: DEBUG nova.network.neutron [req-7441b539-9a8c-417e-a819-0dd4484965de req-ba2acb55-32f0-427a-9533-17df51904d96 service nova] [instance: 62959833-5834-4c0a-bf4e-3ac1157b3b0c] Refreshing network info cache for port 8d3be36a-f8da-4212-9f18-edefa1681f82 {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}}
[ 836.444987] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-3697bd91-57ab-4763-82ae-06bd5d1d7501 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] [instance: 62959833-5834-4c0a-bf4e-3ac1157b3b0c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b8:82:1d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ca99f7a1-6365-4d3c-af16-1b1c1288091e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8d3be36a-f8da-4212-9f18-edefa1681f82', 'vif_model': 'vmxnet3'}] {{(pid=61839) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}}
[ 836.453826] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-3697bd91-57ab-4763-82ae-06bd5d1d7501 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] Creating folder: Project (434f932a2a9e448d99fac449918affe9). Parent ref: group-v281288. {{(pid=61839) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}}
[ 836.455089] env[61839]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4f8de36a-aa83-467d-b3f3-240e6217cb8e {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 836.466485] env[61839]: INFO nova.virt.vmwareapi.vm_util [None req-3697bd91-57ab-4763-82ae-06bd5d1d7501 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] Created folder: Project (434f932a2a9e448d99fac449918affe9) in parent group-v281288.
[ 836.466672] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-3697bd91-57ab-4763-82ae-06bd5d1d7501 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] Creating folder: Instances. Parent ref: group-v281355. {{(pid=61839) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}}
[ 836.466894] env[61839]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e746d8a0-f739-4c86-8186-ef33d1a81e30 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 836.474931] env[61839]: INFO nova.virt.vmwareapi.vm_util [None req-3697bd91-57ab-4763-82ae-06bd5d1d7501 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] Created folder: Instances in parent group-v281355.
[ 836.475173] env[61839]: DEBUG oslo.service.loopingcall [None req-3697bd91-57ab-4763-82ae-06bd5d1d7501 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 836.475356] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 62959833-5834-4c0a-bf4e-3ac1157b3b0c] Creating VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}}
[ 836.475548] env[61839]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3b4947de-648c-457e-8be4-ff9ca5fe19a6 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 836.496403] env[61839]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){
[ 836.496403] env[61839]: value = "task-1314462"
[ 836.496403] env[61839]: _type = "Task"
[ 836.496403] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 836.504130] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314462, 'name': CreateVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
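The Folder.CreateFolder invocations above implement create-if-missing semantics: attempt the create and treat a duplicate-name fault as success. A sketch under the assumption that oslo.vmware surfaces that fault as a DuplicateName exception; parent_ref is an assumed Folder moref such as group-v281288:

from oslo_vmware import exceptions as vexc

def create_folder_if_missing(session, parent_ref, name):
    try:
        # Folder.CreateFolder is the SOAP call seen in the records above.
        return session.invoke_api(session.vim, 'CreateFolder',
                                  parent_ref, name=name)
    except vexc.DuplicateName:
        # Another request created the folder first; for our purposes the
        # folder now exists, which is all we wanted.
        return None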
[ 836.546629] env[61839]: DEBUG nova.network.neutron [-] [instance: 406da948-71c7-4c28-9ee3-10af64b1ab51] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 836.600835] env[61839]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3212c074-62cb-4d06-b2a7-ae01ab975318 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 836.614089] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5d889be-658a-4c07-b9f3-35b3d94555e6 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 836.654240] env[61839]: DEBUG nova.compute.manager [req-97dd7338-b4b6-485c-b9e3-600ffd6d3146 req-9f4b6519-51db-4990-90a5-4e66085a39c0 service nova] [instance: 406da948-71c7-4c28-9ee3-10af64b1ab51] Detach interface failed, port_id=bc314e6e-7005-4b41-8a6a-929b75782af8, reason: Instance 406da948-71c7-4c28-9ee3-10af64b1ab51 could not be found. {{(pid=61839) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}}
[ 836.835023] env[61839]: INFO nova.compute.manager [None req-c09e423b-508c-42ac-8e81-438f1973d598 tempest-ServersTestManualDisk-590910019 tempest-ServersTestManualDisk-590910019-project-member] [instance: 39728872-2d30-48eb-90da-412f1e45971c] Took 33.85 seconds to build instance.
[ 836.846542] env[61839]: DEBUG oslo_vmware.api [None req-3b56f212-0cbc-4766-86f9-61b8fddca264 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] Task: {'id': task-1314459, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 836.867220] env[61839]: DEBUG oslo_vmware.api [None req-29672a53-c7e3-4fb5-8183-aa50247b3c95 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]5263888c-6564-8186-b057-20f27a99f6a3, 'name': SearchDatastore_Task, 'duration_secs': 0.032913} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 836.867949] env[61839]: DEBUG oslo_concurrency.lockutils [None req-29672a53-c7e3-4fb5-8183-aa50247b3c95 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 836.867949] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-29672a53-c7e3-4fb5-8183-aa50247b3c95 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk to [datastore1] 047080fa-8781-47b1-89d8-2e4c8031b164/047080fa-8781-47b1-89d8-2e4c8031b164.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}}
[ 836.868443] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8e551ecd-26ca-4dc9-a0b2-732a6d4c6756 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 836.879353] env[61839]: DEBUG oslo_vmware.api [None req-29672a53-c7e3-4fb5-8183-aa50247b3c95 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Waiting for the task: (returnval){
[ 836.879353] env[61839]: value = "task-1314463"
[ 836.879353] env[61839]: _type = "Task"
[ 836.879353] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 836.889429] env[61839]: DEBUG oslo_vmware.api [None req-29672a53-c7e3-4fb5-8183-aa50247b3c95 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Task: {'id': task-1314463, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
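The CopyVirtualDisk_Task records above copy the cached image VMDK into the instance directory and then poll to completion. A sketch of that call pattern; the datastore paths are taken from the log, while session and dc_ref (the Datacenter moref) are assumed, as in the earlier sketches:

# Source: the image cache entry; destination: the instance's directory.
src = ("[datastore1] devstack-image-cache_base/"
       "e497cc62-282a-4a70-9770-22d80d8a1013/"
       "e497cc62-282a-4a70-9770-22d80d8a1013.vmdk")
dst = ("[datastore1] 047080fa-8781-47b1-89d8-2e4c8031b164/"
       "047080fa-8781-47b1-89d8-2e4c8031b164.vmdk")

disk_mgr = session.vim.service_content.virtualDiskManager
task_ref = session.invoke_api(session.vim, 'CopyVirtualDisk_Task', disk_mgr,
                              sourceName=src, sourceDatacenter=dc_ref,
                              destName=dst, destDatacenter=dc_ref)
# The "progress is 0% ... 51% ... completed successfully" lines above are
# this poll loop reporting on the copy.
session.wait_for_task(task_ref)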
[ 836.908139] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12880dc9-734e-4e9b-b667-659a9825b117 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 836.917523] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15143c6c-f857-4b60-a03c-5e6d17b692ab {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 836.962423] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1ce227f-b3fd-4524-9634-faa2f1bff844 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 836.971412] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a7b1206-955d-4848-b9fd-1defaccd67b2 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 836.986642] env[61839]: DEBUG nova.compute.provider_tree [None req-4312ca92-2e7a-4f1e-9c69-7a382d955d89 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 837.006684] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314462, 'name': CreateVM_Task} progress is 99%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 837.049199] env[61839]: INFO nova.compute.manager [-] [instance: 406da948-71c7-4c28-9ee3-10af64b1ab51] Took 1.28 seconds to deallocate network for instance.
[ 837.079021] env[61839]: DEBUG nova.network.neutron [None req-cabdb6a6-9cdb-4077-8178-485aa41e5aff tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: 40c54d84-8e50-483a-b4e0-5f1cc72b0880] Successfully updated port: 4373753c-2ab4-4f61-8117-89f623225621 {{(pid=61839) _update_port /opt/stack/nova/nova/network/neutron.py:586}}
[ 837.235425] env[61839]: DEBUG nova.network.neutron [req-7441b539-9a8c-417e-a819-0dd4484965de req-ba2acb55-32f0-427a-9533-17df51904d96 service nova] [instance: 62959833-5834-4c0a-bf4e-3ac1157b3b0c] Updated VIF entry in instance network info cache for port 8d3be36a-f8da-4212-9f18-edefa1681f82. {{(pid=61839) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}}
[ 837.235798] env[61839]: DEBUG nova.network.neutron [req-7441b539-9a8c-417e-a819-0dd4484965de req-ba2acb55-32f0-427a-9533-17df51904d96 service nova] [instance: 62959833-5834-4c0a-bf4e-3ac1157b3b0c] Updating instance_info_cache with network_info: [{"id": "8d3be36a-f8da-4212-9f18-edefa1681f82", "address": "fa:16:3e:b8:82:1d", "network": {"id": "04db4c3e-de70-4119-8a0c-0b090344b55a", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-422735768-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "434f932a2a9e448d99fac449918affe9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca99f7a1-6365-4d3c-af16-1b1c1288091e", "external-id": "cl2-zone-334", "segmentation_id": 334, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8d3be36a-f8", "ovs_interfaceid": "8d3be36a-f8da-4212-9f18-edefa1681f82", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 837.344234] env[61839]: DEBUG oslo_concurrency.lockutils [None req-c09e423b-508c-42ac-8e81-438f1973d598 tempest-ServersTestManualDisk-590910019 tempest-ServersTestManualDisk-590910019-project-member] Lock "39728872-2d30-48eb-90da-412f1e45971c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 93.773s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 837.344589] env[61839]: DEBUG oslo_vmware.api [None req-3b56f212-0cbc-4766-86f9-61b8fddca264 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] Task: {'id': task-1314459, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.099992} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
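Strings like "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" throughout these records are datastore paths. A sketch, assuming oslo.vmware's DatastorePath helper for building and parsing them behaves as below (worth verifying against the installed version):

from oslo_vmware.objects.datastore import DatastorePath

# Build "[datastore1] devstack-image-cache_base/e497cc62-...-22d80d8a1013.vmdk"
path = DatastorePath('datastore1', 'devstack-image-cache_base',
                     'e497cc62-282a-4a70-9770-22d80d8a1013.vmdk')
print(str(path))

# Round-trip: parse the string form back into its components.
parsed = DatastorePath.parse(str(path))
print(parsed.datastore, parsed.rel_path)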
[ 837.345879] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-3b56f212-0cbc-4766-86f9-61b8fddca264 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk to [datastore1] 49d4720b-83e3-47d9-b727-5bb255de2e7c/49d4720b-83e3-47d9-b727-5bb255de2e7c.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}}
[ 837.346142] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-3b56f212-0cbc-4766-86f9-61b8fddca264 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] [instance: 49d4720b-83e3-47d9-b727-5bb255de2e7c] Extending root virtual disk to 1048576 {{(pid=61839) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}}
[ 837.346543] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c0ccb411-c3af-4fda-abb5-11ff9b44a569 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 837.358674] env[61839]: DEBUG oslo_vmware.api [None req-3b56f212-0cbc-4766-86f9-61b8fddca264 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] Waiting for the task: (returnval){
[ 837.358674] env[61839]: value = "task-1314464"
[ 837.358674] env[61839]: _type = "Task"
[ 837.358674] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 837.370027] env[61839]: DEBUG oslo_vmware.api [None req-3b56f212-0cbc-4766-86f9-61b8fddca264 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] Task: {'id': task-1314464, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 837.393080] env[61839]: DEBUG oslo_vmware.api [None req-29672a53-c7e3-4fb5-8183-aa50247b3c95 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Task: {'id': task-1314463, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 837.489866] env[61839]: DEBUG nova.scheduler.client.report [None req-4312ca92-2e7a-4f1e-9c69-7a382d955d89 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 837.512331] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314462, 'name': CreateVM_Task, 'duration_secs': 0.557405} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 837.512521] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 62959833-5834-4c0a-bf4e-3ac1157b3b0c] Created VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}}
[ 837.513269] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3697bd91-57ab-4763-82ae-06bd5d1d7501 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 837.513441] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3697bd91-57ab-4763-82ae-06bd5d1d7501 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 837.513779] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3697bd91-57ab-4763-82ae-06bd5d1d7501 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 837.514120] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-922e2637-aa72-48b2-8975-69aa698c25fd {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 837.520475] env[61839]: DEBUG oslo_vmware.api [None req-3697bd91-57ab-4763-82ae-06bd5d1d7501 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] Waiting for the task: (returnval){
[ 837.520475] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52a933b2-8914-17db-646f-8a2689fb7e64"
[ 837.520475] env[61839]: _type = "Task"
[ 837.520475] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 837.530084] env[61839]: DEBUG oslo_vmware.api [None req-3697bd91-57ab-4763-82ae-06bd5d1d7501 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52a933b2-8914-17db-646f-8a2689fb7e64, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
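The inventory record above fixes the placement capacity arithmetic: usable capacity per resource class is (total - reserved) * allocation_ratio. Worked through for the values just logged (an illustrative calculation only):

inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0
    print(rc, capacity)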
[ 837.560429] env[61839]: DEBUG oslo_concurrency.lockutils [None req-eaf3cde2-930c-48e4-9dfc-5ed5eef56c0b tempest-ServerMetadataNegativeTestJSON-1868427009 tempest-ServerMetadataNegativeTestJSON-1868427009-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 837.584487] env[61839]: DEBUG oslo_concurrency.lockutils [None req-cabdb6a6-9cdb-4077-8178-485aa41e5aff tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Acquiring lock "refresh_cache-40c54d84-8e50-483a-b4e0-5f1cc72b0880" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 837.584673] env[61839]: DEBUG oslo_concurrency.lockutils [None req-cabdb6a6-9cdb-4077-8178-485aa41e5aff tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Acquired lock "refresh_cache-40c54d84-8e50-483a-b4e0-5f1cc72b0880" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 837.584836] env[61839]: DEBUG nova.network.neutron [None req-cabdb6a6-9cdb-4077-8178-485aa41e5aff tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: 40c54d84-8e50-483a-b4e0-5f1cc72b0880] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}}
[ 837.739227] env[61839]: DEBUG oslo_concurrency.lockutils [req-7441b539-9a8c-417e-a819-0dd4484965de req-ba2acb55-32f0-427a-9533-17df51904d96 service nova] Releasing lock "refresh_cache-62959833-5834-4c0a-bf4e-3ac1157b3b0c" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 837.858409] env[61839]: DEBUG nova.compute.manager [None req-a774af97-4099-489d-9f9d-978b25469ec3 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 36485aa7-51c9-4cab-bfbd-739fdc09af17] Starting instance... {{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}}
[ 837.871933] env[61839]: DEBUG oslo_vmware.api [None req-3b56f212-0cbc-4766-86f9-61b8fddca264 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] Task: {'id': task-1314464, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.150458} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 837.872960] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-3b56f212-0cbc-4766-86f9-61b8fddca264 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] [instance: 49d4720b-83e3-47d9-b727-5bb255de2e7c] Extended root virtual disk {{(pid=61839) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}}
[ 837.873381] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b207541-5907-460a-8178-e421520c6b17 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 837.900613] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-3b56f212-0cbc-4766-86f9-61b8fddca264 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] [instance: 49d4720b-83e3-47d9-b727-5bb255de2e7c] Reconfiguring VM instance instance-00000042 to attach disk [datastore1] 49d4720b-83e3-47d9-b727-5bb255de2e7c/49d4720b-83e3-47d9-b727-5bb255de2e7c.vmdk or device None with type sparse {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}}
[ 837.906085] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5447d70b-d719-486e-ae72-29ce5c31aeeb {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 837.927578] env[61839]: DEBUG oslo_vmware.api [None req-29672a53-c7e3-4fb5-8183-aa50247b3c95 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Task: {'id': task-1314463, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.860215} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 837.928877] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-29672a53-c7e3-4fb5-8183-aa50247b3c95 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk to [datastore1] 047080fa-8781-47b1-89d8-2e4c8031b164/047080fa-8781-47b1-89d8-2e4c8031b164.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}}
[ 837.929117] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-29672a53-c7e3-4fb5-8183-aa50247b3c95 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 047080fa-8781-47b1-89d8-2e4c8031b164] Extending root virtual disk to 1048576 {{(pid=61839) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}}
[ 837.929552] env[61839]: DEBUG oslo_vmware.api [None req-3b56f212-0cbc-4766-86f9-61b8fddca264 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] Waiting for the task: (returnval){
[ 837.929552] env[61839]: value = "task-1314465"
[ 837.929552] env[61839]: _type = "Task"
[ 837.929552] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 837.929700] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f4f8a001-c4ea-4149-ae18-99f822c68adf {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 837.939768] env[61839]: DEBUG oslo_vmware.api [None req-3b56f212-0cbc-4766-86f9-61b8fddca264 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] Task: {'id': task-1314465, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 837.941428] env[61839]: DEBUG oslo_vmware.api [None req-29672a53-c7e3-4fb5-8183-aa50247b3c95 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Waiting for the task: (returnval){
[ 837.941428] env[61839]: value = "task-1314466"
[ 837.941428] env[61839]: _type = "Task"
[ 837.941428] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 837.949376] env[61839]: DEBUG oslo_vmware.api [None req-29672a53-c7e3-4fb5-8183-aa50247b3c95 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Task: {'id': task-1314466, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 838.001720] env[61839]: DEBUG oslo_concurrency.lockutils [None req-4312ca92-2e7a-4f1e-9c69-7a382d955d89 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.499s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 838.002635] env[61839]: DEBUG nova.compute.manager [None req-4312ca92-2e7a-4f1e-9c69-7a382d955d89 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] [instance: 3f86a0d5-30fd-42cc-bd40-14bce9d0e56f] Start building networks asynchronously for instance. {{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}}
[ 838.006885] env[61839]: DEBUG oslo_concurrency.lockutils [None req-6d3177cd-a4b0-4e7c-90c9-b61ffa1d799f tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 13.004s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 838.007147] env[61839]: DEBUG nova.objects.instance [None req-6d3177cd-a4b0-4e7c-90c9-b61ffa1d799f tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Lazy-loading 'resources' on Instance uuid a661cc10-5c4e-421b-b70b-189f0a613e8a {{(pid=61839) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}}
[ 838.031315] env[61839]: DEBUG oslo_vmware.api [None req-3697bd91-57ab-4763-82ae-06bd5d1d7501 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52a933b2-8914-17db-646f-8a2689fb7e64, 'name': SearchDatastore_Task, 'duration_secs': 0.05265} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 838.031716] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3697bd91-57ab-4763-82ae-06bd5d1d7501 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 838.031923] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-3697bd91-57ab-4763-82ae-06bd5d1d7501 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] [instance: 62959833-5834-4c0a-bf4e-3ac1157b3b0c] Processing image e497cc62-282a-4a70-9770-22d80d8a1013 {{(pid=61839) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}}
[ 838.032253] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3697bd91-57ab-4763-82ae-06bd5d1d7501 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 838.032423] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3697bd91-57ab-4763-82ae-06bd5d1d7501 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 838.032609] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-3697bd91-57ab-4763-82ae-06bd5d1d7501 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 838.032893] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a2c7e9f0-01d7-4ffe-8a27-491ab6492428 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 838.046054] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-3697bd91-57ab-4763-82ae-06bd5d1d7501 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 838.046054] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-3697bd91-57ab-4763-82ae-06bd5d1d7501 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61839) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}}
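"Extending root virtual disk to 1048576" in the records above is the flavor's root_gb=1 expressed in KB (1 * 1024 * 1024 = 1048576). A sketch of the ExtendVirtualDisk_Task call pattern that follows it; session and dc_ref are assumed, as in the earlier sketches:

# Convert the flavor's root disk size to the KB value seen in the log.
root_gb = 1
new_capacity_kb = root_gb * 1024 * 1024  # 1048576

disk_mgr = session.vim.service_content.virtualDiskManager
task_ref = session.invoke_api(
    session.vim, 'ExtendVirtualDisk_Task', disk_mgr,
    name="[datastore1] 49d4720b-83e3-47d9-b727-5bb255de2e7c/"
         "49d4720b-83e3-47d9-b727-5bb255de2e7c.vmdk",
    datacenter=dc_ref,
    newCapacityKb=new_capacity_kb,
    eagerZero=False)
session.wait_for_task(task_ref)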
{{(pid=61839) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 838.046828] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-933e673d-7d37-49b7-b3d1-5f040a91e0b4 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.052220] env[61839]: DEBUG oslo_vmware.api [None req-3697bd91-57ab-4763-82ae-06bd5d1d7501 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] Waiting for the task: (returnval){ [ 838.052220] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52205316-90ba-785c-6b18-60970068146e" [ 838.052220] env[61839]: _type = "Task" [ 838.052220] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 838.060828] env[61839]: DEBUG oslo_vmware.api [None req-3697bd91-57ab-4763-82ae-06bd5d1d7501 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52205316-90ba-785c-6b18-60970068146e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.120444] env[61839]: DEBUG nova.network.neutron [None req-cabdb6a6-9cdb-4077-8178-485aa41e5aff tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: 40c54d84-8e50-483a-b4e0-5f1cc72b0880] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 838.126052] env[61839]: DEBUG nova.compute.manager [req-de68ba83-eba6-4411-b791-c0623ae09cf1 req-7734c08a-97a7-4d10-83ef-added22e32df service nova] [instance: 39728872-2d30-48eb-90da-412f1e45971c] Received event network-changed-2615f303-83dd-4c9f-b09e-7cfb5da3276f {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 838.126257] env[61839]: DEBUG nova.compute.manager [req-de68ba83-eba6-4411-b791-c0623ae09cf1 req-7734c08a-97a7-4d10-83ef-added22e32df service nova] [instance: 39728872-2d30-48eb-90da-412f1e45971c] Refreshing instance network info cache due to event network-changed-2615f303-83dd-4c9f-b09e-7cfb5da3276f. 
{{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 838.126475] env[61839]: DEBUG oslo_concurrency.lockutils [req-de68ba83-eba6-4411-b791-c0623ae09cf1 req-7734c08a-97a7-4d10-83ef-added22e32df service nova] Acquiring lock "refresh_cache-39728872-2d30-48eb-90da-412f1e45971c" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 838.126642] env[61839]: DEBUG oslo_concurrency.lockutils [req-de68ba83-eba6-4411-b791-c0623ae09cf1 req-7734c08a-97a7-4d10-83ef-added22e32df service nova] Acquired lock "refresh_cache-39728872-2d30-48eb-90da-412f1e45971c" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 838.126860] env[61839]: DEBUG nova.network.neutron [req-de68ba83-eba6-4411-b791-c0623ae09cf1 req-7734c08a-97a7-4d10-83ef-added22e32df service nova] [instance: 39728872-2d30-48eb-90da-412f1e45971c] Refreshing network info cache for port 2615f303-83dd-4c9f-b09e-7cfb5da3276f {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 838.257124] env[61839]: DEBUG nova.network.neutron [None req-cabdb6a6-9cdb-4077-8178-485aa41e5aff tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: 40c54d84-8e50-483a-b4e0-5f1cc72b0880] Updating instance_info_cache with network_info: [{"id": "4373753c-2ab4-4f61-8117-89f623225621", "address": "fa:16:3e:7a:8e:c0", "network": {"id": "41c98894-de91-45eb-a390-6217e0f9dca5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1040806357-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7e03467b7fba46a9aac1562a1cb8368e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "572b7281-aad3-45fa-9cb2-fc1c70569948", "external-id": "nsx-vlan-transportzone-722", "segmentation_id": 722, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4373753c-2a", "ovs_interfaceid": "4373753c-2ab4-4f61-8117-89f623225621", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 838.362190] env[61839]: DEBUG nova.compute.manager [None req-a774af97-4099-489d-9f9d-978b25469ec3 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 36485aa7-51c9-4cab-bfbd-739fdc09af17] Instance disappeared before build. 
{{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 838.411057] env[61839]: DEBUG nova.compute.manager [req-df8fa0a9-e1ca-488b-8d49-559394c451e1 req-6caba306-0d44-471d-b3f4-f6da18f01a03 service nova] [instance: 40c54d84-8e50-483a-b4e0-5f1cc72b0880] Received event network-vif-plugged-4373753c-2ab4-4f61-8117-89f623225621 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 838.411293] env[61839]: DEBUG oslo_concurrency.lockutils [req-df8fa0a9-e1ca-488b-8d49-559394c451e1 req-6caba306-0d44-471d-b3f4-f6da18f01a03 service nova] Acquiring lock "40c54d84-8e50-483a-b4e0-5f1cc72b0880-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 838.411510] env[61839]: DEBUG oslo_concurrency.lockutils [req-df8fa0a9-e1ca-488b-8d49-559394c451e1 req-6caba306-0d44-471d-b3f4-f6da18f01a03 service nova] Lock "40c54d84-8e50-483a-b4e0-5f1cc72b0880-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 838.411687] env[61839]: DEBUG oslo_concurrency.lockutils [req-df8fa0a9-e1ca-488b-8d49-559394c451e1 req-6caba306-0d44-471d-b3f4-f6da18f01a03 service nova] Lock "40c54d84-8e50-483a-b4e0-5f1cc72b0880-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 838.411858] env[61839]: DEBUG nova.compute.manager [req-df8fa0a9-e1ca-488b-8d49-559394c451e1 req-6caba306-0d44-471d-b3f4-f6da18f01a03 service nova] [instance: 40c54d84-8e50-483a-b4e0-5f1cc72b0880] No waiting events found dispatching network-vif-plugged-4373753c-2ab4-4f61-8117-89f623225621 {{(pid=61839) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 838.412065] env[61839]: WARNING nova.compute.manager [req-df8fa0a9-e1ca-488b-8d49-559394c451e1 req-6caba306-0d44-471d-b3f4-f6da18f01a03 service nova] [instance: 40c54d84-8e50-483a-b4e0-5f1cc72b0880] Received unexpected event network-vif-plugged-4373753c-2ab4-4f61-8117-89f623225621 for instance with vm_state building and task_state spawning. [ 838.412244] env[61839]: DEBUG nova.compute.manager [req-df8fa0a9-e1ca-488b-8d49-559394c451e1 req-6caba306-0d44-471d-b3f4-f6da18f01a03 service nova] [instance: 40c54d84-8e50-483a-b4e0-5f1cc72b0880] Received event network-changed-4373753c-2ab4-4f61-8117-89f623225621 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 838.412401] env[61839]: DEBUG nova.compute.manager [req-df8fa0a9-e1ca-488b-8d49-559394c451e1 req-6caba306-0d44-471d-b3f4-f6da18f01a03 service nova] [instance: 40c54d84-8e50-483a-b4e0-5f1cc72b0880] Refreshing instance network info cache due to event network-changed-4373753c-2ab4-4f61-8117-89f623225621. 
{{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 838.412570] env[61839]: DEBUG oslo_concurrency.lockutils [req-df8fa0a9-e1ca-488b-8d49-559394c451e1 req-6caba306-0d44-471d-b3f4-f6da18f01a03 service nova] Acquiring lock "refresh_cache-40c54d84-8e50-483a-b4e0-5f1cc72b0880" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 838.440932] env[61839]: DEBUG oslo_vmware.api [None req-3b56f212-0cbc-4766-86f9-61b8fddca264 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] Task: {'id': task-1314465, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.450134] env[61839]: DEBUG oslo_vmware.api [None req-29672a53-c7e3-4fb5-8183-aa50247b3c95 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Task: {'id': task-1314466, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.0656} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 838.450397] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-29672a53-c7e3-4fb5-8183-aa50247b3c95 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 047080fa-8781-47b1-89d8-2e4c8031b164] Extended root virtual disk {{(pid=61839) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 838.451149] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-969c911b-1389-4c3b-88ff-3fed9d01d97e {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.473458] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-29672a53-c7e3-4fb5-8183-aa50247b3c95 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 047080fa-8781-47b1-89d8-2e4c8031b164] Reconfiguring VM instance instance-0000003e to attach disk [datastore1] 047080fa-8781-47b1-89d8-2e4c8031b164/047080fa-8781-47b1-89d8-2e4c8031b164.vmdk or device None with type sparse {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 838.473792] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-97766a4b-1fac-42c8-a05d-ef8f14c721dd {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.495343] env[61839]: DEBUG oslo_vmware.api [None req-29672a53-c7e3-4fb5-8183-aa50247b3c95 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Waiting for the task: (returnval){ [ 838.495343] env[61839]: value = "task-1314468" [ 838.495343] env[61839]: _type = "Task" [ 838.495343] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 838.502856] env[61839]: DEBUG oslo_vmware.api [None req-29672a53-c7e3-4fb5-8183-aa50247b3c95 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Task: {'id': task-1314468, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.508237] env[61839]: DEBUG nova.compute.utils [None req-4312ca92-2e7a-4f1e-9c69-7a382d955d89 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Using /dev/sd instead of None {{(pid=61839) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 838.509680] env[61839]: DEBUG nova.compute.manager [None req-4312ca92-2e7a-4f1e-9c69-7a382d955d89 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] [instance: 3f86a0d5-30fd-42cc-bd40-14bce9d0e56f] Allocating IP information in the background. {{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 838.509771] env[61839]: DEBUG nova.network.neutron [None req-4312ca92-2e7a-4f1e-9c69-7a382d955d89 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] [instance: 3f86a0d5-30fd-42cc-bd40-14bce9d0e56f] allocate_for_instance() {{(pid=61839) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 838.567877] env[61839]: DEBUG oslo_vmware.api [None req-3697bd91-57ab-4763-82ae-06bd5d1d7501 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52205316-90ba-785c-6b18-60970068146e, 'name': SearchDatastore_Task, 'duration_secs': 0.048228} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 838.570562] env[61839]: DEBUG nova.policy [None req-4312ca92-2e7a-4f1e-9c69-7a382d955d89 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ef80f7db912a4e33a5a50e7432a01ee7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9d28bf7713204dfb9682d9c002cb5449', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61839) authorize /opt/stack/nova/nova/policy.py:201}} [ 838.572593] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8ee180a6-c997-4ed9-96ba-ca0be6b048da {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.581864] env[61839]: DEBUG oslo_vmware.api [None req-3697bd91-57ab-4763-82ae-06bd5d1d7501 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] Waiting for the task: (returnval){ [ 838.581864] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]521b8b3e-0f14-52d3-1714-b3b8c368197a" [ 838.581864] env[61839]: _type = "Task" [ 838.581864] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 838.589805] env[61839]: DEBUG oslo_vmware.api [None req-3697bd91-57ab-4763-82ae-06bd5d1d7501 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]521b8b3e-0f14-52d3-1714-b3b8c368197a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.759457] env[61839]: DEBUG oslo_concurrency.lockutils [None req-cabdb6a6-9cdb-4077-8178-485aa41e5aff tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Releasing lock "refresh_cache-40c54d84-8e50-483a-b4e0-5f1cc72b0880" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 838.759792] env[61839]: DEBUG nova.compute.manager [None req-cabdb6a6-9cdb-4077-8178-485aa41e5aff tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: 40c54d84-8e50-483a-b4e0-5f1cc72b0880] Instance network_info: |[{"id": "4373753c-2ab4-4f61-8117-89f623225621", "address": "fa:16:3e:7a:8e:c0", "network": {"id": "41c98894-de91-45eb-a390-6217e0f9dca5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1040806357-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7e03467b7fba46a9aac1562a1cb8368e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "572b7281-aad3-45fa-9cb2-fc1c70569948", "external-id": "nsx-vlan-transportzone-722", "segmentation_id": 722, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4373753c-2a", "ovs_interfaceid": "4373753c-2ab4-4f61-8117-89f623225621", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 838.760348] env[61839]: DEBUG oslo_concurrency.lockutils [req-df8fa0a9-e1ca-488b-8d49-559394c451e1 req-6caba306-0d44-471d-b3f4-f6da18f01a03 service nova] Acquired lock "refresh_cache-40c54d84-8e50-483a-b4e0-5f1cc72b0880" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 838.760540] env[61839]: DEBUG nova.network.neutron [req-df8fa0a9-e1ca-488b-8d49-559394c451e1 req-6caba306-0d44-471d-b3f4-f6da18f01a03 service nova] [instance: 40c54d84-8e50-483a-b4e0-5f1cc72b0880] Refreshing network info cache for port 4373753c-2ab4-4f61-8117-89f623225621 {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 838.761762] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-cabdb6a6-9cdb-4077-8178-485aa41e5aff tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: 40c54d84-8e50-483a-b4e0-5f1cc72b0880] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7a:8e:c0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '572b7281-aad3-45fa-9cb2-fc1c70569948', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4373753c-2ab4-4f61-8117-89f623225621', 'vif_model': 'vmxnet3'}] {{(pid=61839) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 838.770743] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-cabdb6a6-9cdb-4077-8178-485aa41e5aff tempest-AttachInterfacesTestJSON-1805184072 
tempest-AttachInterfacesTestJSON-1805184072-project-member] Creating folder: Project (7e03467b7fba46a9aac1562a1cb8368e). Parent ref: group-v281288. {{(pid=61839) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 838.772132] env[61839]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ff27e645-f365-42b2-8060-15ba8320f85c {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.786431] env[61839]: INFO nova.virt.vmwareapi.vm_util [None req-cabdb6a6-9cdb-4077-8178-485aa41e5aff tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Created folder: Project (7e03467b7fba46a9aac1562a1cb8368e) in parent group-v281288. [ 838.786536] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-cabdb6a6-9cdb-4077-8178-485aa41e5aff tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Creating folder: Instances. Parent ref: group-v281358. {{(pid=61839) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 838.786718] env[61839]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d90fdc58-f2cc-429d-9fab-a1c1c8442efe {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.795873] env[61839]: INFO nova.virt.vmwareapi.vm_util [None req-cabdb6a6-9cdb-4077-8178-485aa41e5aff tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Created folder: Instances in parent group-v281358. [ 838.796238] env[61839]: DEBUG oslo.service.loopingcall [None req-cabdb6a6-9cdb-4077-8178-485aa41e5aff tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 838.796672] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 40c54d84-8e50-483a-b4e0-5f1cc72b0880] Creating VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 838.796985] env[61839]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1ae2b765-ae30-436e-afaa-cec98a23c70b {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.821511] env[61839]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 838.821511] env[61839]: value = "task-1314471" [ 838.821511] env[61839]: _type = "Task" [ 838.821511] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 838.829516] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314471, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.863454] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-750143eb-efc8-4981-be2b-7707618386d6 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.878307] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-904990a6-6af8-4ba7-9309-b2f023a2deb1 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.918610] env[61839]: DEBUG nova.network.neutron [None req-4312ca92-2e7a-4f1e-9c69-7a382d955d89 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] [instance: 3f86a0d5-30fd-42cc-bd40-14bce9d0e56f] Successfully created port: 6ae0317a-8af8-438c-864f-d31b4d288dab {{(pid=61839) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 838.921513] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a774af97-4099-489d-9f9d-978b25469ec3 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Lock "36485aa7-51c9-4cab-bfbd-739fdc09af17" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 11.674s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 838.925126] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15184fc4-f53c-4584-ad94-37ecf258e040 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.943142] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eac08865-71d8-4d58-9f5e-53a35b5789a7 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.953338] env[61839]: DEBUG oslo_vmware.api [None req-3b56f212-0cbc-4766-86f9-61b8fddca264 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] Task: {'id': task-1314465, 'name': ReconfigVM_Task, 'duration_secs': 0.990798} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 838.961715] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-3b56f212-0cbc-4766-86f9-61b8fddca264 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] [instance: 49d4720b-83e3-47d9-b727-5bb255de2e7c] Reconfigured VM instance instance-00000042 to attach disk [datastore1] 49d4720b-83e3-47d9-b727-5bb255de2e7c/49d4720b-83e3-47d9-b727-5bb255de2e7c.vmdk or device None with type sparse {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 838.964128] env[61839]: DEBUG nova.compute.provider_tree [None req-6d3177cd-a4b0-4e7c-90c9-b61ffa1d799f tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 838.964447] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e281eb4a-de60-4d47-ac96-2e6a3e487b9e {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.972458] env[61839]: DEBUG oslo_vmware.api [None req-3b56f212-0cbc-4766-86f9-61b8fddca264 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] Waiting for the task: (returnval){ [ 838.972458] env[61839]: value = "task-1314472" [ 838.972458] env[61839]: _type = "Task" [ 838.972458] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 838.984570] env[61839]: DEBUG oslo_vmware.api [None req-3b56f212-0cbc-4766-86f9-61b8fddca264 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] Task: {'id': task-1314472, 'name': Rename_Task} progress is 5%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 839.000780] env[61839]: DEBUG nova.network.neutron [req-de68ba83-eba6-4411-b791-c0623ae09cf1 req-7734c08a-97a7-4d10-83ef-added22e32df service nova] [instance: 39728872-2d30-48eb-90da-412f1e45971c] Updated VIF entry in instance network info cache for port 2615f303-83dd-4c9f-b09e-7cfb5da3276f. 
{{(pid=61839) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 839.003052] env[61839]: DEBUG nova.network.neutron [req-de68ba83-eba6-4411-b791-c0623ae09cf1 req-7734c08a-97a7-4d10-83ef-added22e32df service nova] [instance: 39728872-2d30-48eb-90da-412f1e45971c] Updating instance_info_cache with network_info: [{"id": "2615f303-83dd-4c9f-b09e-7cfb5da3276f", "address": "fa:16:3e:33:57:53", "network": {"id": "4f5987f4-a100-468f-86db-010c27144844", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-185915171-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.132", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d9a08aa6b2064b06874f5a2a3063d8b2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f0c0b05e-6d10-474c-9173-4c8f1dacac9f", "external-id": "nsx-vlan-transportzone-693", "segmentation_id": 693, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2615f303-83", "ovs_interfaceid": "2615f303-83dd-4c9f-b09e-7cfb5da3276f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 839.008729] env[61839]: DEBUG oslo_vmware.api [None req-29672a53-c7e3-4fb5-8183-aa50247b3c95 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Task: {'id': task-1314468, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 839.016030] env[61839]: DEBUG nova.compute.manager [None req-4312ca92-2e7a-4f1e-9c69-7a382d955d89 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] [instance: 3f86a0d5-30fd-42cc-bd40-14bce9d0e56f] Start building block device mappings for instance. {{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 839.093362] env[61839]: DEBUG oslo_vmware.api [None req-3697bd91-57ab-4763-82ae-06bd5d1d7501 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]521b8b3e-0f14-52d3-1714-b3b8c368197a, 'name': SearchDatastore_Task, 'duration_secs': 0.052914} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 839.093649] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3697bd91-57ab-4763-82ae-06bd5d1d7501 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 839.094030] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-3697bd91-57ab-4763-82ae-06bd5d1d7501 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk to [datastore1] 62959833-5834-4c0a-bf4e-3ac1157b3b0c/62959833-5834-4c0a-bf4e-3ac1157b3b0c.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 839.094372] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9c7295a2-2009-41ac-a579-19b0313b9f98 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.101589] env[61839]: DEBUG oslo_vmware.api [None req-3697bd91-57ab-4763-82ae-06bd5d1d7501 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] Waiting for the task: (returnval){ [ 839.101589] env[61839]: value = "task-1314473" [ 839.101589] env[61839]: _type = "Task" [ 839.101589] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 839.110765] env[61839]: DEBUG oslo_vmware.api [None req-3697bd91-57ab-4763-82ae-06bd5d1d7501 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] Task: {'id': task-1314473, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 839.331466] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314471, 'name': CreateVM_Task} progress is 99%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 839.430390] env[61839]: DEBUG nova.compute.manager [None req-e98b0865-779a-457f-bdd0-ebf545e9b50c tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 56369316-a445-4a2a-a0a6-967074104e19] Starting instance... 
{{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 839.467584] env[61839]: DEBUG nova.scheduler.client.report [None req-6d3177cd-a4b0-4e7c-90c9-b61ffa1d799f tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 839.484050] env[61839]: DEBUG oslo_vmware.api [None req-3b56f212-0cbc-4766-86f9-61b8fddca264 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] Task: {'id': task-1314472, 'name': Rename_Task, 'duration_secs': 0.226264} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 839.484050] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-3b56f212-0cbc-4766-86f9-61b8fddca264 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] [instance: 49d4720b-83e3-47d9-b727-5bb255de2e7c] Powering on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 839.484232] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-efc413d2-7dc3-4114-a295-77d321b32a23 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.494024] env[61839]: DEBUG oslo_vmware.api [None req-3b56f212-0cbc-4766-86f9-61b8fddca264 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] Waiting for the task: (returnval){ [ 839.494024] env[61839]: value = "task-1314474" [ 839.494024] env[61839]: _type = "Task" [ 839.494024] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 839.499622] env[61839]: DEBUG oslo_vmware.api [None req-3b56f212-0cbc-4766-86f9-61b8fddca264 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] Task: {'id': task-1314474, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 839.506648] env[61839]: DEBUG oslo_concurrency.lockutils [req-de68ba83-eba6-4411-b791-c0623ae09cf1 req-7734c08a-97a7-4d10-83ef-added22e32df service nova] Releasing lock "refresh_cache-39728872-2d30-48eb-90da-412f1e45971c" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 839.510333] env[61839]: DEBUG oslo_vmware.api [None req-29672a53-c7e3-4fb5-8183-aa50247b3c95 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Task: {'id': task-1314468, 'name': ReconfigVM_Task, 'duration_secs': 0.868072} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 839.510865] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-29672a53-c7e3-4fb5-8183-aa50247b3c95 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 047080fa-8781-47b1-89d8-2e4c8031b164] Reconfigured VM instance instance-0000003e to attach disk [datastore1] 047080fa-8781-47b1-89d8-2e4c8031b164/047080fa-8781-47b1-89d8-2e4c8031b164.vmdk or device None with type sparse {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 839.511340] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f53a3055-6a81-4937-a6d5-9193312469eb {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.517909] env[61839]: DEBUG oslo_vmware.api [None req-29672a53-c7e3-4fb5-8183-aa50247b3c95 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Waiting for the task: (returnval){ [ 839.517909] env[61839]: value = "task-1314475" [ 839.517909] env[61839]: _type = "Task" [ 839.517909] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 839.530754] env[61839]: DEBUG oslo_vmware.api [None req-29672a53-c7e3-4fb5-8183-aa50247b3c95 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Task: {'id': task-1314475, 'name': Rename_Task} progress is 6%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 839.552720] env[61839]: DEBUG nova.network.neutron [req-df8fa0a9-e1ca-488b-8d49-559394c451e1 req-6caba306-0d44-471d-b3f4-f6da18f01a03 service nova] [instance: 40c54d84-8e50-483a-b4e0-5f1cc72b0880] Updated VIF entry in instance network info cache for port 4373753c-2ab4-4f61-8117-89f623225621. 
{{(pid=61839) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 839.553200] env[61839]: DEBUG nova.network.neutron [req-df8fa0a9-e1ca-488b-8d49-559394c451e1 req-6caba306-0d44-471d-b3f4-f6da18f01a03 service nova] [instance: 40c54d84-8e50-483a-b4e0-5f1cc72b0880] Updating instance_info_cache with network_info: [{"id": "4373753c-2ab4-4f61-8117-89f623225621", "address": "fa:16:3e:7a:8e:c0", "network": {"id": "41c98894-de91-45eb-a390-6217e0f9dca5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1040806357-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7e03467b7fba46a9aac1562a1cb8368e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "572b7281-aad3-45fa-9cb2-fc1c70569948", "external-id": "nsx-vlan-transportzone-722", "segmentation_id": 722, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4373753c-2a", "ovs_interfaceid": "4373753c-2ab4-4f61-8117-89f623225621", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 839.612446] env[61839]: DEBUG oslo_vmware.api [None req-3697bd91-57ab-4763-82ae-06bd5d1d7501 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] Task: {'id': task-1314473, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 839.834318] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314471, 'name': CreateVM_Task} progress is 99%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 839.956061] env[61839]: DEBUG oslo_concurrency.lockutils [None req-e98b0865-779a-457f-bdd0-ebf545e9b50c tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 839.973014] env[61839]: DEBUG oslo_concurrency.lockutils [None req-6d3177cd-a4b0-4e7c-90c9-b61ffa1d799f tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.966s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 839.975343] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.588s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 839.976798] env[61839]: INFO nova.compute.claims [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] [instance: ce59c937-fc0b-464f-baaa-461c6f6c2d57] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 839.992018] env[61839]: INFO nova.scheduler.client.report [None req-6d3177cd-a4b0-4e7c-90c9-b61ffa1d799f tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Deleted allocations for instance a661cc10-5c4e-421b-b70b-189f0a613e8a [ 840.007161] env[61839]: DEBUG oslo_vmware.api [None req-3b56f212-0cbc-4766-86f9-61b8fddca264 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] Task: {'id': task-1314474, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.029592] env[61839]: DEBUG nova.compute.manager [None req-4312ca92-2e7a-4f1e-9c69-7a382d955d89 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] [instance: 3f86a0d5-30fd-42cc-bd40-14bce9d0e56f] Start spawning the instance on the hypervisor. {{(pid=61839) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 840.032342] env[61839]: DEBUG oslo_vmware.api [None req-29672a53-c7e3-4fb5-8183-aa50247b3c95 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Task: {'id': task-1314475, 'name': Rename_Task} progress is 99%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.057070] env[61839]: DEBUG oslo_concurrency.lockutils [req-df8fa0a9-e1ca-488b-8d49-559394c451e1 req-6caba306-0d44-471d-b3f4-f6da18f01a03 service nova] Releasing lock "refresh_cache-40c54d84-8e50-483a-b4e0-5f1cc72b0880" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 840.060466] env[61839]: DEBUG nova.virt.hardware [None req-4312ca92-2e7a-4f1e-9c69-7a382d955d89 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-18T16:51:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-18T16:50:57Z,direct_url=,disk_format='vmdk',id=e497cc62-282a-4a70-9770-22d80d8a1013,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a98e034276e44534a9feace637762da7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-18T16:50:58Z,virtual_size=,visibility=), allow threads: False {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 840.060700] env[61839]: DEBUG nova.virt.hardware [None req-4312ca92-2e7a-4f1e-9c69-7a382d955d89 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Flavor limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 840.060859] env[61839]: DEBUG nova.virt.hardware [None req-4312ca92-2e7a-4f1e-9c69-7a382d955d89 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Image limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 840.061058] env[61839]: DEBUG nova.virt.hardware [None req-4312ca92-2e7a-4f1e-9c69-7a382d955d89 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Flavor pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 840.061217] env[61839]: DEBUG nova.virt.hardware [None req-4312ca92-2e7a-4f1e-9c69-7a382d955d89 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Image pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 840.061369] env[61839]: DEBUG nova.virt.hardware [None req-4312ca92-2e7a-4f1e-9c69-7a382d955d89 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 840.061575] env[61839]: DEBUG nova.virt.hardware [None req-4312ca92-2e7a-4f1e-9c69-7a382d955d89 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 840.061735] 
env[61839]: DEBUG nova.virt.hardware [None req-4312ca92-2e7a-4f1e-9c69-7a382d955d89 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 840.061912] env[61839]: DEBUG nova.virt.hardware [None req-4312ca92-2e7a-4f1e-9c69-7a382d955d89 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Got 1 possible topologies {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 840.062115] env[61839]: DEBUG nova.virt.hardware [None req-4312ca92-2e7a-4f1e-9c69-7a382d955d89 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 840.062299] env[61839]: DEBUG nova.virt.hardware [None req-4312ca92-2e7a-4f1e-9c69-7a382d955d89 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 840.063508] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61a240df-0c94-425b-aad7-55dd565a1519 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.071156] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-531cdcda-d9b2-4637-bdd5-bf575c58ef6a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.111115] env[61839]: DEBUG oslo_vmware.api [None req-3697bd91-57ab-4763-82ae-06bd5d1d7501 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] Task: {'id': task-1314473, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.771217} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 840.111412] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-3697bd91-57ab-4763-82ae-06bd5d1d7501 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk to [datastore1] 62959833-5834-4c0a-bf4e-3ac1157b3b0c/62959833-5834-4c0a-bf4e-3ac1157b3b0c.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 840.111656] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-3697bd91-57ab-4763-82ae-06bd5d1d7501 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] [instance: 62959833-5834-4c0a-bf4e-3ac1157b3b0c] Extending root virtual disk to 1048576 {{(pid=61839) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 840.111915] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6f60c168-1dc1-4056-8469-4e5a0ee2aa53 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.118075] env[61839]: DEBUG oslo_vmware.api [None req-3697bd91-57ab-4763-82ae-06bd5d1d7501 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] Waiting for the task: (returnval){ [ 840.118075] env[61839]: value = "task-1314477" [ 840.118075] env[61839]: _type = "Task" [ 840.118075] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 840.124780] env[61839]: DEBUG oslo_vmware.api [None req-3697bd91-57ab-4763-82ae-06bd5d1d7501 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] Task: {'id': task-1314477, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.342114] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314471, 'name': CreateVM_Task, 'duration_secs': 1.449264} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 840.342114] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 40c54d84-8e50-483a-b4e0-5f1cc72b0880] Created VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 840.342310] env[61839]: DEBUG oslo_concurrency.lockutils [None req-cabdb6a6-9cdb-4077-8178-485aa41e5aff tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 840.342475] env[61839]: DEBUG oslo_concurrency.lockutils [None req-cabdb6a6-9cdb-4077-8178-485aa41e5aff tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 840.342854] env[61839]: DEBUG oslo_concurrency.lockutils [None req-cabdb6a6-9cdb-4077-8178-485aa41e5aff tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 840.343245] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eb7a43c3-90ca-4f6e-9a6f-307a3db7d515 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.353378] env[61839]: DEBUG oslo_vmware.api [None req-cabdb6a6-9cdb-4077-8178-485aa41e5aff tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Waiting for the task: (returnval){ [ 840.353378] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52c53056-21cc-62bb-e763-65ff1e6f6e27" [ 840.353378] env[61839]: _type = "Task" [ 840.353378] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 840.364832] env[61839]: DEBUG oslo_vmware.api [None req-cabdb6a6-9cdb-4077-8178-485aa41e5aff tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52c53056-21cc-62bb-e763-65ff1e6f6e27, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.429803] env[61839]: DEBUG nova.compute.manager [req-4d99cfd0-20c2-4bf8-a906-ec2d14f846b3 req-4395897a-8762-481b-9f39-3daa86804a4a service nova] [instance: 3f86a0d5-30fd-42cc-bd40-14bce9d0e56f] Received event network-vif-plugged-6ae0317a-8af8-438c-864f-d31b4d288dab {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 840.430048] env[61839]: DEBUG oslo_concurrency.lockutils [req-4d99cfd0-20c2-4bf8-a906-ec2d14f846b3 req-4395897a-8762-481b-9f39-3daa86804a4a service nova] Acquiring lock "3f86a0d5-30fd-42cc-bd40-14bce9d0e56f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 840.430293] env[61839]: DEBUG oslo_concurrency.lockutils [req-4d99cfd0-20c2-4bf8-a906-ec2d14f846b3 req-4395897a-8762-481b-9f39-3daa86804a4a service nova] Lock "3f86a0d5-30fd-42cc-bd40-14bce9d0e56f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 840.430467] env[61839]: DEBUG oslo_concurrency.lockutils [req-4d99cfd0-20c2-4bf8-a906-ec2d14f846b3 req-4395897a-8762-481b-9f39-3daa86804a4a service nova] Lock "3f86a0d5-30fd-42cc-bd40-14bce9d0e56f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 840.430637] env[61839]: DEBUG nova.compute.manager [req-4d99cfd0-20c2-4bf8-a906-ec2d14f846b3 req-4395897a-8762-481b-9f39-3daa86804a4a service nova] [instance: 3f86a0d5-30fd-42cc-bd40-14bce9d0e56f] No waiting events found dispatching network-vif-plugged-6ae0317a-8af8-438c-864f-d31b4d288dab {{(pid=61839) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 840.430804] env[61839]: WARNING nova.compute.manager [req-4d99cfd0-20c2-4bf8-a906-ec2d14f846b3 req-4395897a-8762-481b-9f39-3daa86804a4a service nova] [instance: 3f86a0d5-30fd-42cc-bd40-14bce9d0e56f] Received unexpected event network-vif-plugged-6ae0317a-8af8-438c-864f-d31b4d288dab for instance with vm_state building and task_state spawning. [ 840.505454] env[61839]: DEBUG oslo_concurrency.lockutils [None req-6d3177cd-a4b0-4e7c-90c9-b61ffa1d799f tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Lock "a661cc10-5c4e-421b-b70b-189f0a613e8a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 18.861s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 840.507306] env[61839]: DEBUG oslo_vmware.api [None req-3b56f212-0cbc-4766-86f9-61b8fddca264 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] Task: {'id': task-1314474, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.530927] env[61839]: DEBUG oslo_vmware.api [None req-29672a53-c7e3-4fb5-8183-aa50247b3c95 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Task: {'id': task-1314475, 'name': Rename_Task, 'duration_secs': 0.535773} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 840.531282] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-29672a53-c7e3-4fb5-8183-aa50247b3c95 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 047080fa-8781-47b1-89d8-2e4c8031b164] Powering on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 840.531543] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-35542763-3d7d-4940-9b54-b96535b2d520 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.537908] env[61839]: DEBUG oslo_vmware.api [None req-29672a53-c7e3-4fb5-8183-aa50247b3c95 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Waiting for the task: (returnval){ [ 840.537908] env[61839]: value = "task-1314478" [ 840.537908] env[61839]: _type = "Task" [ 840.537908] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 840.546020] env[61839]: DEBUG oslo_vmware.api [None req-29672a53-c7e3-4fb5-8183-aa50247b3c95 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Task: {'id': task-1314478, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.630018] env[61839]: DEBUG oslo_vmware.api [None req-3697bd91-57ab-4763-82ae-06bd5d1d7501 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] Task: {'id': task-1314477, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.255615} completed successfully. 
[ 840.630215] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-3697bd91-57ab-4763-82ae-06bd5d1d7501 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] [instance: 62959833-5834-4c0a-bf4e-3ac1157b3b0c] Extended root virtual disk {{(pid=61839) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}}
[ 840.631098] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41fed256-853c-4883-b143-0f1d7e714eb7 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 840.655820] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-3697bd91-57ab-4763-82ae-06bd5d1d7501 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] [instance: 62959833-5834-4c0a-bf4e-3ac1157b3b0c] Reconfiguring VM instance instance-00000043 to attach disk [datastore1] 62959833-5834-4c0a-bf4e-3ac1157b3b0c/62959833-5834-4c0a-bf4e-3ac1157b3b0c.vmdk or device None with type sparse {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}}
[ 840.656202] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b49529a9-85c7-40b6-8895-6393f845935f {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 840.677171] env[61839]: DEBUG oslo_vmware.api [None req-3697bd91-57ab-4763-82ae-06bd5d1d7501 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] Waiting for the task: (returnval){
[ 840.677171] env[61839]: value = "task-1314479"
[ 840.677171] env[61839]: _type = "Task"
[ 840.677171] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 840.685550] env[61839]: DEBUG oslo_vmware.api [None req-3697bd91-57ab-4763-82ae-06bd5d1d7501 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] Task: {'id': task-1314479, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 840.863571] env[61839]: DEBUG oslo_vmware.api [None req-cabdb6a6-9cdb-4077-8178-485aa41e5aff tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52c53056-21cc-62bb-e763-65ff1e6f6e27, 'name': SearchDatastore_Task, 'duration_secs': 0.01496} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 840.863912] env[61839]: DEBUG oslo_concurrency.lockutils [None req-cabdb6a6-9cdb-4077-8178-485aa41e5aff tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 840.864244] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-cabdb6a6-9cdb-4077-8178-485aa41e5aff tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: 40c54d84-8e50-483a-b4e0-5f1cc72b0880] Processing image e497cc62-282a-4a70-9770-22d80d8a1013 {{(pid=61839) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}}
[ 840.864466] env[61839]: DEBUG oslo_concurrency.lockutils [None req-cabdb6a6-9cdb-4077-8178-485aa41e5aff tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 840.864655] env[61839]: DEBUG oslo_concurrency.lockutils [None req-cabdb6a6-9cdb-4077-8178-485aa41e5aff tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 840.865143] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-cabdb6a6-9cdb-4077-8178-485aa41e5aff tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 840.865242] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-20189e8f-a102-4ac9-ab11-57a40a02ec63 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 840.873824] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-cabdb6a6-9cdb-4077-8178-485aa41e5aff tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 840.873824] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-cabdb6a6-9cdb-4077-8178-485aa41e5aff tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61839) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}}
[ 840.874172] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-60a94218-aabf-4a90-a14c-e4b95272f996 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 840.880256] env[61839]: DEBUG oslo_vmware.api [None req-cabdb6a6-9cdb-4077-8178-485aa41e5aff tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Waiting for the task: (returnval){
[ 840.880256] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]528567c6-6352-97fe-1057-cb27ce011808"
[ 840.880256] env[61839]: _type = "Task"
[ 840.880256] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 840.888313] env[61839]: DEBUG oslo_vmware.api [None req-cabdb6a6-9cdb-4077-8178-485aa41e5aff tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]528567c6-6352-97fe-1057-cb27ce011808, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 840.989833] env[61839]: DEBUG nova.network.neutron [None req-4312ca92-2e7a-4f1e-9c69-7a382d955d89 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] [instance: 3f86a0d5-30fd-42cc-bd40-14bce9d0e56f] Successfully updated port: 6ae0317a-8af8-438c-864f-d31b4d288dab {{(pid=61839) _update_port /opt/stack/nova/nova/network/neutron.py:586}}
[ 841.004480] env[61839]: DEBUG oslo_vmware.api [None req-3b56f212-0cbc-4766-86f9-61b8fddca264 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] Task: {'id': task-1314474, 'name': PowerOnVM_Task} progress is 88%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 841.014435] env[61839]: DEBUG nova.compute.manager [req-e9ab73a7-f115-48d8-9eaf-7fa18fbc48ac req-656aa464-7fb0-41e9-8914-5673c6dc6e03 service nova] [instance: 3f86a0d5-30fd-42cc-bd40-14bce9d0e56f] Received event network-changed-6ae0317a-8af8-438c-864f-d31b4d288dab {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}}
[ 841.014640] env[61839]: DEBUG nova.compute.manager [req-e9ab73a7-f115-48d8-9eaf-7fa18fbc48ac req-656aa464-7fb0-41e9-8914-5673c6dc6e03 service nova] [instance: 3f86a0d5-30fd-42cc-bd40-14bce9d0e56f] Refreshing instance network info cache due to event network-changed-6ae0317a-8af8-438c-864f-d31b4d288dab. {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}}
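The "(returnval){ ... }" blocks above are single log records split across physical lines, with every continuation line repeating the "[ ts] env[61839]:" prefix. A sketch, assuming exactly that prefix shape, that folds continuation lines back into one record per entry before any other parsing:

    import re

    PREFIX = re.compile(r"^\[ *[\d.]+\] env\[\d+\]: ")
    LEVELS = ("DEBUG ", "INFO ", "WARNING ", "ERROR ")

    def fold_records(lines):
        # A new record starts when the text after the prefix begins with a
        # log level; anything else ("value = ...", "} to complete. ...") is a
        # continuation of the current record.
        record = None
        for line in lines:
            body = PREFIX.sub("", line.rstrip("\n"))
            if body.startswith(LEVELS):
                if record is not None:
                    yield record
                record = line.rstrip("\n")
            elif record is not None:
                record += " " + body
        if record is not None:
            yield record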
[ 841.014861] env[61839]: DEBUG oslo_concurrency.lockutils [req-e9ab73a7-f115-48d8-9eaf-7fa18fbc48ac req-656aa464-7fb0-41e9-8914-5673c6dc6e03 service nova] Acquiring lock "refresh_cache-3f86a0d5-30fd-42cc-bd40-14bce9d0e56f" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 841.015022] env[61839]: DEBUG oslo_concurrency.lockutils [req-e9ab73a7-f115-48d8-9eaf-7fa18fbc48ac req-656aa464-7fb0-41e9-8914-5673c6dc6e03 service nova] Acquired lock "refresh_cache-3f86a0d5-30fd-42cc-bd40-14bce9d0e56f" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 841.015341] env[61839]: DEBUG nova.network.neutron [req-e9ab73a7-f115-48d8-9eaf-7fa18fbc48ac req-656aa464-7fb0-41e9-8914-5673c6dc6e03 service nova] [instance: 3f86a0d5-30fd-42cc-bd40-14bce9d0e56f] Refreshing network info cache for port 6ae0317a-8af8-438c-864f-d31b4d288dab {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}}
[ 841.051782] env[61839]: DEBUG oslo_vmware.api [None req-29672a53-c7e3-4fb5-8183-aa50247b3c95 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Task: {'id': task-1314478, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 841.187578] env[61839]: DEBUG oslo_vmware.api [None req-3697bd91-57ab-4763-82ae-06bd5d1d7501 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] Task: {'id': task-1314479, 'name': ReconfigVM_Task, 'duration_secs': 0.38009} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 841.187929] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-3697bd91-57ab-4763-82ae-06bd5d1d7501 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] [instance: 62959833-5834-4c0a-bf4e-3ac1157b3b0c] Reconfigured VM instance instance-00000043 to attach disk [datastore1] 62959833-5834-4c0a-bf4e-3ac1157b3b0c/62959833-5834-4c0a-bf4e-3ac1157b3b0c.vmdk or device None with type sparse {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}}
[ 841.188870] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1a26bd0e-401b-4977-8d51-b641f29592a5 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 841.197490] env[61839]: DEBUG oslo_vmware.api [None req-3697bd91-57ab-4763-82ae-06bd5d1d7501 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] Waiting for the task: (returnval){
[ 841.197490] env[61839]: value = "task-1314480"
[ 841.197490] env[61839]: _type = "Task"
[ 841.197490] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 841.207341] env[61839]: DEBUG oslo_vmware.api [None req-3697bd91-57ab-4763-82ae-06bd5d1d7501 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] Task: {'id': task-1314480, 'name': Rename_Task} progress is 5%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 841.289514] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f621877f-2452-41d0-b3e0-eac216bf18ae {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 841.297566] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e687ef68-2e2c-47bd-9957-8f2d73457299 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 841.329696] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2f8b726-992f-4f22-b45b-7a6ae3f15b78 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 841.338161] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c435d7b-63ad-4f42-b87c-6f6e709e3a16 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 841.352977] env[61839]: DEBUG nova.compute.provider_tree [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 841.390930] env[61839]: DEBUG oslo_vmware.api [None req-cabdb6a6-9cdb-4077-8178-485aa41e5aff tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]528567c6-6352-97fe-1057-cb27ce011808, 'name': SearchDatastore_Task, 'duration_secs': 0.011637} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 841.391867] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bb3256e6-4404-4515-80fa-482fe48b2e49 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 841.397486] env[61839]: DEBUG oslo_vmware.api [None req-cabdb6a6-9cdb-4077-8178-485aa41e5aff tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Waiting for the task: (returnval){
[ 841.397486] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52498871-4c35-c517-399b-c10809228da4"
[ 841.397486] env[61839]: _type = "Task"
[ 841.397486] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 841.405681] env[61839]: DEBUG oslo_vmware.api [None req-cabdb6a6-9cdb-4077-8178-485aa41e5aff tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52498871-4c35-c517-399b-c10809228da4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 841.416857] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0b723d18-6a91-468b-b8d0-3e444fa3d9c3 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Acquiring lock "a4463efc-ffca-4552-a072-cbf5fe062533" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 841.416857] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0b723d18-6a91-468b-b8d0-3e444fa3d9c3 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Lock "a4463efc-ffca-4552-a072-cbf5fe062533" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 841.492112] env[61839]: DEBUG oslo_concurrency.lockutils [None req-4312ca92-2e7a-4f1e-9c69-7a382d955d89 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Acquiring lock "refresh_cache-3f86a0d5-30fd-42cc-bd40-14bce9d0e56f" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 841.504398] env[61839]: DEBUG oslo_vmware.api [None req-3b56f212-0cbc-4766-86f9-61b8fddca264 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] Task: {'id': task-1314474, 'name': PowerOnVM_Task, 'duration_secs': 1.666718} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 841.504677] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-3b56f212-0cbc-4766-86f9-61b8fddca264 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] [instance: 49d4720b-83e3-47d9-b727-5bb255de2e7c] Powered on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}}
[ 841.505047] env[61839]: INFO nova.compute.manager [None req-3b56f212-0cbc-4766-86f9-61b8fddca264 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] [instance: 49d4720b-83e3-47d9-b727-5bb255de2e7c] Took 11.00 seconds to spawn the instance on the hypervisor.
[ 841.505123] env[61839]: DEBUG nova.compute.manager [None req-3b56f212-0cbc-4766-86f9-61b8fddca264 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] [instance: 49d4720b-83e3-47d9-b727-5bb255de2e7c] Checking state {{(pid=61839) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}}
[ 841.505872] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abd00d96-fc03-4b5f-acdc-778b950a090f {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 841.548468] env[61839]: DEBUG oslo_vmware.api [None req-29672a53-c7e3-4fb5-8183-aa50247b3c95 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Task: {'id': task-1314478, 'name': PowerOnVM_Task, 'duration_secs': 0.706045} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
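The lockutils records above time both sides of every lock: "acquired ... :: waited Ns" on entry and ""released" ... :: held Ns" on exit (the 18.861s terminate_instance hold earlier is the kind of outlier worth surfacing). A sketch keyed to the release wording; the regex and helper name are mine, not part of oslo.concurrency:

    import re

    HELD = re.compile(r'Lock "(?P<name>[^"]+)" "released" by '
                      r'"(?P<owner>[^"]+)" :: held (?P<secs>[\d.]+)s')

    def longest_holds(lines, top=5):
        # Returns the top N (seconds_held, lock_name, owner) triples.
        holds = [(float(m["secs"]), m["name"], m["owner"])
                 for m in (HELD.search(l) for l in lines) if m]
        return sorted(holds, reverse=True)[:top]

The same approach with "acquired ... :: waited" instead of "released ... :: held" surfaces lock contention rather than hold time.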
[ 841.548804] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-29672a53-c7e3-4fb5-8183-aa50247b3c95 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 047080fa-8781-47b1-89d8-2e4c8031b164] Powered on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}}
[ 841.549054] env[61839]: DEBUG nova.compute.manager [None req-29672a53-c7e3-4fb5-8183-aa50247b3c95 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 047080fa-8781-47b1-89d8-2e4c8031b164] Checking state {{(pid=61839) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}}
[ 841.549854] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90796cb2-e8e8-4ef9-beb5-ca730ab9e638 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 841.553189] env[61839]: DEBUG nova.network.neutron [req-e9ab73a7-f115-48d8-9eaf-7fa18fbc48ac req-656aa464-7fb0-41e9-8914-5673c6dc6e03 service nova] [instance: 3f86a0d5-30fd-42cc-bd40-14bce9d0e56f] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 841.630101] env[61839]: DEBUG nova.network.neutron [req-e9ab73a7-f115-48d8-9eaf-7fa18fbc48ac req-656aa464-7fb0-41e9-8914-5673c6dc6e03 service nova] [instance: 3f86a0d5-30fd-42cc-bd40-14bce9d0e56f] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 841.707744] env[61839]: DEBUG oslo_vmware.api [None req-3697bd91-57ab-4763-82ae-06bd5d1d7501 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] Task: {'id': task-1314480, 'name': Rename_Task, 'duration_secs': 0.14197} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 841.708050] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-3697bd91-57ab-4763-82ae-06bd5d1d7501 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] [instance: 62959833-5834-4c0a-bf4e-3ac1157b3b0c] Powering on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}}
[ 841.708313] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1b9e8bc4-c6a2-41e1-9478-fea4adf3bcb4 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 841.714760] env[61839]: DEBUG oslo_vmware.api [None req-3697bd91-57ab-4763-82ae-06bd5d1d7501 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] Waiting for the task: (returnval){
[ 841.714760] env[61839]: value = "task-1314481"
[ 841.714760] env[61839]: _type = "Task"
[ 841.714760] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 841.722899] env[61839]: DEBUG oslo_vmware.api [None req-3697bd91-57ab-4763-82ae-06bd5d1d7501 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] Task: {'id': task-1314481, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 841.856837] env[61839]: DEBUG nova.scheduler.client.report [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 841.908388] env[61839]: DEBUG oslo_vmware.api [None req-cabdb6a6-9cdb-4077-8178-485aa41e5aff tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52498871-4c35-c517-399b-c10809228da4, 'name': SearchDatastore_Task, 'duration_secs': 0.013772} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 841.908645] env[61839]: DEBUG oslo_concurrency.lockutils [None req-cabdb6a6-9cdb-4077-8178-485aa41e5aff tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 841.908914] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-cabdb6a6-9cdb-4077-8178-485aa41e5aff tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk to [datastore1] 40c54d84-8e50-483a-b4e0-5f1cc72b0880/40c54d84-8e50-483a-b4e0-5f1cc72b0880.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}}
[ 841.909210] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-094ba801-ee4a-49fd-8f6d-f67c0eacfb81 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 841.916196] env[61839]: DEBUG oslo_vmware.api [None req-cabdb6a6-9cdb-4077-8178-485aa41e5aff tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Waiting for the task: (returnval){
[ 841.916196] env[61839]: value = "task-1314482"
[ 841.916196] env[61839]: _type = "Task"
[ 841.916196] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 841.924439] env[61839]: DEBUG oslo_vmware.api [None req-cabdb6a6-9cdb-4077-8178-485aa41e5aff tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': task-1314482, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 842.024335] env[61839]: INFO nova.compute.manager [None req-3b56f212-0cbc-4766-86f9-61b8fddca264 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] [instance: 49d4720b-83e3-47d9-b727-5bb255de2e7c] Took 36.36 seconds to build instance.
[ 842.067476] env[61839]: DEBUG oslo_concurrency.lockutils [None req-29672a53-c7e3-4fb5-8183-aa50247b3c95 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 842.132384] env[61839]: DEBUG oslo_concurrency.lockutils [req-e9ab73a7-f115-48d8-9eaf-7fa18fbc48ac req-656aa464-7fb0-41e9-8914-5673c6dc6e03 service nova] Releasing lock "refresh_cache-3f86a0d5-30fd-42cc-bd40-14bce9d0e56f" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 842.132885] env[61839]: DEBUG oslo_concurrency.lockutils [None req-4312ca92-2e7a-4f1e-9c69-7a382d955d89 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Acquired lock "refresh_cache-3f86a0d5-30fd-42cc-bd40-14bce9d0e56f" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 842.133104] env[61839]: DEBUG nova.network.neutron [None req-4312ca92-2e7a-4f1e-9c69-7a382d955d89 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] [instance: 3f86a0d5-30fd-42cc-bd40-14bce9d0e56f] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}}
[ 842.229924] env[61839]: DEBUG oslo_vmware.api [None req-3697bd91-57ab-4763-82ae-06bd5d1d7501 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] Task: {'id': task-1314481, 'name': PowerOnVM_Task, 'duration_secs': 0.429169} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 842.230362] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-3697bd91-57ab-4763-82ae-06bd5d1d7501 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] [instance: 62959833-5834-4c0a-bf4e-3ac1157b3b0c] Powered on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}}
[ 842.230744] env[61839]: INFO nova.compute.manager [None req-3697bd91-57ab-4763-82ae-06bd5d1d7501 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] [instance: 62959833-5834-4c0a-bf4e-3ac1157b3b0c] Took 9.25 seconds to spawn the instance on the hypervisor.
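The INFO summaries above ("Took 11.00 seconds to spawn the instance on the hypervisor.", "Took 36.36 seconds to build instance.") are the per-instance timing lines. A sketch matching those two exact phrasings; the function name is illustrative:

    import re

    TOOK = re.compile(
        r"\[instance: (?P<uuid>[0-9a-f-]{36})\] Took (?P<secs>[\d.]+) "
        r"seconds to (?P<what>spawn the instance on the hypervisor|build instance)"
    )

    def build_times(lines):
        # e.g. {'49d4720b-...': {'spawn': 11.0, 'build': 36.36}}
        out = {}
        for line in lines:
            m = TOOK.search(line)
            if m:
                key = "spawn" if m["what"].startswith("spawn") else "build"
                out.setdefault(m["uuid"], {})[key] = float(m["secs"])
        return out

The gap between the two numbers (build minus spawn) is time spent outside the hypervisor: scheduling, networking, and image handling.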
[ 842.230972] env[61839]: DEBUG nova.compute.manager [None req-3697bd91-57ab-4763-82ae-06bd5d1d7501 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] [instance: 62959833-5834-4c0a-bf4e-3ac1157b3b0c] Checking state {{(pid=61839) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}}
[ 842.231984] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a23fe90b-362d-4a8a-9767-c5545f907c9d {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 842.364058] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.388s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 842.364058] env[61839]: DEBUG nova.compute.manager [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] [instance: ce59c937-fc0b-464f-baaa-461c6f6c2d57] Start building networks asynchronously for instance. {{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}}
[ 842.366598] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0e959dc4-0080-49d1-93fb-cad2bd59d9d7 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.978s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 842.368377] env[61839]: INFO nova.compute.claims [None req-0e959dc4-0080-49d1-93fb-cad2bd59d9d7 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] [instance: 2cb53e37-8b0b-48b7-a973-061b91df46df] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 842.430591] env[61839]: DEBUG oslo_vmware.api [None req-cabdb6a6-9cdb-4077-8178-485aa41e5aff tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': task-1314482, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 842.528400] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3b56f212-0cbc-4766-86f9-61b8fddca264 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] Lock "49d4720b-83e3-47d9-b727-5bb255de2e7c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 96.554s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 842.665918] env[61839]: DEBUG nova.network.neutron [None req-4312ca92-2e7a-4f1e-9c69-7a382d955d89 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] [instance: 3f86a0d5-30fd-42cc-bd40-14bce9d0e56f] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 842.754199] env[61839]: INFO nova.compute.manager [None req-3697bd91-57ab-4763-82ae-06bd5d1d7501 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] [instance: 62959833-5834-4c0a-bf4e-3ac1157b3b0c] Took 35.28 seconds to build instance.
[ 842.818583] env[61839]: DEBUG nova.network.neutron [None req-4312ca92-2e7a-4f1e-9c69-7a382d955d89 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] [instance: 3f86a0d5-30fd-42cc-bd40-14bce9d0e56f] Updating instance_info_cache with network_info: [{"id": "6ae0317a-8af8-438c-864f-d31b4d288dab", "address": "fa:16:3e:0b:3e:68", "network": {"id": "9ad4a7c4-51fa-42e2-927a-24d25b423b8b", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1594396457-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9d28bf7713204dfb9682d9c002cb5449", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aef08290-001a-4ae8-aff0-1889e2211389", "external-id": "nsx-vlan-transportzone-389", "segmentation_id": 389, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6ae0317a-8a", "ovs_interfaceid": "6ae0317a-8af8-438c-864f-d31b4d288dab", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 842.873087] env[61839]: DEBUG nova.compute.utils [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Using /dev/sd instead of None {{(pid=61839) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}}
[ 842.879183] env[61839]: DEBUG nova.compute.manager [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] [instance: ce59c937-fc0b-464f-baaa-461c6f6c2d57] Allocating IP information in the background. {{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}}
[ 842.879183] env[61839]: DEBUG nova.network.neutron [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] [instance: ce59c937-fc0b-464f-baaa-461c6f6c2d57] allocate_for_instance() {{(pid=61839) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}}
[ 842.918216] env[61839]: DEBUG nova.policy [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd9360f9fcba74a5aae33fdec3660b790', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'fefa976ea98445f1b4e719d3e3a1e8af', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61839) authorize /opt/stack/nova/nova/policy.py:201}}
[ 842.931523] env[61839]: DEBUG oslo_vmware.api [None req-cabdb6a6-9cdb-4077-8178-485aa41e5aff tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': task-1314482, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.641292} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 842.931933] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-cabdb6a6-9cdb-4077-8178-485aa41e5aff tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk to [datastore1] 40c54d84-8e50-483a-b4e0-5f1cc72b0880/40c54d84-8e50-483a-b4e0-5f1cc72b0880.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}}
[ 842.932240] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-cabdb6a6-9cdb-4077-8178-485aa41e5aff tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: 40c54d84-8e50-483a-b4e0-5f1cc72b0880] Extending root virtual disk to 1048576 {{(pid=61839) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}}
[ 842.932509] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-dd1a7fd9-43a9-4abd-a4ab-4eece9600c86 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 842.940054] env[61839]: DEBUG oslo_vmware.api [None req-cabdb6a6-9cdb-4077-8178-485aa41e5aff tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Waiting for the task: (returnval){
[ 842.940054] env[61839]: value = "task-1314484"
[ 842.940054] env[61839]: _type = "Task"
[ 842.940054] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 842.948716] env[61839]: DEBUG oslo_vmware.api [None req-cabdb6a6-9cdb-4077-8178-485aa41e5aff tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': task-1314484, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
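The "Updating instance_info_cache with network_info: [...]" record above embeds the VIF as a JSON list (double-quoted keys, true/false/null), so once sliced out it parses with json.loads. A sketch, assuming the blob runs up to the trailing "{{" marker as in this record; the pipe-delimited "|[...]|" variant that appears a few records later is not handled here:

    import json
    import re

    # Captures the JSON list between "network_info: " and the " {{" tail.
    NWINFO = re.compile(r"network_info: (\[.*\]) \{\{")

    def port_summaries(record):
        # Returns [(port_id, mac, [fixed_ips...]), ...]; [] when the cache
        # update carried no ports (the "network_info: []" record above).
        m = NWINFO.search(record)
        if not m:
            return []
        return [(vif["id"], vif["address"],
                 [ip["address"]
                  for subnet in vif["network"]["subnets"]
                  for ip in subnet["ips"]])
                for vif in json.loads(m.group(1))]

On the record above this yields the port 6ae0317a-... with MAC fa:16:3e:0b:3e:68 and fixed IP 192.168.128.11.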
[ 843.030921] env[61839]: DEBUG nova.compute.manager [None req-a6fa6e9f-3ab4-4eb2-8dbd-9e4dcff4ce69 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: e47c08c6-5de3-48b0-8327-57ddb273555f] Starting instance... {{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}}
[ 843.199609] env[61839]: DEBUG nova.network.neutron [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] [instance: ce59c937-fc0b-464f-baaa-461c6f6c2d57] Successfully created port: f7226659-074b-4580-8d02-5d637bbba250 {{(pid=61839) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}}
[ 843.212204] env[61839]: DEBUG oslo_concurrency.lockutils [None req-4a30a124-7425-49b0-9db2-f6c7b005cfc6 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Acquiring lock "047080fa-8781-47b1-89d8-2e4c8031b164" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 843.212541] env[61839]: DEBUG oslo_concurrency.lockutils [None req-4a30a124-7425-49b0-9db2-f6c7b005cfc6 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Lock "047080fa-8781-47b1-89d8-2e4c8031b164" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 843.212806] env[61839]: DEBUG oslo_concurrency.lockutils [None req-4a30a124-7425-49b0-9db2-f6c7b005cfc6 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Acquiring lock "047080fa-8781-47b1-89d8-2e4c8031b164-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 843.213144] env[61839]: DEBUG oslo_concurrency.lockutils [None req-4a30a124-7425-49b0-9db2-f6c7b005cfc6 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Lock "047080fa-8781-47b1-89d8-2e4c8031b164-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 843.213712] env[61839]: DEBUG oslo_concurrency.lockutils [None req-4a30a124-7425-49b0-9db2-f6c7b005cfc6 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Lock "047080fa-8781-47b1-89d8-2e4c8031b164-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 843.215678] env[61839]: INFO nova.compute.manager [None req-4a30a124-7425-49b0-9db2-f6c7b005cfc6 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 047080fa-8781-47b1-89d8-2e4c8031b164] Terminating instance
[ 843.220976] env[61839]: DEBUG nova.compute.manager [None req-4a30a124-7425-49b0-9db2-f6c7b005cfc6 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 047080fa-8781-47b1-89d8-2e4c8031b164] Start destroying the instance on the hypervisor. {{(pid=61839) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}}
[ 843.221201] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-4a30a124-7425-49b0-9db2-f6c7b005cfc6 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 047080fa-8781-47b1-89d8-2e4c8031b164] Destroying instance {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}}
[ 843.222110] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ea5c140-80a4-4ecc-add1-c26a45585302 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 843.233898] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a30a124-7425-49b0-9db2-f6c7b005cfc6 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 047080fa-8781-47b1-89d8-2e4c8031b164] Powering off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}}
[ 843.234268] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f2573a22-839a-4c25-96d3-6dcf9819dbcd {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 843.244737] env[61839]: DEBUG oslo_vmware.api [None req-4a30a124-7425-49b0-9db2-f6c7b005cfc6 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Waiting for the task: (returnval){
[ 843.244737] env[61839]: value = "task-1314485"
[ 843.244737] env[61839]: _type = "Task"
[ 843.244737] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 843.257126] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3697bd91-57ab-4763-82ae-06bd5d1d7501 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] Lock "62959833-5834-4c0a-bf4e-3ac1157b3b0c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 90.916s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 843.257126] env[61839]: DEBUG oslo_vmware.api [None req-4a30a124-7425-49b0-9db2-f6c7b005cfc6 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Task: {'id': task-1314485, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 843.321818] env[61839]: DEBUG oslo_concurrency.lockutils [None req-4312ca92-2e7a-4f1e-9c69-7a382d955d89 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Releasing lock "refresh_cache-3f86a0d5-30fd-42cc-bd40-14bce9d0e56f" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 843.322284] env[61839]: DEBUG nova.compute.manager [None req-4312ca92-2e7a-4f1e-9c69-7a382d955d89 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] [instance: 3f86a0d5-30fd-42cc-bd40-14bce9d0e56f] Instance network_info: |[{"id": "6ae0317a-8af8-438c-864f-d31b4d288dab", "address": "fa:16:3e:0b:3e:68", "network": {"id": "9ad4a7c4-51fa-42e2-927a-24d25b423b8b", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1594396457-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9d28bf7713204dfb9682d9c002cb5449", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aef08290-001a-4ae8-aff0-1889e2211389", "external-id": "nsx-vlan-transportzone-389", "segmentation_id": 389, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6ae0317a-8a", "ovs_interfaceid": "6ae0317a-8af8-438c-864f-d31b4d288dab", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}}
[ 843.323333] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-4312ca92-2e7a-4f1e-9c69-7a382d955d89 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] [instance: 3f86a0d5-30fd-42cc-bd40-14bce9d0e56f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0b:3e:68', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'aef08290-001a-4ae8-aff0-1889e2211389', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6ae0317a-8af8-438c-864f-d31b4d288dab', 'vif_model': 'vmxnet3'}] {{(pid=61839) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}}
[ 843.333190] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-4312ca92-2e7a-4f1e-9c69-7a382d955d89 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Creating folder: Project (9d28bf7713204dfb9682d9c002cb5449). Parent ref: group-v281288. {{(pid=61839) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}}
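Every record above carries a request id, and grouping on it isolates a single operation end to end, e.g. req-4a30a124's terminate sequence (power off, unregister, delete from datastore). A sketch keyed to the req-<uuid> token shape; the helper name is mine:

    import re
    from collections import defaultdict

    REQ = re.compile(r"\breq-[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}"
                     r"-[0-9a-f]{4}-[0-9a-f]{12}\b")

    def by_request(lines):
        # Buckets each record under the first request id it mentions;
        # "[-]" records (bare oslo.vmware calls) carry none and are skipped.
        groups = defaultdict(list)
        for line in lines:
            m = REQ.search(line)
            if m:
                groups[m.group(0)].append(line)
        return groups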
[ 843.333594] env[61839]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-87001809-6ec1-4546-8a8d-7f8ee199a3ef {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 843.345731] env[61839]: INFO nova.virt.vmwareapi.vm_util [None req-4312ca92-2e7a-4f1e-9c69-7a382d955d89 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Created folder: Project (9d28bf7713204dfb9682d9c002cb5449) in parent group-v281288.
[ 843.345906] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-4312ca92-2e7a-4f1e-9c69-7a382d955d89 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Creating folder: Instances. Parent ref: group-v281361. {{(pid=61839) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}}
[ 843.346212] env[61839]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-cebd00f2-8d8f-489d-9a8b-634483ec71d4 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 843.360093] env[61839]: INFO nova.virt.vmwareapi.vm_util [None req-4312ca92-2e7a-4f1e-9c69-7a382d955d89 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Created folder: Instances in parent group-v281361.
[ 843.360951] env[61839]: DEBUG oslo.service.loopingcall [None req-4312ca92-2e7a-4f1e-9c69-7a382d955d89 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 843.361429] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3f86a0d5-30fd-42cc-bd40-14bce9d0e56f] Creating VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}}
[ 843.361796] env[61839]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-892147ac-5dae-41d8-acf9-6c56decd9683 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 843.377772] env[61839]: DEBUG nova.compute.manager [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] [instance: ce59c937-fc0b-464f-baaa-461c6f6c2d57] Start building block device mappings for instance. {{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}}
[ 843.385171] env[61839]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){
[ 843.385171] env[61839]: value = "task-1314488"
[ 843.385171] env[61839]: _type = "Task"
[ 843.385171] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 843.394168] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314488, 'name': CreateVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 843.460739] env[61839]: DEBUG oslo_vmware.api [None req-cabdb6a6-9cdb-4077-8178-485aa41e5aff tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': task-1314484, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.328386} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 843.460992] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-cabdb6a6-9cdb-4077-8178-485aa41e5aff tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: 40c54d84-8e50-483a-b4e0-5f1cc72b0880] Extended root virtual disk {{(pid=61839) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}}
[ 843.462118] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31448fbb-600a-4586-ac58-982ddef1b613 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 843.490677] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-cabdb6a6-9cdb-4077-8178-485aa41e5aff tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: 40c54d84-8e50-483a-b4e0-5f1cc72b0880] Reconfiguring VM instance instance-00000044 to attach disk [datastore1] 40c54d84-8e50-483a-b4e0-5f1cc72b0880/40c54d84-8e50-483a-b4e0-5f1cc72b0880.vmdk or device None with type sparse {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}}
[ 843.494164] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8b4a88f5-f69c-48f7-9c2d-5bfcf83128ef {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 843.514985] env[61839]: DEBUG oslo_vmware.api [None req-cabdb6a6-9cdb-4077-8178-485aa41e5aff tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Waiting for the task: (returnval){
[ 843.514985] env[61839]: value = "task-1314489"
[ 843.514985] env[61839]: _type = "Task"
[ 843.514985] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 843.525928] env[61839]: DEBUG oslo_vmware.api [None req-cabdb6a6-9cdb-4077-8178-485aa41e5aff tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': task-1314489, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 843.555617] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a6fa6e9f-3ab4-4eb2-8dbd-9e4dcff4ce69 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 843.683232] env[61839]: DEBUG nova.compute.manager [req-5dd5bb0a-e3e7-4777-8f18-77e7b9e87b87 req-78dd2484-8828-4698-9fa2-c2b6c1c48b0a service nova] [instance: 62959833-5834-4c0a-bf4e-3ac1157b3b0c] Received event network-changed-8d3be36a-f8da-4212-9f18-edefa1681f82 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}}
[ 843.683526] env[61839]: DEBUG nova.compute.manager [req-5dd5bb0a-e3e7-4777-8f18-77e7b9e87b87 req-78dd2484-8828-4698-9fa2-c2b6c1c48b0a service nova] [instance: 62959833-5834-4c0a-bf4e-3ac1157b3b0c] Refreshing instance network info cache due to event network-changed-8d3be36a-f8da-4212-9f18-edefa1681f82. {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}}
[ 843.683753] env[61839]: DEBUG oslo_concurrency.lockutils [req-5dd5bb0a-e3e7-4777-8f18-77e7b9e87b87 req-78dd2484-8828-4698-9fa2-c2b6c1c48b0a service nova] Acquiring lock "refresh_cache-62959833-5834-4c0a-bf4e-3ac1157b3b0c" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 843.683981] env[61839]: DEBUG oslo_concurrency.lockutils [req-5dd5bb0a-e3e7-4777-8f18-77e7b9e87b87 req-78dd2484-8828-4698-9fa2-c2b6c1c48b0a service nova] Acquired lock "refresh_cache-62959833-5834-4c0a-bf4e-3ac1157b3b0c" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 843.684087] env[61839]: DEBUG nova.network.neutron [req-5dd5bb0a-e3e7-4777-8f18-77e7b9e87b87 req-78dd2484-8828-4698-9fa2-c2b6c1c48b0a service nova] [instance: 62959833-5834-4c0a-bf4e-3ac1157b3b0c] Refreshing network info cache for port 8d3be36a-f8da-4212-9f18-edefa1681f82 {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}}
[ 843.741615] env[61839]: DEBUG oslo_concurrency.lockutils [None req-d360fcd5-1768-4667-8854-1b10fb75b424 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] Acquiring lock "49d4720b-83e3-47d9-b727-5bb255de2e7c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 843.741615] env[61839]: DEBUG oslo_concurrency.lockutils [None req-d360fcd5-1768-4667-8854-1b10fb75b424 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] Lock "49d4720b-83e3-47d9-b727-5bb255de2e7c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 843.741615] env[61839]: DEBUG oslo_concurrency.lockutils [None req-d360fcd5-1768-4667-8854-1b10fb75b424 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] Acquiring lock "49d4720b-83e3-47d9-b727-5bb255de2e7c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
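The external event records above ("Received event network-changed-..." here, network-vif-plugged earlier) pack the event name and the Neutron port UUID into one hyphenated token; the trailing 36 characters are the port. A sketch splitting the two event kinds seen in this log (names illustrative):

    import re

    EVENT = re.compile(r"Received event "
                       r"(?P<name>network-vif-plugged|network-changed)"
                       r"-(?P<port>[0-9a-f-]{36})")

    def events(lines):
        # Returns [(event_name, port_uuid), ...] in log order.
        return [(m["name"], m["port"]) for m in map(EVENT.search, lines) if m]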
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 843.741615] env[61839]: DEBUG oslo_concurrency.lockutils [None req-d360fcd5-1768-4667-8854-1b10fb75b424 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] Lock "49d4720b-83e3-47d9-b727-5bb255de2e7c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 843.742941] env[61839]: DEBUG oslo_concurrency.lockutils [None req-d360fcd5-1768-4667-8854-1b10fb75b424 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] Lock "49d4720b-83e3-47d9-b727-5bb255de2e7c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 843.743722] env[61839]: INFO nova.compute.manager [None req-d360fcd5-1768-4667-8854-1b10fb75b424 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] [instance: 49d4720b-83e3-47d9-b727-5bb255de2e7c] Terminating instance [ 843.752162] env[61839]: DEBUG nova.compute.manager [None req-d360fcd5-1768-4667-8854-1b10fb75b424 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] [instance: 49d4720b-83e3-47d9-b727-5bb255de2e7c] Start destroying the instance on the hypervisor. {{(pid=61839) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 843.752162] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-d360fcd5-1768-4667-8854-1b10fb75b424 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] [instance: 49d4720b-83e3-47d9-b727-5bb255de2e7c] Destroying instance {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 843.752162] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4de259d-e9b6-401b-9242-96524afa82cc {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.760489] env[61839]: DEBUG nova.compute.manager [None req-0b723d18-6a91-468b-b8d0-3e444fa3d9c3 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: a4463efc-ffca-4552-a072-cbf5fe062533] Starting instance... {{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 843.763377] env[61839]: DEBUG oslo_vmware.api [None req-4a30a124-7425-49b0-9db2-f6c7b005cfc6 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Task: {'id': task-1314485, 'name': PowerOffVM_Task, 'duration_secs': 0.300972} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 843.769059] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a30a124-7425-49b0-9db2-f6c7b005cfc6 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 047080fa-8781-47b1-89d8-2e4c8031b164] Powered off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 843.769323] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-4a30a124-7425-49b0-9db2-f6c7b005cfc6 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 047080fa-8781-47b1-89d8-2e4c8031b164] Unregistering the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 843.769637] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-d360fcd5-1768-4667-8854-1b10fb75b424 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] [instance: 49d4720b-83e3-47d9-b727-5bb255de2e7c] Powering off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 843.770229] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-aaea20b5-5143-4c36-b8dc-d083be204512 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.773285] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-241dda65-1b26-4b5a-b431-69581ca0712d {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.786375] env[61839]: DEBUG oslo_concurrency.lockutils [None req-94f6b642-8cf2-4949-b1bb-e096f0676014 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Acquiring lock "fa8a2265-291e-4424-bea1-72574e495a72" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 843.786574] env[61839]: DEBUG oslo_concurrency.lockutils [None req-94f6b642-8cf2-4949-b1bb-e096f0676014 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Lock "fa8a2265-291e-4424-bea1-72574e495a72" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 843.786905] env[61839]: DEBUG oslo_vmware.api [None req-d360fcd5-1768-4667-8854-1b10fb75b424 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] Waiting for the task: (returnval){ [ 843.786905] env[61839]: value = "task-1314491" [ 843.786905] env[61839]: _type = "Task" [ 843.786905] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 843.799957] env[61839]: DEBUG oslo_vmware.api [None req-d360fcd5-1768-4667-8854-1b10fb75b424 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] Task: {'id': task-1314491, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.807315] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3854c188-f11b-458d-a93f-f6094a790add {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.816110] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc2a9188-74d5-4662-8351-011591bd20e5 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.855886] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75f5f9d9-3b47-4ab2-8087-dce6ecd3c7e0 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.858917] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-4a30a124-7425-49b0-9db2-f6c7b005cfc6 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 047080fa-8781-47b1-89d8-2e4c8031b164] Unregistered the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 843.859128] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-4a30a124-7425-49b0-9db2-f6c7b005cfc6 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 047080fa-8781-47b1-89d8-2e4c8031b164] Deleting contents of the VM from datastore datastore1 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 843.859341] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-4a30a124-7425-49b0-9db2-f6c7b005cfc6 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Deleting the datastore file [datastore1] 047080fa-8781-47b1-89d8-2e4c8031b164 {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 843.860060] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c3f4669e-b2f9-4caa-a10a-7ddce71522f3 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.869052] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b689b37b-4004-4c13-83cf-f395672831b2 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.873370] env[61839]: DEBUG oslo_vmware.api [None req-4a30a124-7425-49b0-9db2-f6c7b005cfc6 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Waiting for the task: (returnval){ [ 843.873370] env[61839]: value = "task-1314492" [ 843.873370] env[61839]: _type = "Task" [ 843.873370] env[61839]: } to complete. 
{{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 843.888969] env[61839]: DEBUG nova.compute.provider_tree [None req-0e959dc4-0080-49d1-93fb-cad2bd59d9d7 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 843.898031] env[61839]: DEBUG oslo_vmware.api [None req-4a30a124-7425-49b0-9db2-f6c7b005cfc6 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Task: {'id': task-1314492, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.903990] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314488, 'name': CreateVM_Task} progress is 99%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.026293] env[61839]: DEBUG oslo_vmware.api [None req-cabdb6a6-9cdb-4077-8178-485aa41e5aff tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': task-1314489, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.284944] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0b723d18-6a91-468b-b8d0-3e444fa3d9c3 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 844.295912] env[61839]: DEBUG oslo_vmware.api [None req-d360fcd5-1768-4667-8854-1b10fb75b424 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] Task: {'id': task-1314491, 'name': PowerOffVM_Task, 'duration_secs': 0.248404} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 844.296198] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-d360fcd5-1768-4667-8854-1b10fb75b424 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] [instance: 49d4720b-83e3-47d9-b727-5bb255de2e7c] Powered off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 844.296401] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-d360fcd5-1768-4667-8854-1b10fb75b424 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] [instance: 49d4720b-83e3-47d9-b727-5bb255de2e7c] Unregistering the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 844.296663] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-62bc90be-e5cb-4f6c-b456-099a14b44113 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.367842] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-d360fcd5-1768-4667-8854-1b10fb75b424 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] [instance: 49d4720b-83e3-47d9-b727-5bb255de2e7c] Unregistered the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 844.368094] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-d360fcd5-1768-4667-8854-1b10fb75b424 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] [instance: 49d4720b-83e3-47d9-b727-5bb255de2e7c] Deleting contents of the VM from datastore datastore1 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 844.368287] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-d360fcd5-1768-4667-8854-1b10fb75b424 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] Deleting the datastore file [datastore1] 49d4720b-83e3-47d9-b727-5bb255de2e7c {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 844.368564] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4dd43bc5-0877-418a-a4a5-737c5efb5826 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.375637] env[61839]: DEBUG oslo_vmware.api [None req-d360fcd5-1768-4667-8854-1b10fb75b424 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] Waiting for the task: (returnval){ [ 844.375637] env[61839]: value = "task-1314495" [ 844.375637] env[61839]: _type = "Task" [ 844.375637] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 844.392365] env[61839]: DEBUG oslo_vmware.api [None req-4a30a124-7425-49b0-9db2-f6c7b005cfc6 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Task: {'id': task-1314492, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.16221} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 844.393384] env[61839]: DEBUG nova.compute.manager [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] [instance: ce59c937-fc0b-464f-baaa-461c6f6c2d57] Start spawning the instance on the hypervisor. {{(pid=61839) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 844.395744] env[61839]: DEBUG nova.scheduler.client.report [None req-0e959dc4-0080-49d1-93fb-cad2bd59d9d7 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 844.399019] env[61839]: DEBUG oslo_vmware.api [None req-d360fcd5-1768-4667-8854-1b10fb75b424 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] Task: {'id': task-1314495, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.399155] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-4a30a124-7425-49b0-9db2-f6c7b005cfc6 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Deleted the datastore file {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 844.399339] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-4a30a124-7425-49b0-9db2-f6c7b005cfc6 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 047080fa-8781-47b1-89d8-2e4c8031b164] Deleted contents of the VM from datastore datastore1 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 844.399520] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-4a30a124-7425-49b0-9db2-f6c7b005cfc6 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 047080fa-8781-47b1-89d8-2e4c8031b164] Instance destroyed {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 844.399693] env[61839]: INFO nova.compute.manager [None req-4a30a124-7425-49b0-9db2-f6c7b005cfc6 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 047080fa-8781-47b1-89d8-2e4c8031b164] Took 1.18 seconds to destroy the instance on the hypervisor. [ 844.399929] env[61839]: DEBUG oslo.service.loopingcall [None req-4a30a124-7425-49b0-9db2-f6c7b005cfc6 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 844.403020] env[61839]: DEBUG nova.compute.manager [-] [instance: 047080fa-8781-47b1-89d8-2e4c8031b164] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 844.403121] env[61839]: DEBUG nova.network.neutron [-] [instance: 047080fa-8781-47b1-89d8-2e4c8031b164] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 844.410241] env[61839]: DEBUG oslo_concurrency.lockutils [None req-6ef67611-07bb-42ba-8d91-d171ea96d861 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] Acquiring lock "62959833-5834-4c0a-bf4e-3ac1157b3b0c" by "nova.compute.manager.ComputeManager.reboot_instance.<locals>.do_reboot_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 844.410484] env[61839]: DEBUG oslo_concurrency.lockutils [None req-6ef67611-07bb-42ba-8d91-d171ea96d861 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] Lock "62959833-5834-4c0a-bf4e-3ac1157b3b0c" acquired by "nova.compute.manager.ComputeManager.reboot_instance.<locals>.do_reboot_instance" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 844.410712] env[61839]: INFO nova.compute.manager [None req-6ef67611-07bb-42ba-8d91-d171ea96d861 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] [instance: 62959833-5834-4c0a-bf4e-3ac1157b3b0c] Rebooting instance [ 844.411911] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314488, 'name': CreateVM_Task, 'duration_secs': 0.602277} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 844.415873] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3f86a0d5-30fd-42cc-bd40-14bce9d0e56f] Created VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 844.415873] env[61839]: DEBUG oslo_concurrency.lockutils [None req-4312ca92-2e7a-4f1e-9c69-7a382d955d89 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 844.415873] env[61839]: DEBUG oslo_concurrency.lockutils [None req-4312ca92-2e7a-4f1e-9c69-7a382d955d89 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 844.415873] env[61839]: DEBUG oslo_concurrency.lockutils [None req-4312ca92-2e7a-4f1e-9c69-7a382d955d89 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 844.415873] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4806e863-d259-4733-9cba-abb1deecd421 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.419215] env[61839]: DEBUG oslo_vmware.api [None req-4312ca92-2e7a-4f1e-9c69-7a382d955d89 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Waiting for the task: (returnval){ [ 844.419215] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]5208544b-0a3a-ef68-43e5-a5b36cd72889" [ 844.419215] env[61839]: _type = "Task" [ 844.419215] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 844.429211] env[61839]: DEBUG oslo_vmware.api [None req-4312ca92-2e7a-4f1e-9c69-7a382d955d89 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]5208544b-0a3a-ef68-43e5-a5b36cd72889, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.438363] env[61839]: DEBUG nova.virt.hardware [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-18T16:51:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-18T16:50:57Z,direct_url=<?>,disk_format='vmdk',id=e497cc62-282a-4a70-9770-22d80d8a1013,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a98e034276e44534a9feace637762da7',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-10-18T16:50:58Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 844.438632] env[61839]: DEBUG nova.virt.hardware [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Flavor limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 844.438792] env[61839]: DEBUG nova.virt.hardware [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Image limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 844.438980] env[61839]: DEBUG nova.virt.hardware [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Flavor pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 844.439151] env[61839]: DEBUG nova.virt.hardware [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Image pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 844.439306] env[61839]: DEBUG nova.virt.hardware [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 844.439513] env[61839]: DEBUG nova.virt.hardware [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 844.439677] env[61839]: DEBUG nova.virt.hardware [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 844.439847] env[61839]: DEBUG 
nova.virt.hardware [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Got 1 possible topologies {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 844.440021] env[61839]: DEBUG nova.virt.hardware [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 844.440205] env[61839]: DEBUG nova.virt.hardware [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 844.441085] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54d96fe1-a71b-453e-ab44-b4615398c745 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.453374] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1108a74a-1763-4377-85ba-474c808009d9 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.466274] env[61839]: DEBUG nova.network.neutron [req-5dd5bb0a-e3e7-4777-8f18-77e7b9e87b87 req-78dd2484-8828-4698-9fa2-c2b6c1c48b0a service nova] [instance: 62959833-5834-4c0a-bf4e-3ac1157b3b0c] Updated VIF entry in instance network info cache for port 8d3be36a-f8da-4212-9f18-edefa1681f82. 
{{(pid=61839) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 844.466637] env[61839]: DEBUG nova.network.neutron [req-5dd5bb0a-e3e7-4777-8f18-77e7b9e87b87 req-78dd2484-8828-4698-9fa2-c2b6c1c48b0a service nova] [instance: 62959833-5834-4c0a-bf4e-3ac1157b3b0c] Updating instance_info_cache with network_info: [{"id": "8d3be36a-f8da-4212-9f18-edefa1681f82", "address": "fa:16:3e:b8:82:1d", "network": {"id": "04db4c3e-de70-4119-8a0c-0b090344b55a", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-422735768-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "434f932a2a9e448d99fac449918affe9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca99f7a1-6365-4d3c-af16-1b1c1288091e", "external-id": "cl2-zone-334", "segmentation_id": 334, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8d3be36a-f8", "ovs_interfaceid": "8d3be36a-f8da-4212-9f18-edefa1681f82", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 844.526676] env[61839]: DEBUG oslo_vmware.api [None req-cabdb6a6-9cdb-4077-8178-485aa41e5aff tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': task-1314489, 'name': ReconfigVM_Task, 'duration_secs': 0.61406} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 844.527049] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-cabdb6a6-9cdb-4077-8178-485aa41e5aff tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: 40c54d84-8e50-483a-b4e0-5f1cc72b0880] Reconfigured VM instance instance-00000044 to attach disk [datastore1] 40c54d84-8e50-483a-b4e0-5f1cc72b0880/40c54d84-8e50-483a-b4e0-5f1cc72b0880.vmdk or device None with type sparse {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 844.527761] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a01c00d1-d892-4e6d-9fdf-7f20a2c20700 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.536801] env[61839]: DEBUG oslo_vmware.api [None req-cabdb6a6-9cdb-4077-8178-485aa41e5aff tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Waiting for the task: (returnval){ [ 844.536801] env[61839]: value = "task-1314496" [ 844.536801] env[61839]: _type = "Task" [ 844.536801] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 844.542607] env[61839]: DEBUG oslo_vmware.api [None req-cabdb6a6-9cdb-4077-8178-485aa41e5aff tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': task-1314496, 'name': Rename_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.888365] env[61839]: DEBUG oslo_vmware.api [None req-d360fcd5-1768-4667-8854-1b10fb75b424 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] Task: {'id': task-1314495, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.136585} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 844.888687] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-d360fcd5-1768-4667-8854-1b10fb75b424 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] Deleted the datastore file {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 844.888905] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-d360fcd5-1768-4667-8854-1b10fb75b424 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] [instance: 49d4720b-83e3-47d9-b727-5bb255de2e7c] Deleted contents of the VM from datastore datastore1 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 844.889121] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-d360fcd5-1768-4667-8854-1b10fb75b424 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] [instance: 49d4720b-83e3-47d9-b727-5bb255de2e7c] Instance destroyed {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 844.889324] env[61839]: INFO nova.compute.manager [None req-d360fcd5-1768-4667-8854-1b10fb75b424 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] [instance: 49d4720b-83e3-47d9-b727-5bb255de2e7c] Took 1.14 seconds to destroy the instance on the hypervisor. [ 844.889769] env[61839]: DEBUG oslo.service.loopingcall [None req-d360fcd5-1768-4667-8854-1b10fb75b424 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 844.889861] env[61839]: DEBUG nova.compute.manager [-] [instance: 49d4720b-83e3-47d9-b727-5bb255de2e7c] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 844.889933] env[61839]: DEBUG nova.network.neutron [-] [instance: 49d4720b-83e3-47d9-b727-5bb255de2e7c] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 844.904682] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0e959dc4-0080-49d1-93fb-cad2bd59d9d7 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.538s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 844.905231] env[61839]: DEBUG nova.compute.manager [None req-0e959dc4-0080-49d1-93fb-cad2bd59d9d7 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] [instance: 2cb53e37-8b0b-48b7-a973-061b91df46df] Start building networks asynchronously for instance. 
{{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 844.911146] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.227s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 844.911146] env[61839]: INFO nova.compute.claims [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] [instance: c180cc04-79da-4529-a905-1985a85b7cf5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 844.932032] env[61839]: DEBUG oslo_vmware.api [None req-4312ca92-2e7a-4f1e-9c69-7a382d955d89 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]5208544b-0a3a-ef68-43e5-a5b36cd72889, 'name': SearchDatastore_Task, 'duration_secs': 0.009693} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 844.932032] env[61839]: DEBUG oslo_concurrency.lockutils [None req-4312ca92-2e7a-4f1e-9c69-7a382d955d89 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 844.932032] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-4312ca92-2e7a-4f1e-9c69-7a382d955d89 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] [instance: 3f86a0d5-30fd-42cc-bd40-14bce9d0e56f] Processing image e497cc62-282a-4a70-9770-22d80d8a1013 {{(pid=61839) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 844.932032] env[61839]: DEBUG oslo_concurrency.lockutils [None req-4312ca92-2e7a-4f1e-9c69-7a382d955d89 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 844.932261] env[61839]: DEBUG oslo_concurrency.lockutils [None req-4312ca92-2e7a-4f1e-9c69-7a382d955d89 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 844.932261] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-4312ca92-2e7a-4f1e-9c69-7a382d955d89 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 844.932317] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-650cc977-2dc5-440a-84ef-732edb839d9c {{(pid=61839) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.942808] env[61839]: DEBUG oslo_concurrency.lockutils [None req-6ef67611-07bb-42ba-8d91-d171ea96d861 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] Acquiring lock "refresh_cache-62959833-5834-4c0a-bf4e-3ac1157b3b0c" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 844.944199] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-4312ca92-2e7a-4f1e-9c69-7a382d955d89 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 844.944466] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-4312ca92-2e7a-4f1e-9c69-7a382d955d89 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61839) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 844.949484] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0a0a4e35-4980-4b97-85ae-c7991d489e36 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.951273] env[61839]: DEBUG nova.network.neutron [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] [instance: ce59c937-fc0b-464f-baaa-461c6f6c2d57] Successfully updated port: f7226659-074b-4580-8d02-5d637bbba250 {{(pid=61839) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 844.952837] env[61839]: DEBUG oslo_vmware.api [None req-4312ca92-2e7a-4f1e-9c69-7a382d955d89 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Waiting for the task: (returnval){ [ 844.952837] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52e353ee-8aaa-4e65-63e0-a5e4e99c8391" [ 844.952837] env[61839]: _type = "Task" [ 844.952837] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 844.965059] env[61839]: DEBUG oslo_vmware.api [None req-4312ca92-2e7a-4f1e-9c69-7a382d955d89 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52e353ee-8aaa-4e65-63e0-a5e4e99c8391, 'name': SearchDatastore_Task, 'duration_secs': 0.009565} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 844.965671] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1f97ba40-d6a2-470f-8cff-a098f7484879 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.969782] env[61839]: DEBUG oslo_concurrency.lockutils [req-5dd5bb0a-e3e7-4777-8f18-77e7b9e87b87 req-78dd2484-8828-4698-9fa2-c2b6c1c48b0a service nova] Releasing lock "refresh_cache-62959833-5834-4c0a-bf4e-3ac1157b3b0c" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 844.973844] env[61839]: DEBUG oslo_concurrency.lockutils [None req-6ef67611-07bb-42ba-8d91-d171ea96d861 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] Acquired lock "refresh_cache-62959833-5834-4c0a-bf4e-3ac1157b3b0c" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 844.974081] env[61839]: DEBUG nova.network.neutron [None req-6ef67611-07bb-42ba-8d91-d171ea96d861 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] [instance: 62959833-5834-4c0a-bf4e-3ac1157b3b0c] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 844.976635] env[61839]: DEBUG oslo_vmware.api [None req-4312ca92-2e7a-4f1e-9c69-7a382d955d89 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Waiting for the task: (returnval){ [ 844.976635] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]526bb89f-3e8a-841c-92c9-25deab08f09d" [ 844.976635] env[61839]: _type = "Task" [ 844.976635] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 844.986337] env[61839]: DEBUG oslo_vmware.api [None req-4312ca92-2e7a-4f1e-9c69-7a382d955d89 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]526bb89f-3e8a-841c-92c9-25deab08f09d, 'name': SearchDatastore_Task, 'duration_secs': 0.008455} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 844.986605] env[61839]: DEBUG oslo_concurrency.lockutils [None req-4312ca92-2e7a-4f1e-9c69-7a382d955d89 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 844.986975] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-4312ca92-2e7a-4f1e-9c69-7a382d955d89 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk to [datastore2] 3f86a0d5-30fd-42cc-bd40-14bce9d0e56f/3f86a0d5-30fd-42cc-bd40-14bce9d0e56f.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 844.987121] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6f9159d4-faa7-4900-a8be-b7b9dfe30282 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.994597] env[61839]: DEBUG oslo_vmware.api [None req-4312ca92-2e7a-4f1e-9c69-7a382d955d89 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Waiting for the task: (returnval){ [ 844.994597] env[61839]: value = "task-1314497" [ 844.994597] env[61839]: _type = "Task" [ 844.994597] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 845.003767] env[61839]: DEBUG oslo_vmware.api [None req-4312ca92-2e7a-4f1e-9c69-7a382d955d89 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Task: {'id': task-1314497, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.020586] env[61839]: DEBUG nova.compute.manager [req-1f9379ae-8568-45ad-9556-8bb0969779d6 req-57589be9-9a28-4cf8-8e24-3a02a742ae82 service nova] [instance: ce59c937-fc0b-464f-baaa-461c6f6c2d57] Received event network-vif-plugged-f7226659-074b-4580-8d02-5d637bbba250 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 845.021256] env[61839]: DEBUG oslo_concurrency.lockutils [req-1f9379ae-8568-45ad-9556-8bb0969779d6 req-57589be9-9a28-4cf8-8e24-3a02a742ae82 service nova] Acquiring lock "ce59c937-fc0b-464f-baaa-461c6f6c2d57-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 845.021256] env[61839]: DEBUG oslo_concurrency.lockutils [req-1f9379ae-8568-45ad-9556-8bb0969779d6 req-57589be9-9a28-4cf8-8e24-3a02a742ae82 service nova] Lock "ce59c937-fc0b-464f-baaa-461c6f6c2d57-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 845.021375] env[61839]: DEBUG oslo_concurrency.lockutils [req-1f9379ae-8568-45ad-9556-8bb0969779d6 req-57589be9-9a28-4cf8-8e24-3a02a742ae82 service nova] Lock "ce59c937-fc0b-464f-baaa-461c6f6c2d57-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 845.021555] env[61839]: DEBUG nova.compute.manager [req-1f9379ae-8568-45ad-9556-8bb0969779d6 req-57589be9-9a28-4cf8-8e24-3a02a742ae82 service nova] [instance: ce59c937-fc0b-464f-baaa-461c6f6c2d57] No waiting events found dispatching network-vif-plugged-f7226659-074b-4580-8d02-5d637bbba250 {{(pid=61839) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 845.021794] env[61839]: WARNING nova.compute.manager [req-1f9379ae-8568-45ad-9556-8bb0969779d6 req-57589be9-9a28-4cf8-8e24-3a02a742ae82 service nova] [instance: ce59c937-fc0b-464f-baaa-461c6f6c2d57] Received unexpected event network-vif-plugged-f7226659-074b-4580-8d02-5d637bbba250 for instance with vm_state building and task_state spawning. [ 845.046579] env[61839]: DEBUG oslo_vmware.api [None req-cabdb6a6-9cdb-4077-8178-485aa41e5aff tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': task-1314496, 'name': Rename_Task, 'duration_secs': 0.164953} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 845.047935] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-cabdb6a6-9cdb-4077-8178-485aa41e5aff tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: 40c54d84-8e50-483a-b4e0-5f1cc72b0880] Powering on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 845.047935] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-076fb03e-ca42-4cc4-97a3-c39d4c83a353 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.055537] env[61839]: DEBUG oslo_vmware.api [None req-cabdb6a6-9cdb-4077-8178-485aa41e5aff tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Waiting for the task: (returnval){ [ 845.055537] env[61839]: value = "task-1314498" [ 845.055537] env[61839]: _type = "Task" [ 845.055537] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 845.067335] env[61839]: DEBUG oslo_vmware.api [None req-cabdb6a6-9cdb-4077-8178-485aa41e5aff tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': task-1314498, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.414474] env[61839]: DEBUG nova.compute.utils [None req-0e959dc4-0080-49d1-93fb-cad2bd59d9d7 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] Using /dev/sd instead of None {{(pid=61839) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 845.419307] env[61839]: DEBUG nova.compute.manager [None req-0e959dc4-0080-49d1-93fb-cad2bd59d9d7 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] [instance: 2cb53e37-8b0b-48b7-a973-061b91df46df] Allocating IP information in the background. 
{{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 845.419586] env[61839]: DEBUG nova.network.neutron [None req-0e959dc4-0080-49d1-93fb-cad2bd59d9d7 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] [instance: 2cb53e37-8b0b-48b7-a973-061b91df46df] allocate_for_instance() {{(pid=61839) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 845.460064] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Acquiring lock "refresh_cache-ce59c937-fc0b-464f-baaa-461c6f6c2d57" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 845.460424] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Acquired lock "refresh_cache-ce59c937-fc0b-464f-baaa-461c6f6c2d57" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 845.460514] env[61839]: DEBUG nova.network.neutron [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] [instance: ce59c937-fc0b-464f-baaa-461c6f6c2d57] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 845.480443] env[61839]: DEBUG nova.policy [None req-0e959dc4-0080-49d1-93fb-cad2bd59d9d7 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2cfaae5373da4d589df67d599d40366f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b3d6228b03c4412695ef822ed618a27a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61839) authorize /opt/stack/nova/nova/policy.py:201}} [ 845.505720] env[61839]: DEBUG oslo_vmware.api [None req-4312ca92-2e7a-4f1e-9c69-7a382d955d89 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Task: {'id': task-1314497, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.508837} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 845.506015] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-4312ca92-2e7a-4f1e-9c69-7a382d955d89 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk to [datastore2] 3f86a0d5-30fd-42cc-bd40-14bce9d0e56f/3f86a0d5-30fd-42cc-bd40-14bce9d0e56f.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 845.506257] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-4312ca92-2e7a-4f1e-9c69-7a382d955d89 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] [instance: 3f86a0d5-30fd-42cc-bd40-14bce9d0e56f] Extending root virtual disk to 1048576 {{(pid=61839) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 845.506521] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7c0680c2-f56d-4a08-aa88-fa9d5d6357c8 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.513229] env[61839]: DEBUG oslo_vmware.api [None req-4312ca92-2e7a-4f1e-9c69-7a382d955d89 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Waiting for the task: (returnval){ [ 845.513229] env[61839]: value = "task-1314499" [ 845.513229] env[61839]: _type = "Task" [ 845.513229] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 845.522308] env[61839]: DEBUG oslo_vmware.api [None req-4312ca92-2e7a-4f1e-9c69-7a382d955d89 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Task: {'id': task-1314499, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.541739] env[61839]: DEBUG nova.network.neutron [-] [instance: 047080fa-8781-47b1-89d8-2e4c8031b164] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 845.567948] env[61839]: DEBUG oslo_vmware.api [None req-cabdb6a6-9cdb-4077-8178-485aa41e5aff tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': task-1314498, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.717780] env[61839]: DEBUG nova.compute.manager [req-c6205a29-0f92-4d63-8568-431ead9b9e02 req-de08eefd-317c-4c57-8b6a-ee8a9ca9f88c service nova] [instance: 047080fa-8781-47b1-89d8-2e4c8031b164] Received event network-vif-deleted-6abe31af-55d3-43dc-b16e-b1e5d4c6f7dc {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 845.718012] env[61839]: DEBUG nova.compute.manager [req-c6205a29-0f92-4d63-8568-431ead9b9e02 req-de08eefd-317c-4c57-8b6a-ee8a9ca9f88c service nova] [instance: 49d4720b-83e3-47d9-b727-5bb255de2e7c] Received event network-vif-deleted-32fd36fe-b511-4c88-9578-3d72ebf35d22 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 845.718498] env[61839]: INFO nova.compute.manager [req-c6205a29-0f92-4d63-8568-431ead9b9e02 req-de08eefd-317c-4c57-8b6a-ee8a9ca9f88c service nova] [instance: 49d4720b-83e3-47d9-b727-5bb255de2e7c] Neutron deleted interface 32fd36fe-b511-4c88-9578-3d72ebf35d22; detaching it from the instance and deleting it from the info cache [ 845.718684] env[61839]: DEBUG nova.network.neutron [req-c6205a29-0f92-4d63-8568-431ead9b9e02 req-de08eefd-317c-4c57-8b6a-ee8a9ca9f88c service nova] [instance: 49d4720b-83e3-47d9-b727-5bb255de2e7c] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 845.878046] env[61839]: DEBUG nova.network.neutron [None req-6ef67611-07bb-42ba-8d91-d171ea96d861 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] [instance: 62959833-5834-4c0a-bf4e-3ac1157b3b0c] Updating instance_info_cache with network_info: [{"id": "8d3be36a-f8da-4212-9f18-edefa1681f82", "address": "fa:16:3e:b8:82:1d", "network": {"id": "04db4c3e-de70-4119-8a0c-0b090344b55a", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-422735768-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "434f932a2a9e448d99fac449918affe9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca99f7a1-6365-4d3c-af16-1b1c1288091e", "external-id": "cl2-zone-334", "segmentation_id": 334, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8d3be36a-f8", "ovs_interfaceid": "8d3be36a-f8da-4212-9f18-edefa1681f82", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 845.911751] env[61839]: DEBUG nova.network.neutron [-] [instance: 49d4720b-83e3-47d9-b727-5bb255de2e7c] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 845.919737] env[61839]: DEBUG nova.compute.manager [None req-0e959dc4-0080-49d1-93fb-cad2bd59d9d7 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] [instance: 2cb53e37-8b0b-48b7-a973-061b91df46df] Start building 
block device mappings for instance. {{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 845.952151] env[61839]: DEBUG nova.network.neutron [None req-0e959dc4-0080-49d1-93fb-cad2bd59d9d7 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] [instance: 2cb53e37-8b0b-48b7-a973-061b91df46df] Successfully created port: 9b5b64c2-c5e6-4188-918b-22b9150363f0 {{(pid=61839) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 846.007274] env[61839]: DEBUG nova.network.neutron [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] [instance: ce59c937-fc0b-464f-baaa-461c6f6c2d57] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 846.025677] env[61839]: DEBUG oslo_vmware.api [None req-4312ca92-2e7a-4f1e-9c69-7a382d955d89 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Task: {'id': task-1314499, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.321225} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 846.025961] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-4312ca92-2e7a-4f1e-9c69-7a382d955d89 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] [instance: 3f86a0d5-30fd-42cc-bd40-14bce9d0e56f] Extended root virtual disk {{(pid=61839) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 846.026778] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4214981-1026-4fbb-872d-5a691158fc57 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.044946] env[61839]: INFO nova.compute.manager [-] [instance: 047080fa-8781-47b1-89d8-2e4c8031b164] Took 1.64 seconds to deallocate network for instance. [ 846.054051] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-4312ca92-2e7a-4f1e-9c69-7a382d955d89 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] [instance: 3f86a0d5-30fd-42cc-bd40-14bce9d0e56f] Reconfiguring VM instance instance-00000045 to attach disk [datastore2] 3f86a0d5-30fd-42cc-bd40-14bce9d0e56f/3f86a0d5-30fd-42cc-bd40-14bce9d0e56f.vmdk or device None with type sparse {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 846.058076] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5eaa5c4b-d8fb-4bf1-85f3-1b7589c5b5a6 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.087391] env[61839]: DEBUG oslo_vmware.api [None req-cabdb6a6-9cdb-4077-8178-485aa41e5aff tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': task-1314498, 'name': PowerOnVM_Task, 'duration_secs': 0.673744} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 846.089314] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-cabdb6a6-9cdb-4077-8178-485aa41e5aff tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: 40c54d84-8e50-483a-b4e0-5f1cc72b0880] Powered on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 846.089739] env[61839]: INFO nova.compute.manager [None req-cabdb6a6-9cdb-4077-8178-485aa41e5aff tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: 40c54d84-8e50-483a-b4e0-5f1cc72b0880] Took 10.56 seconds to spawn the instance on the hypervisor. [ 846.089950] env[61839]: DEBUG nova.compute.manager [None req-cabdb6a6-9cdb-4077-8178-485aa41e5aff tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: 40c54d84-8e50-483a-b4e0-5f1cc72b0880] Checking state {{(pid=61839) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 846.090321] env[61839]: DEBUG oslo_vmware.api [None req-4312ca92-2e7a-4f1e-9c69-7a382d955d89 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Waiting for the task: (returnval){ [ 846.090321] env[61839]: value = "task-1314500" [ 846.090321] env[61839]: _type = "Task" [ 846.090321] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 846.094460] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03289bf8-696e-448c-a77d-f37198345daf {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.111104] env[61839]: DEBUG oslo_vmware.api [None req-4312ca92-2e7a-4f1e-9c69-7a382d955d89 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Task: {'id': task-1314500, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.221056] env[61839]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d37a68d3-fe5f-4ddb-a3d1-0e3c9bd4c862 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.233547] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-075db73a-20b2-4649-adf7-f4b761c8b57b {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.263837] env[61839]: DEBUG nova.compute.manager [req-c6205a29-0f92-4d63-8568-431ead9b9e02 req-de08eefd-317c-4c57-8b6a-ee8a9ca9f88c service nova] [instance: 49d4720b-83e3-47d9-b727-5bb255de2e7c] Detach interface failed, port_id=32fd36fe-b511-4c88-9578-3d72ebf35d22, reason: Instance 49d4720b-83e3-47d9-b727-5bb255de2e7c could not be found. 
{{(pid=61839) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 846.345495] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1fe2feb-fa45-403e-84f9-b8460464c7ed {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.353340] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69084584-c559-45bf-9b2e-a06c326a313c {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.386293] env[61839]: DEBUG oslo_concurrency.lockutils [None req-6ef67611-07bb-42ba-8d91-d171ea96d861 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] Releasing lock "refresh_cache-62959833-5834-4c0a-bf4e-3ac1157b3b0c" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 846.388504] env[61839]: DEBUG nova.compute.manager [None req-6ef67611-07bb-42ba-8d91-d171ea96d861 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] [instance: 62959833-5834-4c0a-bf4e-3ac1157b3b0c] Checking state {{(pid=61839) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 846.389522] env[61839]: DEBUG nova.network.neutron [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] [instance: ce59c937-fc0b-464f-baaa-461c6f6c2d57] Updating instance_info_cache with network_info: [{"id": "f7226659-074b-4580-8d02-5d637bbba250", "address": "fa:16:3e:82:5b:39", "network": {"id": "1bb5337b-3280-442a-9e85-19ff81dca17a", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-453333563-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fefa976ea98445f1b4e719d3e3a1e8af", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "304be4f7-4e36-4468-9ef4-e457341cef18", "external-id": "nsx-vlan-transportzone-911", "segmentation_id": 911, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf7226659-07", "ovs_interfaceid": "f7226659-074b-4580-8d02-5d637bbba250", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 846.391055] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-658cefad-0fc6-4026-bcaa-8f32089af34b {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.394083] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2abc8ad4-2f10-4d9d-8c2a-824b3ee9dafa {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.404688] env[61839]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ffc18c1-9bd8-4f53-996e-b248a0c16bf5 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.422327] env[61839]: INFO nova.compute.manager [-] [instance: 49d4720b-83e3-47d9-b727-5bb255de2e7c] Took 1.53 seconds to deallocate network for instance. [ 846.423844] env[61839]: DEBUG nova.compute.provider_tree [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 846.557800] env[61839]: DEBUG nova.network.neutron [None req-0e959dc4-0080-49d1-93fb-cad2bd59d9d7 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] [instance: 2cb53e37-8b0b-48b7-a973-061b91df46df] Successfully created port: aa6c596a-924c-4f3b-b846-88212a1fdbfe {{(pid=61839) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 846.582935] env[61839]: DEBUG oslo_concurrency.lockutils [None req-4a30a124-7425-49b0-9db2-f6c7b005cfc6 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 846.607117] env[61839]: DEBUG oslo_vmware.api [None req-4312ca92-2e7a-4f1e-9c69-7a382d955d89 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Task: {'id': task-1314500, 'name': ReconfigVM_Task, 'duration_secs': 0.286803} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 846.607448] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-4312ca92-2e7a-4f1e-9c69-7a382d955d89 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] [instance: 3f86a0d5-30fd-42cc-bd40-14bce9d0e56f] Reconfigured VM instance instance-00000045 to attach disk [datastore2] 3f86a0d5-30fd-42cc-bd40-14bce9d0e56f/3f86a0d5-30fd-42cc-bd40-14bce9d0e56f.vmdk or device None with type sparse {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 846.608093] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e28bb350-40b1-43d6-acab-a4af543e68f4 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.619970] env[61839]: DEBUG oslo_vmware.api [None req-4312ca92-2e7a-4f1e-9c69-7a382d955d89 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Waiting for the task: (returnval){ [ 846.619970] env[61839]: value = "task-1314502" [ 846.619970] env[61839]: _type = "Task" [ 846.619970] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 846.622501] env[61839]: INFO nova.compute.manager [None req-cabdb6a6-9cdb-4077-8178-485aa41e5aff tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: 40c54d84-8e50-483a-b4e0-5f1cc72b0880] Took 28.92 seconds to build instance. 
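The vCenter task entries running through this stretch (ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task, CreateVM_Task) all follow the same oslo.vmware pattern that the wait_for_task/_poll_task source references point at: the SOAP call returns a Task managed-object reference immediately, and the session then polls its TaskInfo until it reports success, emitting the recurring "progress is N%" and "completed successfully" lines. A minimal sketch of that pattern, assuming placeholder vCenter credentials and a vm_ref obtained elsewhere (power_on is a hypothetical helper for illustration, not Nova's own code):

    # Sketch of the invoke-then-poll pattern behind the task entries here.
    from oslo_vmware import api

    session = api.VMwareAPISession(
        'vc.example.com', 'user', 'secret',  # placeholder endpoint/creds
        api_retry_count=10,
        task_poll_interval=0.5)              # seconds between TaskInfo polls

    def power_on(session, vm_ref):
        # PowerOnVM_Task returns a Task moref without blocking.
        task_ref = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
        # wait_for_task polls the task (the "progress is N%" lines) until it
        # reaches 'success' and returns its TaskInfo; errors raise instead.
        return session.wait_for_task(task_ref)

The duration_secs value reported on completion appears to be the elapsed time measured by that poll loop between task submission and success.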
[ 846.628926] env[61839]: DEBUG oslo_vmware.api [None req-4312ca92-2e7a-4f1e-9c69-7a382d955d89 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Task: {'id': task-1314502, 'name': Rename_Task} progress is 6%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.898158] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Releasing lock "refresh_cache-ce59c937-fc0b-464f-baaa-461c6f6c2d57" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 846.898564] env[61839]: DEBUG nova.compute.manager [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] [instance: ce59c937-fc0b-464f-baaa-461c6f6c2d57] Instance network_info: |[{"id": "f7226659-074b-4580-8d02-5d637bbba250", "address": "fa:16:3e:82:5b:39", "network": {"id": "1bb5337b-3280-442a-9e85-19ff81dca17a", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-453333563-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fefa976ea98445f1b4e719d3e3a1e8af", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "304be4f7-4e36-4468-9ef4-e457341cef18", "external-id": "nsx-vlan-transportzone-911", "segmentation_id": 911, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf7226659-07", "ovs_interfaceid": "f7226659-074b-4580-8d02-5d637bbba250", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 846.898999] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] [instance: ce59c937-fc0b-464f-baaa-461c6f6c2d57] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:82:5b:39', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '304be4f7-4e36-4468-9ef4-e457341cef18', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f7226659-074b-4580-8d02-5d637bbba250', 'vif_model': 'vmxnet3'}] {{(pid=61839) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 846.907202] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Creating folder: Project (fefa976ea98445f1b4e719d3e3a1e8af). Parent ref: group-v281288. 
{{(pid=61839) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 846.907900] env[61839]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7db08727-a90f-4bbc-81df-3bbbb714bb4d {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.918977] env[61839]: INFO nova.virt.vmwareapi.vm_util [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Created folder: Project (fefa976ea98445f1b4e719d3e3a1e8af) in parent group-v281288. [ 846.919200] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Creating folder: Instances. Parent ref: group-v281365. {{(pid=61839) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 846.919438] env[61839]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e44d1f0b-c31a-4c62-9c33-8a4c61e1c6a9 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.927005] env[61839]: DEBUG nova.scheduler.client.report [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 846.931301] env[61839]: INFO nova.virt.vmwareapi.vm_util [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Created folder: Instances in parent group-v281365. [ 846.931546] env[61839]: DEBUG oslo.service.loopingcall [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 846.932728] env[61839]: DEBUG nova.compute.manager [None req-0e959dc4-0080-49d1-93fb-cad2bd59d9d7 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] [instance: 2cb53e37-8b0b-48b7-a973-061b91df46df] Start spawning the instance on the hypervisor. 
{{(pid=61839) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 846.935591] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ce59c937-fc0b-464f-baaa-461c6f6c2d57] Creating VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 846.935591] env[61839]: DEBUG oslo_concurrency.lockutils [None req-d360fcd5-1768-4667-8854-1b10fb75b424 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 846.935929] env[61839]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f1c7707f-e623-49f7-89ce-01a24dbc5e8d {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.960212] env[61839]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 846.960212] env[61839]: value = "task-1314505" [ 846.960212] env[61839]: _type = "Task" [ 846.960212] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 846.962290] env[61839]: DEBUG nova.virt.hardware [None req-0e959dc4-0080-49d1-93fb-cad2bd59d9d7 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-18T16:51:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-18T16:50:57Z,direct_url=,disk_format='vmdk',id=e497cc62-282a-4a70-9770-22d80d8a1013,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a98e034276e44534a9feace637762da7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-18T16:50:58Z,virtual_size=,visibility=), allow threads: False {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 846.962519] env[61839]: DEBUG nova.virt.hardware [None req-0e959dc4-0080-49d1-93fb-cad2bd59d9d7 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] Flavor limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 846.962683] env[61839]: DEBUG nova.virt.hardware [None req-0e959dc4-0080-49d1-93fb-cad2bd59d9d7 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] Image limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 846.962870] env[61839]: DEBUG nova.virt.hardware [None req-0e959dc4-0080-49d1-93fb-cad2bd59d9d7 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] Flavor pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 846.963032] env[61839]: DEBUG nova.virt.hardware [None req-0e959dc4-0080-49d1-93fb-cad2bd59d9d7 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] Image pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} 
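The nova.virt.hardware entries that begin above and continue just below ("Chose sockets=0, cores=0, threads=0 ...", "Build topologies for 1 vcpu(s) 1:1:1", "Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]") are Nova enumerating CPU topologies for the m1.nano flavor: with no flavor or image limits or preferences set (the 0:0:0 lines), the maxima default to 65536 per dimension, and the only topology holding exactly one vCPU is 1:1:1. A simplified enumeration in the same spirit, not Nova's exact hardware.py algorithm (which also folds in preferences and the sorting seen in the "Sorted desired topologies" line):

    # Simplified sketch of possible-CPU-topology enumeration.
    from collections import namedtuple

    Topo = namedtuple('Topo', 'sockets cores threads')

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
        # When the product must equal the vCPU count, no dimension can
        # exceed `vcpus`, so cap the search ranges accordingly.
        for s in range(1, min(max_sockets, vcpus) + 1):
            for c in range(1, min(max_cores, vcpus) + 1):
                for t in range(1, min(max_threads, vcpus) + 1):
                    if s * c * t == vcpus:
                        yield Topo(s, c, t)

    # list(possible_topologies(1)) -> [Topo(sockets=1, cores=1, threads=1)],
    # matching the log's single VirtCPUTopology(cores=1,sockets=1,threads=1).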
[ 846.963196] env[61839]: DEBUG nova.virt.hardware [None req-0e959dc4-0080-49d1-93fb-cad2bd59d9d7 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 846.963443] env[61839]: DEBUG nova.virt.hardware [None req-0e959dc4-0080-49d1-93fb-cad2bd59d9d7 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 846.963614] env[61839]: DEBUG nova.virt.hardware [None req-0e959dc4-0080-49d1-93fb-cad2bd59d9d7 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 846.963788] env[61839]: DEBUG nova.virt.hardware [None req-0e959dc4-0080-49d1-93fb-cad2bd59d9d7 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] Got 1 possible topologies {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 846.963987] env[61839]: DEBUG nova.virt.hardware [None req-0e959dc4-0080-49d1-93fb-cad2bd59d9d7 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 846.964200] env[61839]: DEBUG nova.virt.hardware [None req-0e959dc4-0080-49d1-93fb-cad2bd59d9d7 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 846.965250] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6aaba937-ef74-4c57-908a-c196e1c76135 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.977924] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c485f70-31f0-444f-bb75-284bdf0c9321 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.981560] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314505, 'name': CreateVM_Task} progress is 6%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.052823] env[61839]: DEBUG nova.compute.manager [req-3bc76fed-1333-4549-895c-d9b39502489e req-42e31a01-7a8a-4f0b-9386-447d9884c7c0 service nova] [instance: ce59c937-fc0b-464f-baaa-461c6f6c2d57] Received event network-changed-f7226659-074b-4580-8d02-5d637bbba250 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 847.053178] env[61839]: DEBUG nova.compute.manager [req-3bc76fed-1333-4549-895c-d9b39502489e req-42e31a01-7a8a-4f0b-9386-447d9884c7c0 service nova] [instance: ce59c937-fc0b-464f-baaa-461c6f6c2d57] Refreshing instance network info cache due to event network-changed-f7226659-074b-4580-8d02-5d637bbba250. 
{{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 847.053475] env[61839]: DEBUG oslo_concurrency.lockutils [req-3bc76fed-1333-4549-895c-d9b39502489e req-42e31a01-7a8a-4f0b-9386-447d9884c7c0 service nova] Acquiring lock "refresh_cache-ce59c937-fc0b-464f-baaa-461c6f6c2d57" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 847.053619] env[61839]: DEBUG oslo_concurrency.lockutils [req-3bc76fed-1333-4549-895c-d9b39502489e req-42e31a01-7a8a-4f0b-9386-447d9884c7c0 service nova] Acquired lock "refresh_cache-ce59c937-fc0b-464f-baaa-461c6f6c2d57" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 847.053792] env[61839]: DEBUG nova.network.neutron [req-3bc76fed-1333-4549-895c-d9b39502489e req-42e31a01-7a8a-4f0b-9386-447d9884c7c0 service nova] [instance: ce59c937-fc0b-464f-baaa-461c6f6c2d57] Refreshing network info cache for port f7226659-074b-4580-8d02-5d637bbba250 {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 847.125354] env[61839]: DEBUG oslo_concurrency.lockutils [None req-cabdb6a6-9cdb-4077-8178-485aa41e5aff tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Lock "40c54d84-8e50-483a-b4e0-5f1cc72b0880" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 79.714s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 847.132405] env[61839]: DEBUG oslo_vmware.api [None req-4312ca92-2e7a-4f1e-9c69-7a382d955d89 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Task: {'id': task-1314502, 'name': Rename_Task, 'duration_secs': 0.173903} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 847.132932] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-4312ca92-2e7a-4f1e-9c69-7a382d955d89 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] [instance: 3f86a0d5-30fd-42cc-bd40-14bce9d0e56f] Powering on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 847.133245] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e8f625d2-aea2-4a41-91f6-340f59ae197a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.139771] env[61839]: DEBUG oslo_vmware.api [None req-4312ca92-2e7a-4f1e-9c69-7a382d955d89 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Waiting for the task: (returnval){ [ 847.139771] env[61839]: value = "task-1314506" [ 847.139771] env[61839]: _type = "Task" [ 847.139771] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 847.149277] env[61839]: DEBUG oslo_vmware.api [None req-4312ca92-2e7a-4f1e-9c69-7a382d955d89 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Task: {'id': task-1314506, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.418060] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83d49be8-5c8e-49cc-a8a8-f429d1461974 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.427142] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-6ef67611-07bb-42ba-8d91-d171ea96d861 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] [instance: 62959833-5834-4c0a-bf4e-3ac1157b3b0c] Doing hard reboot of VM {{(pid=61839) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1063}} [ 847.427142] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-4ee302dd-f2ec-4a46-b3a0-b36f75b66cf3 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.436522] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.526s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 847.436522] env[61839]: DEBUG nova.compute.manager [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] [instance: c180cc04-79da-4529-a905-1985a85b7cf5] Start building networks asynchronously for instance. {{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 847.438093] env[61839]: DEBUG oslo_vmware.api [None req-6ef67611-07bb-42ba-8d91-d171ea96d861 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] Waiting for the task: (returnval){ [ 847.438093] env[61839]: value = "task-1314507" [ 847.438093] env[61839]: _type = "Task" [ 847.438093] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 847.438737] env[61839]: DEBUG oslo_concurrency.lockutils [None req-50defe20-1e8f-44c8-9564-69613a54695f tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.222s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 847.440612] env[61839]: INFO nova.compute.claims [None req-50defe20-1e8f-44c8-9564-69613a54695f tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] [instance: c996d7db-4b73-4445-9989-4efb2cd852e8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 847.453761] env[61839]: DEBUG oslo_vmware.api [None req-6ef67611-07bb-42ba-8d91-d171ea96d861 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] Task: {'id': task-1314507, 'name': ResetVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.477568] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314505, 'name': CreateVM_Task, 'duration_secs': 0.361438} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 847.479162] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ce59c937-fc0b-464f-baaa-461c6f6c2d57] Created VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 847.481725] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 847.482227] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 847.482712] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 847.483416] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4f42c188-c307-41c2-9b57-e6236608be81 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.490126] env[61839]: DEBUG oslo_vmware.api [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Waiting for the task: (returnval){ [ 847.490126] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]5280b563-94b1-6dff-352e-41fabfb71c53" [ 847.490126] env[61839]: _type = "Task" [ 847.490126] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 847.501357] env[61839]: DEBUG oslo_vmware.api [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]5280b563-94b1-6dff-352e-41fabfb71c53, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.633842] env[61839]: DEBUG nova.compute.manager [None req-94f6b642-8cf2-4949-b1bb-e096f0676014 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: fa8a2265-291e-4424-bea1-72574e495a72] Starting instance... {{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 847.648866] env[61839]: DEBUG oslo_vmware.api [None req-4312ca92-2e7a-4f1e-9c69-7a382d955d89 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Task: {'id': task-1314506, 'name': PowerOnVM_Task, 'duration_secs': 0.451771} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 847.651143] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-4312ca92-2e7a-4f1e-9c69-7a382d955d89 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] [instance: 3f86a0d5-30fd-42cc-bd40-14bce9d0e56f] Powered on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 847.651362] env[61839]: INFO nova.compute.manager [None req-4312ca92-2e7a-4f1e-9c69-7a382d955d89 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] [instance: 3f86a0d5-30fd-42cc-bd40-14bce9d0e56f] Took 7.62 seconds to spawn the instance on the hypervisor. [ 847.651544] env[61839]: DEBUG nova.compute.manager [None req-4312ca92-2e7a-4f1e-9c69-7a382d955d89 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] [instance: 3f86a0d5-30fd-42cc-bd40-14bce9d0e56f] Checking state {{(pid=61839) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 847.652403] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-025ee19f-dd4d-4898-9707-4336fed6a036 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.847652] env[61839]: DEBUG nova.network.neutron [req-3bc76fed-1333-4549-895c-d9b39502489e req-42e31a01-7a8a-4f0b-9386-447d9884c7c0 service nova] [instance: ce59c937-fc0b-464f-baaa-461c6f6c2d57] Updated VIF entry in instance network info cache for port f7226659-074b-4580-8d02-5d637bbba250. {{(pid=61839) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 847.847652] env[61839]: DEBUG nova.network.neutron [req-3bc76fed-1333-4549-895c-d9b39502489e req-42e31a01-7a8a-4f0b-9386-447d9884c7c0 service nova] [instance: ce59c937-fc0b-464f-baaa-461c6f6c2d57] Updating instance_info_cache with network_info: [{"id": "f7226659-074b-4580-8d02-5d637bbba250", "address": "fa:16:3e:82:5b:39", "network": {"id": "1bb5337b-3280-442a-9e85-19ff81dca17a", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-453333563-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fefa976ea98445f1b4e719d3e3a1e8af", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "304be4f7-4e36-4468-9ef4-e457341cef18", "external-id": "nsx-vlan-transportzone-911", "segmentation_id": 911, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf7226659-07", "ovs_interfaceid": "f7226659-074b-4580-8d02-5d637bbba250", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 847.948118] env[61839]: DEBUG nova.compute.utils [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Using /dev/sd instead of None {{(pid=61839) get_next_device_name 
/opt/stack/nova/nova/compute/utils.py:238}} [ 847.951662] env[61839]: DEBUG nova.compute.manager [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] [instance: c180cc04-79da-4529-a905-1985a85b7cf5] Allocating IP information in the background. {{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 847.952069] env[61839]: DEBUG nova.network.neutron [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] [instance: c180cc04-79da-4529-a905-1985a85b7cf5] allocate_for_instance() {{(pid=61839) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 847.968306] env[61839]: DEBUG oslo_vmware.api [None req-6ef67611-07bb-42ba-8d91-d171ea96d861 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] Task: {'id': task-1314507, 'name': ResetVM_Task, 'duration_secs': 0.086947} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 847.968306] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-6ef67611-07bb-42ba-8d91-d171ea96d861 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] [instance: 62959833-5834-4c0a-bf4e-3ac1157b3b0c] Did hard reboot of VM {{(pid=61839) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1067}} [ 847.968306] env[61839]: DEBUG nova.compute.manager [None req-6ef67611-07bb-42ba-8d91-d171ea96d861 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] [instance: 62959833-5834-4c0a-bf4e-3ac1157b3b0c] Checking state {{(pid=61839) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 847.974089] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad1d1373-dfe2-4f3c-84cf-7925735aadb9 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.004183] env[61839]: DEBUG oslo_vmware.api [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]5280b563-94b1-6dff-352e-41fabfb71c53, 'name': SearchDatastore_Task, 'duration_secs': 0.010747} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 848.004476] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 848.004711] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] [instance: ce59c937-fc0b-464f-baaa-461c6f6c2d57] Processing image e497cc62-282a-4a70-9770-22d80d8a1013 {{(pid=61839) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 848.004953] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 848.005129] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 848.005326] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 848.005592] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d95906c1-8a57-4048-aac3-4d2e72225e60 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.014269] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 848.014487] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61839) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 848.015440] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-51d1c881-daef-416e-bef4-3b9795c5e212 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.020555] env[61839]: DEBUG oslo_vmware.api [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Waiting for the task: (returnval){ [ 848.020555] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]5232fff9-94bc-6670-09b8-d5b62c732865" [ 848.020555] env[61839]: _type = "Task" [ 848.020555] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 848.030207] env[61839]: DEBUG oslo_vmware.api [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]5232fff9-94bc-6670-09b8-d5b62c732865, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.046174] env[61839]: DEBUG nova.policy [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd9360f9fcba74a5aae33fdec3660b790', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'fefa976ea98445f1b4e719d3e3a1e8af', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61839) authorize /opt/stack/nova/nova/policy.py:201}} [ 848.163736] env[61839]: DEBUG oslo_concurrency.lockutils [None req-94f6b642-8cf2-4949-b1bb-e096f0676014 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 848.170140] env[61839]: INFO nova.compute.manager [None req-4312ca92-2e7a-4f1e-9c69-7a382d955d89 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] [instance: 3f86a0d5-30fd-42cc-bd40-14bce9d0e56f] Took 25.51 seconds to build instance. 
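The devstack-image-cache_base entries above are Nova's fetch-if-missing image cache on the datastore: the per-image lock named after the cached VMDK path is taken, HostDatastoreBrowser.SearchDatastore_Task checks whether image e497cc62-... is already cached, and since it is, the lock is released and the build proceeds straight to copying the cached disk into the instance directory (the CopyVirtualDisk_Task polled further below). A condensed sketch of that control flow; search_datastore, fetch_image, and copy_disk are injected hypothetical stand-ins for the ds_util/vmops helpers:

    # Condensed sketch of the datastore image-cache flow in these entries.
    from oslo_concurrency import lockutils

    CACHE = '[datastore2] devstack-image-cache_base'

    def ensure_cached_and_copy(session, image_id, instance_uuid,
                               search_datastore, fetch_image, copy_disk):
        cached_vmdk = '%s/%s/%s.vmdk' % (CACHE, image_id, image_id)
        # Serialize on the cached path so concurrent builds of the same
        # image cannot race (the Acquiring/Releasing lock pairs above).
        with lockutils.lock(cached_vmdk):
            if not search_datastore(session, cached_vmdk):
                fetch_image(session, image_id, cached_vmdk)
        # Copy the cached disk into the instance's own directory, outside
        # the lock; this is the CopyVirtualDisk_Task polled to completion.
        dest = '[datastore2] %s/%s.vmdk' % (instance_uuid, instance_uuid)
        copy_disk(session, cached_vmdk, dest)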
[ 848.202436] env[61839]: DEBUG nova.compute.manager [req-a80160b2-4799-4d4b-95c1-fccfe4785370 req-dd5bde22-6d46-46f5-bd2e-f0e5a5d09d94 service nova] [instance: 2cb53e37-8b0b-48b7-a973-061b91df46df] Received event network-vif-plugged-9b5b64c2-c5e6-4188-918b-22b9150363f0 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 848.202661] env[61839]: DEBUG oslo_concurrency.lockutils [req-a80160b2-4799-4d4b-95c1-fccfe4785370 req-dd5bde22-6d46-46f5-bd2e-f0e5a5d09d94 service nova] Acquiring lock "2cb53e37-8b0b-48b7-a973-061b91df46df-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 848.202870] env[61839]: DEBUG oslo_concurrency.lockutils [req-a80160b2-4799-4d4b-95c1-fccfe4785370 req-dd5bde22-6d46-46f5-bd2e-f0e5a5d09d94 service nova] Lock "2cb53e37-8b0b-48b7-a973-061b91df46df-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 848.203061] env[61839]: DEBUG oslo_concurrency.lockutils [req-a80160b2-4799-4d4b-95c1-fccfe4785370 req-dd5bde22-6d46-46f5-bd2e-f0e5a5d09d94 service nova] Lock "2cb53e37-8b0b-48b7-a973-061b91df46df-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 848.203241] env[61839]: DEBUG nova.compute.manager [req-a80160b2-4799-4d4b-95c1-fccfe4785370 req-dd5bde22-6d46-46f5-bd2e-f0e5a5d09d94 service nova] [instance: 2cb53e37-8b0b-48b7-a973-061b91df46df] No waiting events found dispatching network-vif-plugged-9b5b64c2-c5e6-4188-918b-22b9150363f0 {{(pid=61839) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 848.203445] env[61839]: WARNING nova.compute.manager [req-a80160b2-4799-4d4b-95c1-fccfe4785370 req-dd5bde22-6d46-46f5-bd2e-f0e5a5d09d94 service nova] [instance: 2cb53e37-8b0b-48b7-a973-061b91df46df] Received unexpected event network-vif-plugged-9b5b64c2-c5e6-4188-918b-22b9150363f0 for instance with vm_state building and task_state spawning. [ 848.350675] env[61839]: DEBUG oslo_concurrency.lockutils [req-3bc76fed-1333-4549-895c-d9b39502489e req-42e31a01-7a8a-4f0b-9386-447d9884c7c0 service nova] Releasing lock "refresh_cache-ce59c937-fc0b-464f-baaa-461c6f6c2d57" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 848.436404] env[61839]: DEBUG nova.network.neutron [None req-0e959dc4-0080-49d1-93fb-cad2bd59d9d7 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] [instance: 2cb53e37-8b0b-48b7-a973-061b91df46df] Successfully updated port: 9b5b64c2-c5e6-4188-918b-22b9150363f0 {{(pid=61839) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 848.454893] env[61839]: DEBUG nova.compute.manager [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] [instance: c180cc04-79da-4529-a905-1985a85b7cf5] Start building block device mappings for instance. 
{{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 848.490967] env[61839]: DEBUG oslo_concurrency.lockutils [None req-6ef67611-07bb-42ba-8d91-d171ea96d861 tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] Lock "62959833-5834-4c0a-bf4e-3ac1157b3b0c" "released" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: held 4.080s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 848.532477] env[61839]: DEBUG oslo_vmware.api [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]5232fff9-94bc-6670-09b8-d5b62c732865, 'name': SearchDatastore_Task, 'duration_secs': 0.008785} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 848.533385] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-39477b4a-59e5-4ece-9aee-da56b3e8b8b3 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.544135] env[61839]: DEBUG oslo_vmware.api [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Waiting for the task: (returnval){ [ 848.544135] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]5212837a-700f-7a4e-f0a5-db7a40d15931" [ 848.544135] env[61839]: _type = "Task" [ 848.544135] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 848.554259] env[61839]: DEBUG oslo_vmware.api [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]5212837a-700f-7a4e-f0a5-db7a40d15931, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.561331] env[61839]: DEBUG nova.network.neutron [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] [instance: c180cc04-79da-4529-a905-1985a85b7cf5] Successfully created port: 38a6ad3a-a979-417b-a8ac-65232af41a58 {{(pid=61839) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 848.671925] env[61839]: DEBUG oslo_concurrency.lockutils [None req-4312ca92-2e7a-4f1e-9c69-7a382d955d89 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Lock "3f86a0d5-30fd-42cc-bd40-14bce9d0e56f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 73.511s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 848.782443] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b9fd08f-bfa2-407b-9ffd-33b215cdf6ce {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.790614] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a24edd5e-cd68-417b-8328-84ffb48a1741 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.826437] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5661b2f-3a51-4025-92df-281f4e95eda1 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.834048] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0a63107-3a5c-484a-a15b-c6e9c4143dbf {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.848816] env[61839]: DEBUG nova.compute.provider_tree [None req-50defe20-1e8f-44c8-9564-69613a54695f tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 849.054652] env[61839]: DEBUG oslo_vmware.api [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]5212837a-700f-7a4e-f0a5-db7a40d15931, 'name': SearchDatastore_Task, 'duration_secs': 0.009876} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 849.055034] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 849.055281] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk to [datastore2] ce59c937-fc0b-464f-baaa-461c6f6c2d57/ce59c937-fc0b-464f-baaa-461c6f6c2d57.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 849.055504] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8bc0574c-fe2a-4c81-b2ba-fe27af4e05b3 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.063604] env[61839]: DEBUG oslo_vmware.api [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Waiting for the task: (returnval){ [ 849.063604] env[61839]: value = "task-1314509" [ 849.063604] env[61839]: _type = "Task" [ 849.063604] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 849.072135] env[61839]: DEBUG oslo_vmware.api [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Task: {'id': task-1314509, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.352977] env[61839]: DEBUG nova.scheduler.client.report [None req-50defe20-1e8f-44c8-9564-69613a54695f tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 849.390361] env[61839]: DEBUG nova.compute.manager [req-de04e409-38e6-461d-a25c-d4f86bc0b9a8 req-250bed9d-96ad-4842-92b3-af68dda36e74 service nova] [instance: 40c54d84-8e50-483a-b4e0-5f1cc72b0880] Received event network-changed-4373753c-2ab4-4f61-8117-89f623225621 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 849.390638] env[61839]: DEBUG nova.compute.manager [req-de04e409-38e6-461d-a25c-d4f86bc0b9a8 req-250bed9d-96ad-4842-92b3-af68dda36e74 service nova] [instance: 40c54d84-8e50-483a-b4e0-5f1cc72b0880] Refreshing instance network info cache due to event network-changed-4373753c-2ab4-4f61-8117-89f623225621. {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 849.390917] env[61839]: DEBUG oslo_concurrency.lockutils [req-de04e409-38e6-461d-a25c-d4f86bc0b9a8 req-250bed9d-96ad-4842-92b3-af68dda36e74 service nova] Acquiring lock "refresh_cache-40c54d84-8e50-483a-b4e0-5f1cc72b0880" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 849.391830] env[61839]: DEBUG oslo_concurrency.lockutils [req-de04e409-38e6-461d-a25c-d4f86bc0b9a8 req-250bed9d-96ad-4842-92b3-af68dda36e74 service nova] Acquired lock "refresh_cache-40c54d84-8e50-483a-b4e0-5f1cc72b0880" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 849.392125] env[61839]: DEBUG nova.network.neutron [req-de04e409-38e6-461d-a25c-d4f86bc0b9a8 req-250bed9d-96ad-4842-92b3-af68dda36e74 service nova] [instance: 40c54d84-8e50-483a-b4e0-5f1cc72b0880] Refreshing network info cache for port 4373753c-2ab4-4f61-8117-89f623225621 {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 849.468614] env[61839]: DEBUG nova.compute.manager [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] [instance: c180cc04-79da-4529-a905-1985a85b7cf5] Start spawning the instance on the hypervisor. 
{{(pid=61839) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 849.497672] env[61839]: DEBUG nova.virt.hardware [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-18T16:51:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-18T16:50:57Z,direct_url=,disk_format='vmdk',id=e497cc62-282a-4a70-9770-22d80d8a1013,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a98e034276e44534a9feace637762da7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-18T16:50:58Z,virtual_size=,visibility=), allow threads: False {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 849.498033] env[61839]: DEBUG nova.virt.hardware [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Flavor limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 849.498216] env[61839]: DEBUG nova.virt.hardware [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Image limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 849.498409] env[61839]: DEBUG nova.virt.hardware [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Flavor pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 849.498639] env[61839]: DEBUG nova.virt.hardware [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Image pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 849.498813] env[61839]: DEBUG nova.virt.hardware [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 849.499050] env[61839]: DEBUG nova.virt.hardware [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 849.499257] env[61839]: DEBUG nova.virt.hardware [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 849.499457] env[61839]: DEBUG nova.virt.hardware [None 
req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Got 1 possible topologies {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 849.499629] env[61839]: DEBUG nova.virt.hardware [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 849.499811] env[61839]: DEBUG nova.virt.hardware [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 849.500736] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9998f65c-d075-4990-9436-f81df4c03e22 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.510626] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b089d019-1afa-4458-85f0-ee6ed8b2ad6a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.576870] env[61839]: DEBUG oslo_vmware.api [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Task: {'id': task-1314509, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.862345] env[61839]: DEBUG oslo_concurrency.lockutils [None req-50defe20-1e8f-44c8-9564-69613a54695f tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.421s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 849.862345] env[61839]: DEBUG nova.compute.manager [None req-50defe20-1e8f-44c8-9564-69613a54695f tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] [instance: c996d7db-4b73-4445-9989-4efb2cd852e8] Start building networks asynchronously for instance. 
{{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 849.863737] env[61839]: DEBUG oslo_concurrency.lockutils [None req-6de79a71-94ce-468a-ba01-6c81e70cb056 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.135s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 849.865563] env[61839]: INFO nova.compute.claims [None req-6de79a71-94ce-468a-ba01-6c81e70cb056 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: ef49a6f5-27c3-4595-af65-d6a5aa47d4e4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 850.075140] env[61839]: DEBUG oslo_vmware.api [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Task: {'id': task-1314509, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.627455} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 850.075140] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk to [datastore2] ce59c937-fc0b-464f-baaa-461c6f6c2d57/ce59c937-fc0b-464f-baaa-461c6f6c2d57.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 850.075140] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] [instance: ce59c937-fc0b-464f-baaa-461c6f6c2d57] Extending root virtual disk to 1048576 {{(pid=61839) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 850.075377] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d327f790-ea30-4df5-93c5-e35967fd7745 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.081866] env[61839]: DEBUG oslo_vmware.api [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Waiting for the task: (returnval){ [ 850.081866] env[61839]: value = "task-1314510" [ 850.081866] env[61839]: _type = "Task" [ 850.081866] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 850.089788] env[61839]: DEBUG oslo_vmware.api [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Task: {'id': task-1314510, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.130898] env[61839]: DEBUG nova.network.neutron [req-de04e409-38e6-461d-a25c-d4f86bc0b9a8 req-250bed9d-96ad-4842-92b3-af68dda36e74 service nova] [instance: 40c54d84-8e50-483a-b4e0-5f1cc72b0880] Updated VIF entry in instance network info cache for port 4373753c-2ab4-4f61-8117-89f623225621. 
{{(pid=61839) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 850.131454] env[61839]: DEBUG nova.network.neutron [req-de04e409-38e6-461d-a25c-d4f86bc0b9a8 req-250bed9d-96ad-4842-92b3-af68dda36e74 service nova] [instance: 40c54d84-8e50-483a-b4e0-5f1cc72b0880] Updating instance_info_cache with network_info: [{"id": "4373753c-2ab4-4f61-8117-89f623225621", "address": "fa:16:3e:7a:8e:c0", "network": {"id": "41c98894-de91-45eb-a390-6217e0f9dca5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1040806357-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.199", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7e03467b7fba46a9aac1562a1cb8368e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "572b7281-aad3-45fa-9cb2-fc1c70569948", "external-id": "nsx-vlan-transportzone-722", "segmentation_id": 722, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4373753c-2a", "ovs_interfaceid": "4373753c-2ab4-4f61-8117-89f623225621", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 850.232553] env[61839]: DEBUG nova.compute.manager [req-68950cf8-472a-4fd3-94d4-b97f4b57796b req-1fbc74d9-d2d3-4d5c-a028-c081ecebf80c service nova] [instance: 2cb53e37-8b0b-48b7-a973-061b91df46df] Received event network-changed-9b5b64c2-c5e6-4188-918b-22b9150363f0 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 850.232799] env[61839]: DEBUG nova.compute.manager [req-68950cf8-472a-4fd3-94d4-b97f4b57796b req-1fbc74d9-d2d3-4d5c-a028-c081ecebf80c service nova] [instance: 2cb53e37-8b0b-48b7-a973-061b91df46df] Refreshing instance network info cache due to event network-changed-9b5b64c2-c5e6-4188-918b-22b9150363f0. 
{{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 850.233189] env[61839]: DEBUG oslo_concurrency.lockutils [req-68950cf8-472a-4fd3-94d4-b97f4b57796b req-1fbc74d9-d2d3-4d5c-a028-c081ecebf80c service nova] Acquiring lock "refresh_cache-2cb53e37-8b0b-48b7-a973-061b91df46df" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 850.233371] env[61839]: DEBUG oslo_concurrency.lockutils [req-68950cf8-472a-4fd3-94d4-b97f4b57796b req-1fbc74d9-d2d3-4d5c-a028-c081ecebf80c service nova] Acquired lock "refresh_cache-2cb53e37-8b0b-48b7-a973-061b91df46df" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 850.233543] env[61839]: DEBUG nova.network.neutron [req-68950cf8-472a-4fd3-94d4-b97f4b57796b req-1fbc74d9-d2d3-4d5c-a028-c081ecebf80c service nova] [instance: 2cb53e37-8b0b-48b7-a973-061b91df46df] Refreshing network info cache for port 9b5b64c2-c5e6-4188-918b-22b9150363f0 {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 850.372326] env[61839]: DEBUG nova.compute.utils [None req-50defe20-1e8f-44c8-9564-69613a54695f tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Using /dev/sd instead of None {{(pid=61839) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 850.372877] env[61839]: DEBUG nova.compute.manager [None req-50defe20-1e8f-44c8-9564-69613a54695f tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] [instance: c996d7db-4b73-4445-9989-4efb2cd852e8] Allocating IP information in the background. {{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 850.373168] env[61839]: DEBUG nova.network.neutron [None req-50defe20-1e8f-44c8-9564-69613a54695f tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] [instance: c996d7db-4b73-4445-9989-4efb2cd852e8] allocate_for_instance() {{(pid=61839) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 850.421381] env[61839]: DEBUG nova.policy [None req-50defe20-1e8f-44c8-9564-69613a54695f tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '72a11321692d4692af854eabe0aca25a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c7a300fe2748456bb4a522a4d7c0d0f4', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61839) authorize /opt/stack/nova/nova/policy.py:201}} [ 850.592660] env[61839]: DEBUG oslo_vmware.api [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Task: {'id': task-1314510, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.081188} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 850.592939] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] [instance: ce59c937-fc0b-464f-baaa-461c6f6c2d57] Extended root virtual disk {{(pid=61839) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 850.593736] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17eee3d1-a7f4-4432-8ae5-854bc2dbcd96 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.619480] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] [instance: ce59c937-fc0b-464f-baaa-461c6f6c2d57] Reconfiguring VM instance instance-00000047 to attach disk [datastore2] ce59c937-fc0b-464f-baaa-461c6f6c2d57/ce59c937-fc0b-464f-baaa-461c6f6c2d57.vmdk or device None with type sparse {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 850.619832] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c6dd9927-35b8-4b1c-aed1-2a68e7c354fa {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.639055] env[61839]: DEBUG oslo_concurrency.lockutils [req-de04e409-38e6-461d-a25c-d4f86bc0b9a8 req-250bed9d-96ad-4842-92b3-af68dda36e74 service nova] Releasing lock "refresh_cache-40c54d84-8e50-483a-b4e0-5f1cc72b0880" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 850.647618] env[61839]: DEBUG oslo_concurrency.lockutils [None req-24cdc7ff-c17f-45b0-adfd-87c2d87da1aa tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] Acquiring lock "62959833-5834-4c0a-bf4e-3ac1157b3b0c" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 850.647618] env[61839]: DEBUG oslo_concurrency.lockutils [None req-24cdc7ff-c17f-45b0-adfd-87c2d87da1aa tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] Lock "62959833-5834-4c0a-bf4e-3ac1157b3b0c" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 850.647618] env[61839]: DEBUG oslo_concurrency.lockutils [None req-24cdc7ff-c17f-45b0-adfd-87c2d87da1aa tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] Acquiring lock "62959833-5834-4c0a-bf4e-3ac1157b3b0c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 850.647618] env[61839]: DEBUG oslo_concurrency.lockutils [None req-24cdc7ff-c17f-45b0-adfd-87c2d87da1aa tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] Lock "62959833-5834-4c0a-bf4e-3ac1157b3b0c-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 850.647823] env[61839]: DEBUG oslo_concurrency.lockutils [None req-24cdc7ff-c17f-45b0-adfd-87c2d87da1aa tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] Lock "62959833-5834-4c0a-bf4e-3ac1157b3b0c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 850.650575] env[61839]: DEBUG oslo_vmware.api [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Waiting for the task: (returnval){ [ 850.650575] env[61839]: value = "task-1314512" [ 850.650575] env[61839]: _type = "Task" [ 850.650575] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 850.650575] env[61839]: INFO nova.compute.manager [None req-24cdc7ff-c17f-45b0-adfd-87c2d87da1aa tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] [instance: 62959833-5834-4c0a-bf4e-3ac1157b3b0c] Terminating instance [ 850.652012] env[61839]: DEBUG nova.compute.manager [None req-24cdc7ff-c17f-45b0-adfd-87c2d87da1aa tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] [instance: 62959833-5834-4c0a-bf4e-3ac1157b3b0c] Start destroying the instance on the hypervisor. {{(pid=61839) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 850.652224] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-24cdc7ff-c17f-45b0-adfd-87c2d87da1aa tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] [instance: 62959833-5834-4c0a-bf4e-3ac1157b3b0c] Destroying instance {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 850.657507] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c8df648-bbbd-4d71-9241-75dc78ca5228 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.667160] env[61839]: DEBUG oslo_vmware.api [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Task: {'id': task-1314512, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.669472] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-24cdc7ff-c17f-45b0-adfd-87c2d87da1aa tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] [instance: 62959833-5834-4c0a-bf4e-3ac1157b3b0c] Powering off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 850.669768] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d10b58bd-0314-48de-a7c0-e86b490443f1 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.677483] env[61839]: DEBUG oslo_vmware.api [None req-24cdc7ff-c17f-45b0-adfd-87c2d87da1aa tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] Waiting for the task: (returnval){ [ 850.677483] env[61839]: value = "task-1314513" [ 850.677483] env[61839]: _type = "Task" [ 850.677483] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 850.685978] env[61839]: DEBUG oslo_vmware.api [None req-24cdc7ff-c17f-45b0-adfd-87c2d87da1aa tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] Task: {'id': task-1314513, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.701182] env[61839]: DEBUG nova.network.neutron [None req-50defe20-1e8f-44c8-9564-69613a54695f tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] [instance: c996d7db-4b73-4445-9989-4efb2cd852e8] Successfully created port: 7792be0d-d99c-49ba-98bb-a2b56bf71b17 {{(pid=61839) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 850.724117] env[61839]: DEBUG nova.network.neutron [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] [instance: c180cc04-79da-4529-a905-1985a85b7cf5] Successfully updated port: 38a6ad3a-a979-417b-a8ac-65232af41a58 {{(pid=61839) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 850.772470] env[61839]: DEBUG nova.network.neutron [req-68950cf8-472a-4fd3-94d4-b97f4b57796b req-1fbc74d9-d2d3-4d5c-a028-c081ecebf80c service nova] [instance: 2cb53e37-8b0b-48b7-a973-061b91df46df] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 850.878587] env[61839]: DEBUG nova.compute.manager [None req-50defe20-1e8f-44c8-9564-69613a54695f tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] [instance: c996d7db-4b73-4445-9989-4efb2cd852e8] Start building block device mappings for instance. 
{{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 850.930093] env[61839]: DEBUG nova.network.neutron [req-68950cf8-472a-4fd3-94d4-b97f4b57796b req-1fbc74d9-d2d3-4d5c-a028-c081ecebf80c service nova] [instance: 2cb53e37-8b0b-48b7-a973-061b91df46df] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 851.166358] env[61839]: DEBUG oslo_vmware.api [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Task: {'id': task-1314512, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.188337] env[61839]: DEBUG oslo_vmware.api [None req-24cdc7ff-c17f-45b0-adfd-87c2d87da1aa tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] Task: {'id': task-1314513, 'name': PowerOffVM_Task, 'duration_secs': 0.222609} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 851.189498] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-24cdc7ff-c17f-45b0-adfd-87c2d87da1aa tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] [instance: 62959833-5834-4c0a-bf4e-3ac1157b3b0c] Powered off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 851.189693] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-24cdc7ff-c17f-45b0-adfd-87c2d87da1aa tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] [instance: 62959833-5834-4c0a-bf4e-3ac1157b3b0c] Unregistering the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 851.189956] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-be411631-b1e9-4ba0-882f-14df0bc46c05 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.230303] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Acquiring lock "refresh_cache-c180cc04-79da-4529-a905-1985a85b7cf5" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 851.230476] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Acquired lock "refresh_cache-c180cc04-79da-4529-a905-1985a85b7cf5" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 851.230645] env[61839]: DEBUG nova.network.neutron [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] [instance: c180cc04-79da-4529-a905-1985a85b7cf5] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 851.238977] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c15c7c5b-f5d8-41b3-979b-7f309f29f8e2 {{(pid=61839) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.250581] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-203ff1b7-849d-47d7-98e4-5386b6889a11 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.254375] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-24cdc7ff-c17f-45b0-adfd-87c2d87da1aa tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] [instance: 62959833-5834-4c0a-bf4e-3ac1157b3b0c] Unregistered the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 851.254588] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-24cdc7ff-c17f-45b0-adfd-87c2d87da1aa tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] [instance: 62959833-5834-4c0a-bf4e-3ac1157b3b0c] Deleting contents of the VM from datastore datastore1 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 851.254771] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-24cdc7ff-c17f-45b0-adfd-87c2d87da1aa tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] Deleting the datastore file [datastore1] 62959833-5834-4c0a-bf4e-3ac1157b3b0c {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 851.255346] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f117bf9e-a49b-4b59-a652-7a0b254e5c1d {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.286321] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-790fe929-86f5-4c9c-b60a-45cfc4773b12 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.288560] env[61839]: DEBUG oslo_vmware.api [None req-24cdc7ff-c17f-45b0-adfd-87c2d87da1aa tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] Waiting for the task: (returnval){ [ 851.288560] env[61839]: value = "task-1314515" [ 851.288560] env[61839]: _type = "Task" [ 851.288560] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 851.297209] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6981f59d-1bc7-4cd5-b253-00b1e6904a9c {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.304089] env[61839]: DEBUG oslo_vmware.api [None req-24cdc7ff-c17f-45b0-adfd-87c2d87da1aa tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] Task: {'id': task-1314515, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.314201] env[61839]: DEBUG nova.compute.provider_tree [None req-6de79a71-94ce-468a-ba01-6c81e70cb056 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 851.328576] env[61839]: DEBUG nova.network.neutron [None req-0e959dc4-0080-49d1-93fb-cad2bd59d9d7 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] [instance: 2cb53e37-8b0b-48b7-a973-061b91df46df] Successfully updated port: aa6c596a-924c-4f3b-b846-88212a1fdbfe {{(pid=61839) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 851.432337] env[61839]: DEBUG oslo_concurrency.lockutils [req-68950cf8-472a-4fd3-94d4-b97f4b57796b req-1fbc74d9-d2d3-4d5c-a028-c081ecebf80c service nova] Releasing lock "refresh_cache-2cb53e37-8b0b-48b7-a973-061b91df46df" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 851.433810] env[61839]: DEBUG nova.compute.manager [req-68950cf8-472a-4fd3-94d4-b97f4b57796b req-1fbc74d9-d2d3-4d5c-a028-c081ecebf80c service nova] [instance: 62959833-5834-4c0a-bf4e-3ac1157b3b0c] Received event network-changed-8d3be36a-f8da-4212-9f18-edefa1681f82 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 851.433810] env[61839]: DEBUG nova.compute.manager [req-68950cf8-472a-4fd3-94d4-b97f4b57796b req-1fbc74d9-d2d3-4d5c-a028-c081ecebf80c service nova] [instance: 62959833-5834-4c0a-bf4e-3ac1157b3b0c] Refreshing instance network info cache due to event network-changed-8d3be36a-f8da-4212-9f18-edefa1681f82. 
{{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 851.433810] env[61839]: DEBUG oslo_concurrency.lockutils [req-68950cf8-472a-4fd3-94d4-b97f4b57796b req-1fbc74d9-d2d3-4d5c-a028-c081ecebf80c service nova] Acquiring lock "refresh_cache-62959833-5834-4c0a-bf4e-3ac1157b3b0c" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 851.433810] env[61839]: DEBUG oslo_concurrency.lockutils [req-68950cf8-472a-4fd3-94d4-b97f4b57796b req-1fbc74d9-d2d3-4d5c-a028-c081ecebf80c service nova] Acquired lock "refresh_cache-62959833-5834-4c0a-bf4e-3ac1157b3b0c" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 851.433810] env[61839]: DEBUG nova.network.neutron [req-68950cf8-472a-4fd3-94d4-b97f4b57796b req-1fbc74d9-d2d3-4d5c-a028-c081ecebf80c service nova] [instance: 62959833-5834-4c0a-bf4e-3ac1157b3b0c] Refreshing network info cache for port 8d3be36a-f8da-4212-9f18-edefa1681f82 {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 851.436920] env[61839]: DEBUG nova.compute.manager [req-28196252-2d28-4060-b419-dbfd0eb5720f req-10deb077-0681-4df9-891b-f7f577d4f111 service nova] [instance: 3f86a0d5-30fd-42cc-bd40-14bce9d0e56f] Received event network-changed-6ae0317a-8af8-438c-864f-d31b4d288dab {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 851.438099] env[61839]: DEBUG nova.compute.manager [req-28196252-2d28-4060-b419-dbfd0eb5720f req-10deb077-0681-4df9-891b-f7f577d4f111 service nova] [instance: 3f86a0d5-30fd-42cc-bd40-14bce9d0e56f] Refreshing instance network info cache due to event network-changed-6ae0317a-8af8-438c-864f-d31b4d288dab. {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 851.438386] env[61839]: DEBUG oslo_concurrency.lockutils [req-28196252-2d28-4060-b419-dbfd0eb5720f req-10deb077-0681-4df9-891b-f7f577d4f111 service nova] Acquiring lock "refresh_cache-3f86a0d5-30fd-42cc-bd40-14bce9d0e56f" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 851.438591] env[61839]: DEBUG oslo_concurrency.lockutils [req-28196252-2d28-4060-b419-dbfd0eb5720f req-10deb077-0681-4df9-891b-f7f577d4f111 service nova] Acquired lock "refresh_cache-3f86a0d5-30fd-42cc-bd40-14bce9d0e56f" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 851.438794] env[61839]: DEBUG nova.network.neutron [req-28196252-2d28-4060-b419-dbfd0eb5720f req-10deb077-0681-4df9-891b-f7f577d4f111 service nova] [instance: 3f86a0d5-30fd-42cc-bd40-14bce9d0e56f] Refreshing network info cache for port 6ae0317a-8af8-438c-864f-d31b4d288dab {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 851.663400] env[61839]: DEBUG oslo_vmware.api [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Task: {'id': task-1314512, 'name': ReconfigVM_Task, 'duration_secs': 0.825649} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 851.663746] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] [instance: ce59c937-fc0b-464f-baaa-461c6f6c2d57] Reconfigured VM instance instance-00000047 to attach disk [datastore2] ce59c937-fc0b-464f-baaa-461c6f6c2d57/ce59c937-fc0b-464f-baaa-461c6f6c2d57.vmdk or device None with type sparse {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 851.664445] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-41bd793d-dbae-4014-8c2b-eb879f5704f2 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.670195] env[61839]: DEBUG oslo_vmware.api [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Waiting for the task: (returnval){ [ 851.670195] env[61839]: value = "task-1314516" [ 851.670195] env[61839]: _type = "Task" [ 851.670195] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 851.678166] env[61839]: DEBUG oslo_vmware.api [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Task: {'id': task-1314516, 'name': Rename_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.761748] env[61839]: DEBUG nova.network.neutron [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] [instance: c180cc04-79da-4529-a905-1985a85b7cf5] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 851.798443] env[61839]: DEBUG oslo_vmware.api [None req-24cdc7ff-c17f-45b0-adfd-87c2d87da1aa tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] Task: {'id': task-1314515, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.41646} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 851.800777] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-24cdc7ff-c17f-45b0-adfd-87c2d87da1aa tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] Deleted the datastore file {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 851.800982] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-24cdc7ff-c17f-45b0-adfd-87c2d87da1aa tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] [instance: 62959833-5834-4c0a-bf4e-3ac1157b3b0c] Deleted contents of the VM from datastore datastore1 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 851.801214] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-24cdc7ff-c17f-45b0-adfd-87c2d87da1aa tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] [instance: 62959833-5834-4c0a-bf4e-3ac1157b3b0c] Instance destroyed {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 851.801404] env[61839]: INFO nova.compute.manager [None req-24cdc7ff-c17f-45b0-adfd-87c2d87da1aa tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] [instance: 62959833-5834-4c0a-bf4e-3ac1157b3b0c] Took 1.15 seconds to destroy the instance on the hypervisor. [ 851.801655] env[61839]: DEBUG oslo.service.loopingcall [None req-24cdc7ff-c17f-45b0-adfd-87c2d87da1aa tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 851.801862] env[61839]: DEBUG nova.compute.manager [-] [instance: 62959833-5834-4c0a-bf4e-3ac1157b3b0c] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 851.801960] env[61839]: DEBUG nova.network.neutron [-] [instance: 62959833-5834-4c0a-bf4e-3ac1157b3b0c] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 851.817790] env[61839]: DEBUG nova.scheduler.client.report [None req-6de79a71-94ce-468a-ba01-6c81e70cb056 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 851.830407] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0e959dc4-0080-49d1-93fb-cad2bd59d9d7 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] Acquiring lock "refresh_cache-2cb53e37-8b0b-48b7-a973-061b91df46df" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 851.830568] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0e959dc4-0080-49d1-93fb-cad2bd59d9d7 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] Acquired lock "refresh_cache-2cb53e37-8b0b-48b7-a973-061b91df46df" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 851.830874] env[61839]: DEBUG nova.network.neutron [None req-0e959dc4-0080-49d1-93fb-cad2bd59d9d7 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] [instance: 2cb53e37-8b0b-48b7-a973-061b91df46df] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 851.896026] env[61839]: DEBUG nova.compute.manager [None req-50defe20-1e8f-44c8-9564-69613a54695f tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] [instance: c996d7db-4b73-4445-9989-4efb2cd852e8] Start spawning the instance on the hypervisor. 
{{(pid=61839) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 851.922312] env[61839]: DEBUG nova.virt.hardware [None req-50defe20-1e8f-44c8-9564-69613a54695f tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-18T16:51:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-18T16:50:57Z,direct_url=,disk_format='vmdk',id=e497cc62-282a-4a70-9770-22d80d8a1013,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a98e034276e44534a9feace637762da7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-18T16:50:58Z,virtual_size=,visibility=), allow threads: False {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 851.922642] env[61839]: DEBUG nova.virt.hardware [None req-50defe20-1e8f-44c8-9564-69613a54695f tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Flavor limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 851.922838] env[61839]: DEBUG nova.virt.hardware [None req-50defe20-1e8f-44c8-9564-69613a54695f tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Image limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 851.923729] env[61839]: DEBUG nova.virt.hardware [None req-50defe20-1e8f-44c8-9564-69613a54695f tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Flavor pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 851.924030] env[61839]: DEBUG nova.virt.hardware [None req-50defe20-1e8f-44c8-9564-69613a54695f tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Image pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 851.924458] env[61839]: DEBUG nova.virt.hardware [None req-50defe20-1e8f-44c8-9564-69613a54695f tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 851.925897] env[61839]: DEBUG nova.virt.hardware [None req-50defe20-1e8f-44c8-9564-69613a54695f tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 851.926223] env[61839]: DEBUG nova.virt.hardware [None req-50defe20-1e8f-44c8-9564-69613a54695f tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 851.926644] env[61839]: DEBUG nova.virt.hardware [None 
req-50defe20-1e8f-44c8-9564-69613a54695f tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Got 1 possible topologies {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 851.927087] env[61839]: DEBUG nova.virt.hardware [None req-50defe20-1e8f-44c8-9564-69613a54695f tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 851.927543] env[61839]: DEBUG nova.virt.hardware [None req-50defe20-1e8f-44c8-9564-69613a54695f tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 851.929152] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cca32985-820f-4afc-b0fd-2a51e054c750 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.940503] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12c8121b-8241-47b8-bfde-35dd3cb578e6 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.056568] env[61839]: DEBUG nova.network.neutron [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] [instance: c180cc04-79da-4529-a905-1985a85b7cf5] Updating instance_info_cache with network_info: [{"id": "38a6ad3a-a979-417b-a8ac-65232af41a58", "address": "fa:16:3e:e6:2b:8b", "network": {"id": "1bb5337b-3280-442a-9e85-19ff81dca17a", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-453333563-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fefa976ea98445f1b4e719d3e3a1e8af", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "304be4f7-4e36-4468-9ef4-e457341cef18", "external-id": "nsx-vlan-transportzone-911", "segmentation_id": 911, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap38a6ad3a-a9", "ovs_interfaceid": "38a6ad3a-a979-417b-a8ac-65232af41a58", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 852.182249] env[61839]: DEBUG oslo_vmware.api [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Task: {'id': task-1314516, 'name': Rename_Task, 'duration_secs': 0.293444} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 852.184503] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] [instance: ce59c937-fc0b-464f-baaa-461c6f6c2d57] Powering on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 852.184782] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d3882f79-8ce7-4701-a351-7d32b9ea2e09 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.192809] env[61839]: DEBUG oslo_vmware.api [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Waiting for the task: (returnval){ [ 852.192809] env[61839]: value = "task-1314517" [ 852.192809] env[61839]: _type = "Task" [ 852.192809] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 852.202603] env[61839]: DEBUG oslo_vmware.api [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Task: {'id': task-1314517, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.268370] env[61839]: DEBUG nova.compute.manager [req-6a7b3e1f-3766-4f90-86d8-26f32a4b8f62 req-be6a63c0-61c6-4f7a-ab80-728980ed564f service nova] [instance: c180cc04-79da-4529-a905-1985a85b7cf5] Received event network-vif-plugged-38a6ad3a-a979-417b-a8ac-65232af41a58 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 852.268593] env[61839]: DEBUG oslo_concurrency.lockutils [req-6a7b3e1f-3766-4f90-86d8-26f32a4b8f62 req-be6a63c0-61c6-4f7a-ab80-728980ed564f service nova] Acquiring lock "c180cc04-79da-4529-a905-1985a85b7cf5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 852.268802] env[61839]: DEBUG oslo_concurrency.lockutils [req-6a7b3e1f-3766-4f90-86d8-26f32a4b8f62 req-be6a63c0-61c6-4f7a-ab80-728980ed564f service nova] Lock "c180cc04-79da-4529-a905-1985a85b7cf5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 852.268966] env[61839]: DEBUG oslo_concurrency.lockutils [req-6a7b3e1f-3766-4f90-86d8-26f32a4b8f62 req-be6a63c0-61c6-4f7a-ab80-728980ed564f service nova] Lock "c180cc04-79da-4529-a905-1985a85b7cf5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 852.269436] env[61839]: DEBUG nova.compute.manager [req-6a7b3e1f-3766-4f90-86d8-26f32a4b8f62 req-be6a63c0-61c6-4f7a-ab80-728980ed564f service nova] [instance: c180cc04-79da-4529-a905-1985a85b7cf5] No waiting events found dispatching network-vif-plugged-38a6ad3a-a979-417b-a8ac-65232af41a58 {{(pid=61839) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 852.269953] env[61839]: WARNING nova.compute.manager 
[req-6a7b3e1f-3766-4f90-86d8-26f32a4b8f62 req-be6a63c0-61c6-4f7a-ab80-728980ed564f service nova] [instance: c180cc04-79da-4529-a905-1985a85b7cf5] Received unexpected event network-vif-plugged-38a6ad3a-a979-417b-a8ac-65232af41a58 for instance with vm_state building and task_state spawning. [ 852.270159] env[61839]: DEBUG nova.compute.manager [req-6a7b3e1f-3766-4f90-86d8-26f32a4b8f62 req-be6a63c0-61c6-4f7a-ab80-728980ed564f service nova] [instance: c180cc04-79da-4529-a905-1985a85b7cf5] Received event network-changed-38a6ad3a-a979-417b-a8ac-65232af41a58 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 852.270401] env[61839]: DEBUG nova.compute.manager [req-6a7b3e1f-3766-4f90-86d8-26f32a4b8f62 req-be6a63c0-61c6-4f7a-ab80-728980ed564f service nova] [instance: c180cc04-79da-4529-a905-1985a85b7cf5] Refreshing instance network info cache due to event network-changed-38a6ad3a-a979-417b-a8ac-65232af41a58. {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 852.273270] env[61839]: DEBUG oslo_concurrency.lockutils [req-6a7b3e1f-3766-4f90-86d8-26f32a4b8f62 req-be6a63c0-61c6-4f7a-ab80-728980ed564f service nova] Acquiring lock "refresh_cache-c180cc04-79da-4529-a905-1985a85b7cf5" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 852.277107] env[61839]: DEBUG nova.network.neutron [None req-50defe20-1e8f-44c8-9564-69613a54695f tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] [instance: c996d7db-4b73-4445-9989-4efb2cd852e8] Successfully updated port: 7792be0d-d99c-49ba-98bb-a2b56bf71b17 {{(pid=61839) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 852.324939] env[61839]: DEBUG oslo_concurrency.lockutils [None req-6de79a71-94ce-468a-ba01-6c81e70cb056 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.460s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 852.325777] env[61839]: DEBUG nova.compute.manager [None req-6de79a71-94ce-468a-ba01-6c81e70cb056 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: ef49a6f5-27c3-4595-af65-d6a5aa47d4e4] Start building networks asynchronously for instance. 
{{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 852.334095] env[61839]: DEBUG oslo_concurrency.lockutils [None req-eaf3cde2-930c-48e4-9dfc-5ed5eef56c0b tempest-ServerMetadataNegativeTestJSON-1868427009 tempest-ServerMetadataNegativeTestJSON-1868427009-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 14.774s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 852.334639] env[61839]: DEBUG nova.objects.instance [None req-eaf3cde2-930c-48e4-9dfc-5ed5eef56c0b tempest-ServerMetadataNegativeTestJSON-1868427009 tempest-ServerMetadataNegativeTestJSON-1868427009-project-member] Lazy-loading 'resources' on Instance uuid 406da948-71c7-4c28-9ee3-10af64b1ab51 {{(pid=61839) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 852.381324] env[61839]: DEBUG nova.network.neutron [None req-0e959dc4-0080-49d1-93fb-cad2bd59d9d7 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] [instance: 2cb53e37-8b0b-48b7-a973-061b91df46df] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 852.558273] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Releasing lock "refresh_cache-c180cc04-79da-4529-a905-1985a85b7cf5" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 852.558665] env[61839]: DEBUG nova.compute.manager [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] [instance: c180cc04-79da-4529-a905-1985a85b7cf5] Instance network_info: |[{"id": "38a6ad3a-a979-417b-a8ac-65232af41a58", "address": "fa:16:3e:e6:2b:8b", "network": {"id": "1bb5337b-3280-442a-9e85-19ff81dca17a", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-453333563-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fefa976ea98445f1b4e719d3e3a1e8af", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "304be4f7-4e36-4468-9ef4-e457341cef18", "external-id": "nsx-vlan-transportzone-911", "segmentation_id": 911, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap38a6ad3a-a9", "ovs_interfaceid": "38a6ad3a-a979-417b-a8ac-65232af41a58", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 852.558988] env[61839]: DEBUG oslo_concurrency.lockutils [req-6a7b3e1f-3766-4f90-86d8-26f32a4b8f62 req-be6a63c0-61c6-4f7a-ab80-728980ed564f service nova] Acquired lock "refresh_cache-c180cc04-79da-4529-a905-1985a85b7cf5" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 852.559607] env[61839]: DEBUG 
nova.network.neutron [req-6a7b3e1f-3766-4f90-86d8-26f32a4b8f62 req-be6a63c0-61c6-4f7a-ab80-728980ed564f service nova] [instance: c180cc04-79da-4529-a905-1985a85b7cf5] Refreshing network info cache for port 38a6ad3a-a979-417b-a8ac-65232af41a58 {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 852.560567] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] [instance: c180cc04-79da-4529-a905-1985a85b7cf5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e6:2b:8b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '304be4f7-4e36-4468-9ef4-e457341cef18', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '38a6ad3a-a979-417b-a8ac-65232af41a58', 'vif_model': 'vmxnet3'}] {{(pid=61839) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 852.575999] env[61839]: DEBUG oslo.service.loopingcall [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 852.581735] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c180cc04-79da-4529-a905-1985a85b7cf5] Creating VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 852.582564] env[61839]: DEBUG nova.network.neutron [req-68950cf8-472a-4fd3-94d4-b97f4b57796b req-1fbc74d9-d2d3-4d5c-a028-c081ecebf80c service nova] [instance: 62959833-5834-4c0a-bf4e-3ac1157b3b0c] Updated VIF entry in instance network info cache for port 8d3be36a-f8da-4212-9f18-edefa1681f82. 
{{(pid=61839) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 852.582884] env[61839]: DEBUG nova.network.neutron [req-68950cf8-472a-4fd3-94d4-b97f4b57796b req-1fbc74d9-d2d3-4d5c-a028-c081ecebf80c service nova] [instance: 62959833-5834-4c0a-bf4e-3ac1157b3b0c] Updating instance_info_cache with network_info: [{"id": "8d3be36a-f8da-4212-9f18-edefa1681f82", "address": "fa:16:3e:b8:82:1d", "network": {"id": "04db4c3e-de70-4119-8a0c-0b090344b55a", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-422735768-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "434f932a2a9e448d99fac449918affe9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca99f7a1-6365-4d3c-af16-1b1c1288091e", "external-id": "cl2-zone-334", "segmentation_id": 334, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8d3be36a-f8", "ovs_interfaceid": "8d3be36a-f8da-4212-9f18-edefa1681f82", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 852.585054] env[61839]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fe6ae6de-8d50-45db-83ca-6e66553dd9ed {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.611109] env[61839]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 852.611109] env[61839]: value = "task-1314518" [ 852.611109] env[61839]: _type = "Task" [ 852.611109] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 852.621207] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314518, 'name': CreateVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.676188] env[61839]: DEBUG nova.network.neutron [req-28196252-2d28-4060-b419-dbfd0eb5720f req-10deb077-0681-4df9-891b-f7f577d4f111 service nova] [instance: 3f86a0d5-30fd-42cc-bd40-14bce9d0e56f] Updated VIF entry in instance network info cache for port 6ae0317a-8af8-438c-864f-d31b4d288dab. 
{{(pid=61839) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 852.676637] env[61839]: DEBUG nova.network.neutron [req-28196252-2d28-4060-b419-dbfd0eb5720f req-10deb077-0681-4df9-891b-f7f577d4f111 service nova] [instance: 3f86a0d5-30fd-42cc-bd40-14bce9d0e56f] Updating instance_info_cache with network_info: [{"id": "6ae0317a-8af8-438c-864f-d31b4d288dab", "address": "fa:16:3e:0b:3e:68", "network": {"id": "9ad4a7c4-51fa-42e2-927a-24d25b423b8b", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1594396457-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.250", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9d28bf7713204dfb9682d9c002cb5449", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aef08290-001a-4ae8-aff0-1889e2211389", "external-id": "nsx-vlan-transportzone-389", "segmentation_id": 389, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6ae0317a-8a", "ovs_interfaceid": "6ae0317a-8af8-438c-864f-d31b4d288dab", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 852.704858] env[61839]: DEBUG oslo_vmware.api [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Task: {'id': task-1314517, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.724140] env[61839]: DEBUG nova.network.neutron [None req-0e959dc4-0080-49d1-93fb-cad2bd59d9d7 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] [instance: 2cb53e37-8b0b-48b7-a973-061b91df46df] Updating instance_info_cache with network_info: [{"id": "9b5b64c2-c5e6-4188-918b-22b9150363f0", "address": "fa:16:3e:bb:14:9c", "network": {"id": "c9ba50a0-6002-4795-a200-f59936edfe3a", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1274347994", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.45", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b3d6228b03c4412695ef822ed618a27a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b4734e5e-2a76-4bda-8905-70c9bf9e007f", "external-id": "nsx-vlan-transportzone-122", "segmentation_id": 122, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9b5b64c2-c5", "ovs_interfaceid": "9b5b64c2-c5e6-4188-918b-22b9150363f0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "aa6c596a-924c-4f3b-b846-88212a1fdbfe", "address": "fa:16:3e:6e:bc:bf", "network": {"id": "3b0c3542-7683-45fe-9fca-11e563765d31", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-23398633", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.217", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "b3d6228b03c4412695ef822ed618a27a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b00fe87c-d828-442f-bd09-e9018c468557", "external-id": "nsx-vlan-transportzone-7", "segmentation_id": 7, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaa6c596a-92", "ovs_interfaceid": "aa6c596a-924c-4f3b-b846-88212a1fdbfe", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 852.781091] env[61839]: DEBUG oslo_concurrency.lockutils [None req-50defe20-1e8f-44c8-9564-69613a54695f tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Acquiring lock "refresh_cache-c996d7db-4b73-4445-9989-4efb2cd852e8" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 852.781091] env[61839]: DEBUG oslo_concurrency.lockutils [None req-50defe20-1e8f-44c8-9564-69613a54695f tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Acquired lock "refresh_cache-c996d7db-4b73-4445-9989-4efb2cd852e8" {{(pid=61839) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 852.781091] env[61839]: DEBUG nova.network.neutron [None req-50defe20-1e8f-44c8-9564-69613a54695f tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] [instance: c996d7db-4b73-4445-9989-4efb2cd852e8] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 852.835323] env[61839]: DEBUG nova.compute.utils [None req-6de79a71-94ce-468a-ba01-6c81e70cb056 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Using /dev/sd instead of None {{(pid=61839) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 852.836880] env[61839]: DEBUG nova.compute.manager [None req-6de79a71-94ce-468a-ba01-6c81e70cb056 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: ef49a6f5-27c3-4595-af65-d6a5aa47d4e4] Allocating IP information in the background. {{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 852.837094] env[61839]: DEBUG nova.network.neutron [None req-6de79a71-94ce-468a-ba01-6c81e70cb056 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: ef49a6f5-27c3-4595-af65-d6a5aa47d4e4] allocate_for_instance() {{(pid=61839) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 852.891621] env[61839]: DEBUG nova.network.neutron [-] [instance: 62959833-5834-4c0a-bf4e-3ac1157b3b0c] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 852.893844] env[61839]: DEBUG nova.policy [None req-6de79a71-94ce-468a-ba01-6c81e70cb056 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'de344d8cc13340d7affed971d75f486d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '251b0d2531ba4f14a2eb6ea75382c418', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61839) authorize /opt/stack/nova/nova/policy.py:201}} [ 853.084986] env[61839]: DEBUG oslo_concurrency.lockutils [req-68950cf8-472a-4fd3-94d4-b97f4b57796b req-1fbc74d9-d2d3-4d5c-a028-c081ecebf80c service nova] Releasing lock "refresh_cache-62959833-5834-4c0a-bf4e-3ac1157b3b0c" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 853.124060] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314518, 'name': CreateVM_Task, 'duration_secs': 0.403956} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 853.124384] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c180cc04-79da-4529-a905-1985a85b7cf5] Created VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 853.125074] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 853.125279] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 853.125603] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 853.125865] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-799cc91e-9e8b-4037-9ab5-d6c7560764e0 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.132938] env[61839]: DEBUG oslo_vmware.api [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Waiting for the task: (returnval){ [ 853.132938] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]5240624a-a22e-5e89-dd9e-b805924f66b8" [ 853.132938] env[61839]: _type = "Task" [ 853.132938] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 853.141708] env[61839]: DEBUG oslo_vmware.api [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]5240624a-a22e-5e89-dd9e-b805924f66b8, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.157032] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82c1e538-5cd2-43bf-8736-22330385670d {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.172981] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7baabdf2-ffb8-40e6-9be5-4b30f98af6bb {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.179967] env[61839]: DEBUG nova.network.neutron [None req-6de79a71-94ce-468a-ba01-6c81e70cb056 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: ef49a6f5-27c3-4595-af65-d6a5aa47d4e4] Successfully created port: b7032df2-ea05-48a6-9ba7-368194a96584 {{(pid=61839) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 853.182930] env[61839]: DEBUG oslo_concurrency.lockutils [req-28196252-2d28-4060-b419-dbfd0eb5720f req-10deb077-0681-4df9-891b-f7f577d4f111 service nova] Releasing lock "refresh_cache-3f86a0d5-30fd-42cc-bd40-14bce9d0e56f" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 853.226655] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19b98f17-372b-4c24-ad66-366b15f2b870 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.229399] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0e959dc4-0080-49d1-93fb-cad2bd59d9d7 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] Releasing lock "refresh_cache-2cb53e37-8b0b-48b7-a973-061b91df46df" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 853.229738] env[61839]: DEBUG nova.compute.manager [None req-0e959dc4-0080-49d1-93fb-cad2bd59d9d7 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] [instance: 2cb53e37-8b0b-48b7-a973-061b91df46df] Instance network_info: |[{"id": "9b5b64c2-c5e6-4188-918b-22b9150363f0", "address": "fa:16:3e:bb:14:9c", "network": {"id": "c9ba50a0-6002-4795-a200-f59936edfe3a", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1274347994", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.45", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b3d6228b03c4412695ef822ed618a27a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b4734e5e-2a76-4bda-8905-70c9bf9e007f", "external-id": "nsx-vlan-transportzone-122", "segmentation_id": 122, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9b5b64c2-c5", "ovs_interfaceid": "9b5b64c2-c5e6-4188-918b-22b9150363f0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "aa6c596a-924c-4f3b-b846-88212a1fdbfe", "address": "fa:16:3e:6e:bc:bf", "network": {"id": 
"3b0c3542-7683-45fe-9fca-11e563765d31", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-23398633", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.217", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "b3d6228b03c4412695ef822ed618a27a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b00fe87c-d828-442f-bd09-e9018c468557", "external-id": "nsx-vlan-transportzone-7", "segmentation_id": 7, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaa6c596a-92", "ovs_interfaceid": "aa6c596a-924c-4f3b-b846-88212a1fdbfe", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 853.230824] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-0e959dc4-0080-49d1-93fb-cad2bd59d9d7 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] [instance: 2cb53e37-8b0b-48b7-a973-061b91df46df] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:bb:14:9c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b4734e5e-2a76-4bda-8905-70c9bf9e007f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9b5b64c2-c5e6-4188-918b-22b9150363f0', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:6e:bc:bf', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b00fe87c-d828-442f-bd09-e9018c468557', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'aa6c596a-924c-4f3b-b846-88212a1fdbfe', 'vif_model': 'vmxnet3'}] {{(pid=61839) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 853.241623] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e959dc4-0080-49d1-93fb-cad2bd59d9d7 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] Creating folder: Project (b3d6228b03c4412695ef822ed618a27a). Parent ref: group-v281288. {{(pid=61839) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 853.245524] env[61839]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e13f8fbe-1838-4fac-b757-dd91596129d0 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.247341] env[61839]: DEBUG oslo_vmware.api [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Task: {'id': task-1314517, 'name': PowerOnVM_Task, 'duration_secs': 0.668604} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 853.250080] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] [instance: ce59c937-fc0b-464f-baaa-461c6f6c2d57] Powered on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 853.250304] env[61839]: INFO nova.compute.manager [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] [instance: ce59c937-fc0b-464f-baaa-461c6f6c2d57] Took 8.86 seconds to spawn the instance on the hypervisor. [ 853.250539] env[61839]: DEBUG nova.compute.manager [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] [instance: ce59c937-fc0b-464f-baaa-461c6f6c2d57] Checking state {{(pid=61839) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 853.251737] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ea66b5b-ff96-4175-b022-f254be3a4e05 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.255620] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e0b8411-c2c2-4b8f-984b-f3748c1986a6 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.264681] env[61839]: INFO nova.virt.vmwareapi.vm_util [None req-0e959dc4-0080-49d1-93fb-cad2bd59d9d7 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] Created folder: Project (b3d6228b03c4412695ef822ed618a27a) in parent group-v281288. [ 853.264872] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e959dc4-0080-49d1-93fb-cad2bd59d9d7 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] Creating folder: Instances. Parent ref: group-v281369. {{(pid=61839) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 853.265197] env[61839]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a0338c84-c68b-4f4e-b3cb-ff541a882cd2 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.276374] env[61839]: DEBUG nova.compute.provider_tree [None req-eaf3cde2-930c-48e4-9dfc-5ed5eef56c0b tempest-ServerMetadataNegativeTestJSON-1868427009 tempest-ServerMetadataNegativeTestJSON-1868427009-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 853.300047] env[61839]: INFO nova.virt.vmwareapi.vm_util [None req-0e959dc4-0080-49d1-93fb-cad2bd59d9d7 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] Created folder: Instances in parent group-v281369. [ 853.300047] env[61839]: DEBUG oslo.service.loopingcall [None req-0e959dc4-0080-49d1-93fb-cad2bd59d9d7 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 853.300047] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2cb53e37-8b0b-48b7-a973-061b91df46df] Creating VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 853.300047] env[61839]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-00c9ab17-85f0-4d0c-bc70-efc2ad076a5a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.325086] env[61839]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 853.325086] env[61839]: value = "task-1314521" [ 853.325086] env[61839]: _type = "Task" [ 853.325086] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 853.335568] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314521, 'name': CreateVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.340638] env[61839]: DEBUG nova.compute.manager [None req-6de79a71-94ce-468a-ba01-6c81e70cb056 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: ef49a6f5-27c3-4595-af65-d6a5aa47d4e4] Start building block device mappings for instance. {{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 853.353371] env[61839]: DEBUG nova.network.neutron [None req-50defe20-1e8f-44c8-9564-69613a54695f tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] [instance: c996d7db-4b73-4445-9989-4efb2cd852e8] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 853.396371] env[61839]: INFO nova.compute.manager [-] [instance: 62959833-5834-4c0a-bf4e-3ac1157b3b0c] Took 1.59 seconds to deallocate network for instance. [ 853.462273] env[61839]: DEBUG nova.network.neutron [req-6a7b3e1f-3766-4f90-86d8-26f32a4b8f62 req-be6a63c0-61c6-4f7a-ab80-728980ed564f service nova] [instance: c180cc04-79da-4529-a905-1985a85b7cf5] Updated VIF entry in instance network info cache for port 38a6ad3a-a979-417b-a8ac-65232af41a58. 
{{(pid=61839) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 853.462727] env[61839]: DEBUG nova.network.neutron [req-6a7b3e1f-3766-4f90-86d8-26f32a4b8f62 req-be6a63c0-61c6-4f7a-ab80-728980ed564f service nova] [instance: c180cc04-79da-4529-a905-1985a85b7cf5] Updating instance_info_cache with network_info: [{"id": "38a6ad3a-a979-417b-a8ac-65232af41a58", "address": "fa:16:3e:e6:2b:8b", "network": {"id": "1bb5337b-3280-442a-9e85-19ff81dca17a", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-453333563-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fefa976ea98445f1b4e719d3e3a1e8af", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "304be4f7-4e36-4468-9ef4-e457341cef18", "external-id": "nsx-vlan-transportzone-911", "segmentation_id": 911, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap38a6ad3a-a9", "ovs_interfaceid": "38a6ad3a-a979-417b-a8ac-65232af41a58", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 853.472862] env[61839]: DEBUG nova.compute.manager [req-a9cf5b32-62f6-45d9-902b-8bb45c920c4b req-470e97d1-0cfe-4b1d-b619-0abafebe1464 service nova] [instance: c996d7db-4b73-4445-9989-4efb2cd852e8] Received event network-vif-plugged-7792be0d-d99c-49ba-98bb-a2b56bf71b17 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 853.473584] env[61839]: DEBUG oslo_concurrency.lockutils [req-a9cf5b32-62f6-45d9-902b-8bb45c920c4b req-470e97d1-0cfe-4b1d-b619-0abafebe1464 service nova] Acquiring lock "c996d7db-4b73-4445-9989-4efb2cd852e8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 853.474278] env[61839]: DEBUG oslo_concurrency.lockutils [req-a9cf5b32-62f6-45d9-902b-8bb45c920c4b req-470e97d1-0cfe-4b1d-b619-0abafebe1464 service nova] Lock "c996d7db-4b73-4445-9989-4efb2cd852e8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 853.475245] env[61839]: DEBUG oslo_concurrency.lockutils [req-a9cf5b32-62f6-45d9-902b-8bb45c920c4b req-470e97d1-0cfe-4b1d-b619-0abafebe1464 service nova] Lock "c996d7db-4b73-4445-9989-4efb2cd852e8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 853.475575] env[61839]: DEBUG nova.compute.manager [req-a9cf5b32-62f6-45d9-902b-8bb45c920c4b req-470e97d1-0cfe-4b1d-b619-0abafebe1464 service nova] [instance: c996d7db-4b73-4445-9989-4efb2cd852e8] No waiting events found dispatching network-vif-plugged-7792be0d-d99c-49ba-98bb-a2b56bf71b17 {{(pid=61839) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} 
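
The run of records above, pop_instance_event finding "No waiting events found" followed by the WARNING about an unexpected network-vif-plugged, is the visible half of Nova's external-event handshake with Neutron: the compute manager normally registers a waiter before plugging a VIF, and the arriving event either wakes that waiter or, as here during spawn, races ahead of it and is logged as unexpected. Below is a minimal sketch of that handshake using only the standard library; the dispatcher class and method names are illustrative, not Nova's actual API (the real entry point is ComputeVirtAPI.wait_for_instance_event):

    import threading

    class EventDispatcher:
        """Illustrative stand-in for Nova's instance-event plumbing."""

        def __init__(self):
            self._lock = threading.Lock()
            self._waiters = {}  # (instance_uuid, event_name) -> threading.Event

        def prepare(self, instance_uuid, event_name):
            # Register interest *before* triggering the action that will
            # emit the event (the counterpart of pop_instance_event).
            ev = threading.Event()
            with self._lock:
                self._waiters[(instance_uuid, event_name)] = ev
            return ev

        def dispatch(self, instance_uuid, event_name):
            # Called when the Neutron event arrives via external_instance_event.
            with self._lock:
                ev = self._waiters.pop((instance_uuid, event_name), None)
            if ev is None:
                # Exactly the situation in the log: "No waiting events found
                # dispatching network-vif-plugged-..." plus the WARNING about
                # an unexpected event for an instance still building/spawning.
                print(f"WARNING: unexpected {event_name} for {instance_uuid}")
            else:
                ev.set()

    dispatcher = EventDispatcher()
    waiter = dispatcher.prepare("c996d7db-...", "network-vif-plugged-7792be0d")
    dispatcher.dispatch("c996d7db-...", "network-vif-plugged-7792be0d")
    # Nova bounds this wait with [DEFAULT] vif_plugging_timeout (default 300s).
    assert waiter.wait(timeout=300)

When the event loses the race, as in these records, it is not an error: the subsequent network-changed event triggers the "Refreshing network info cache" path seen above, which reconciles the instance_info_cache after the fact.
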
[ 853.475865] env[61839]: WARNING nova.compute.manager [req-a9cf5b32-62f6-45d9-902b-8bb45c920c4b req-470e97d1-0cfe-4b1d-b619-0abafebe1464 service nova] [instance: c996d7db-4b73-4445-9989-4efb2cd852e8] Received unexpected event network-vif-plugged-7792be0d-d99c-49ba-98bb-a2b56bf71b17 for instance with vm_state building and task_state spawning. [ 853.476165] env[61839]: DEBUG nova.compute.manager [req-a9cf5b32-62f6-45d9-902b-8bb45c920c4b req-470e97d1-0cfe-4b1d-b619-0abafebe1464 service nova] [instance: c996d7db-4b73-4445-9989-4efb2cd852e8] Received event network-changed-7792be0d-d99c-49ba-98bb-a2b56bf71b17 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 853.477986] env[61839]: DEBUG nova.compute.manager [req-a9cf5b32-62f6-45d9-902b-8bb45c920c4b req-470e97d1-0cfe-4b1d-b619-0abafebe1464 service nova] [instance: c996d7db-4b73-4445-9989-4efb2cd852e8] Refreshing instance network info cache due to event network-changed-7792be0d-d99c-49ba-98bb-a2b56bf71b17. {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 853.477986] env[61839]: DEBUG oslo_concurrency.lockutils [req-a9cf5b32-62f6-45d9-902b-8bb45c920c4b req-470e97d1-0cfe-4b1d-b619-0abafebe1464 service nova] Acquiring lock "refresh_cache-c996d7db-4b73-4445-9989-4efb2cd852e8" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 853.599967] env[61839]: DEBUG nova.network.neutron [None req-50defe20-1e8f-44c8-9564-69613a54695f tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] [instance: c996d7db-4b73-4445-9989-4efb2cd852e8] Updating instance_info_cache with network_info: [{"id": "7792be0d-d99c-49ba-98bb-a2b56bf71b17", "address": "fa:16:3e:fb:3f:21", "network": {"id": "8bda8ac4-b34c-4577-ae5e-07845e9e7428", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-74282814-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c7a300fe2748456bb4a522a4d7c0d0f4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d1e1e320-ec56-4fcc-b6e9-30aa210d3b36", "external-id": "nsx-vlan-transportzone-447", "segmentation_id": 447, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7792be0d-d9", "ovs_interfaceid": "7792be0d-d99c-49ba-98bb-a2b56bf71b17", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 853.644541] env[61839]: DEBUG oslo_vmware.api [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]5240624a-a22e-5e89-dd9e-b805924f66b8, 'name': SearchDatastore_Task, 'duration_secs': 0.025381} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 853.645810] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 853.646364] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] [instance: c180cc04-79da-4529-a905-1985a85b7cf5] Processing image e497cc62-282a-4a70-9770-22d80d8a1013 {{(pid=61839) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 853.646843] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 853.650295] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 853.650295] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 853.650295] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8866b5dd-7c00-4c16-a896-7866a83663b0 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.663344] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 853.663344] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61839) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 853.663344] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b4e1bccc-446b-48a2-b66b-5533804c5016 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.669696] env[61839]: DEBUG oslo_vmware.api [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Waiting for the task: (returnval){ [ 853.669696] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]521b57b1-9300-a62f-d9d4-8d3e3f4c2e40" [ 853.669696] env[61839]: _type = "Task" [ 853.669696] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 853.681030] env[61839]: DEBUG oslo_vmware.api [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]521b57b1-9300-a62f-d9d4-8d3e3f4c2e40, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.782159] env[61839]: DEBUG nova.scheduler.client.report [None req-eaf3cde2-930c-48e4-9dfc-5ed5eef56c0b tempest-ServerMetadataNegativeTestJSON-1868427009 tempest-ServerMetadataNegativeTestJSON-1868427009-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 853.796354] env[61839]: INFO nova.compute.manager [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] [instance: ce59c937-fc0b-464f-baaa-461c6f6c2d57] Took 27.44 seconds to build instance. [ 853.839558] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314521, 'name': CreateVM_Task, 'duration_secs': 0.39851} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 853.840013] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2cb53e37-8b0b-48b7-a973-061b91df46df] Created VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 853.841366] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0e959dc4-0080-49d1-93fb-cad2bd59d9d7 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 853.841642] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0e959dc4-0080-49d1-93fb-cad2bd59d9d7 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 853.842091] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0e959dc4-0080-49d1-93fb-cad2bd59d9d7 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 853.842442] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-33d2ec3f-430c-4e86-b09f-649e3b66df5a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.851763] env[61839]: DEBUG oslo_vmware.api [None req-0e959dc4-0080-49d1-93fb-cad2bd59d9d7 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] Waiting for the task: (returnval){ [ 853.851763] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]528da1c5-de98-2b97-4c74-a4ee27fa2258" [ 853.851763] env[61839]: _type = "Task" [ 853.851763] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 853.860127] env[61839]: DEBUG oslo_vmware.api [None req-0e959dc4-0080-49d1-93fb-cad2bd59d9d7 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]528da1c5-de98-2b97-4c74-a4ee27fa2258, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.903183] env[61839]: DEBUG oslo_concurrency.lockutils [None req-24cdc7ff-c17f-45b0-adfd-87c2d87da1aa tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 853.968210] env[61839]: DEBUG oslo_concurrency.lockutils [req-6a7b3e1f-3766-4f90-86d8-26f32a4b8f62 req-be6a63c0-61c6-4f7a-ab80-728980ed564f service nova] Releasing lock "refresh_cache-c180cc04-79da-4529-a905-1985a85b7cf5" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 853.968536] env[61839]: DEBUG nova.compute.manager [req-6a7b3e1f-3766-4f90-86d8-26f32a4b8f62 req-be6a63c0-61c6-4f7a-ab80-728980ed564f service nova] [instance: 2cb53e37-8b0b-48b7-a973-061b91df46df] Received event network-vif-plugged-aa6c596a-924c-4f3b-b846-88212a1fdbfe {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 853.968738] env[61839]: DEBUG oslo_concurrency.lockutils [req-6a7b3e1f-3766-4f90-86d8-26f32a4b8f62 req-be6a63c0-61c6-4f7a-ab80-728980ed564f service nova] Acquiring lock "2cb53e37-8b0b-48b7-a973-061b91df46df-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 853.969125] env[61839]: DEBUG oslo_concurrency.lockutils [req-6a7b3e1f-3766-4f90-86d8-26f32a4b8f62 req-be6a63c0-61c6-4f7a-ab80-728980ed564f service nova] Lock "2cb53e37-8b0b-48b7-a973-061b91df46df-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 853.969176] env[61839]: DEBUG oslo_concurrency.lockutils [req-6a7b3e1f-3766-4f90-86d8-26f32a4b8f62 req-be6a63c0-61c6-4f7a-ab80-728980ed564f service nova] Lock "2cb53e37-8b0b-48b7-a973-061b91df46df-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 853.969317] env[61839]: DEBUG nova.compute.manager [req-6a7b3e1f-3766-4f90-86d8-26f32a4b8f62 req-be6a63c0-61c6-4f7a-ab80-728980ed564f service nova] [instance: 2cb53e37-8b0b-48b7-a973-061b91df46df] No waiting events found dispatching network-vif-plugged-aa6c596a-924c-4f3b-b846-88212a1fdbfe {{(pid=61839) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 853.969601] env[61839]: WARNING nova.compute.manager [req-6a7b3e1f-3766-4f90-86d8-26f32a4b8f62 req-be6a63c0-61c6-4f7a-ab80-728980ed564f service nova] [instance: 2cb53e37-8b0b-48b7-a973-061b91df46df] Received unexpected event network-vif-plugged-aa6c596a-924c-4f3b-b846-88212a1fdbfe for instance with vm_state building and task_state spawning. 
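
The "Invoking Folder.CreateVM_Task", "Waiting for the task: (returnval){ ... }" and "progress is N%" records throughout this stretch all come from oslo.vmware's request/poll loop: invoke_api() issues the SOAP call and returns a task moref, then wait_for_task() polls it at task_poll_interval until the task succeeds or raises. A condensed sketch of that loop follows; the host, credentials, VM name, and resource-pool moref are placeholders, while the folder moref group-v281369 is borrowed from the log:

    from oslo_vmware import api, vim_util

    # Session setup: mirrors the VMwareAPISession._create_session records.
    session = api.VMwareAPISession(
        'vc1.example.test',               # placeholder vCenter host
        'administrator@vsphere.local',    # placeholder credentials
        'secret',
        api_retry_count=10,
        task_poll_interval=0.5,           # cadence of the "progress is N%" records
    )

    # Managed object references; 'group-v281369' appears in the log, the
    # resource-pool value is purely illustrative.
    vm_folder = vim_util.get_moref('group-v281369', 'Folder')
    res_pool = vim_util.get_moref('resgroup-1', 'ResourcePool')

    # Build a minimal VirtualMachineConfigSpec via the suds factory.
    factory = session.vim.client.factory
    config = factory.create('ns0:VirtualMachineConfigSpec')
    config.name = 'example-vm'
    config.files = factory.create('ns0:VirtualMachineFileInfo')
    config.files.vmPathName = '[datastore2] example-vm'

    # invoke_api() produces the "Invoking Folder.CreateVM_Task" record and
    # returns a task moref such as task-1314521.
    task_ref = session.invoke_api(session.vim, 'CreateVM_Task', vm_folder,
                                  config=config, pool=res_pool)

    # wait_for_task() loops in _poll_task, logging progress, and returns the
    # completed task info (or raises if the task ends in error).
    task_info = session.wait_for_task(task_ref)
    print(task_info.result)  # moref of the newly created VM

The same loop drives the SearchDatastore_Task and CreateFolder calls in the surrounding records; only the invoked method and the returned result type differ.
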
[ 853.969818] env[61839]: DEBUG nova.compute.manager [req-6a7b3e1f-3766-4f90-86d8-26f32a4b8f62 req-be6a63c0-61c6-4f7a-ab80-728980ed564f service nova] [instance: 2cb53e37-8b0b-48b7-a973-061b91df46df] Received event network-changed-aa6c596a-924c-4f3b-b846-88212a1fdbfe {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 853.969989] env[61839]: DEBUG nova.compute.manager [req-6a7b3e1f-3766-4f90-86d8-26f32a4b8f62 req-be6a63c0-61c6-4f7a-ab80-728980ed564f service nova] [instance: 2cb53e37-8b0b-48b7-a973-061b91df46df] Refreshing instance network info cache due to event network-changed-aa6c596a-924c-4f3b-b846-88212a1fdbfe. {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 853.970209] env[61839]: DEBUG oslo_concurrency.lockutils [req-6a7b3e1f-3766-4f90-86d8-26f32a4b8f62 req-be6a63c0-61c6-4f7a-ab80-728980ed564f service nova] Acquiring lock "refresh_cache-2cb53e37-8b0b-48b7-a973-061b91df46df" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 853.970358] env[61839]: DEBUG oslo_concurrency.lockutils [req-6a7b3e1f-3766-4f90-86d8-26f32a4b8f62 req-be6a63c0-61c6-4f7a-ab80-728980ed564f service nova] Acquired lock "refresh_cache-2cb53e37-8b0b-48b7-a973-061b91df46df" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 853.970745] env[61839]: DEBUG nova.network.neutron [req-6a7b3e1f-3766-4f90-86d8-26f32a4b8f62 req-be6a63c0-61c6-4f7a-ab80-728980ed564f service nova] [instance: 2cb53e37-8b0b-48b7-a973-061b91df46df] Refreshing network info cache for port aa6c596a-924c-4f3b-b846-88212a1fdbfe {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 854.101627] env[61839]: DEBUG oslo_concurrency.lockutils [None req-50defe20-1e8f-44c8-9564-69613a54695f tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Releasing lock "refresh_cache-c996d7db-4b73-4445-9989-4efb2cd852e8" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 854.102107] env[61839]: DEBUG nova.compute.manager [None req-50defe20-1e8f-44c8-9564-69613a54695f tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] [instance: c996d7db-4b73-4445-9989-4efb2cd852e8] Instance network_info: |[{"id": "7792be0d-d99c-49ba-98bb-a2b56bf71b17", "address": "fa:16:3e:fb:3f:21", "network": {"id": "8bda8ac4-b34c-4577-ae5e-07845e9e7428", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-74282814-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c7a300fe2748456bb4a522a4d7c0d0f4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d1e1e320-ec56-4fcc-b6e9-30aa210d3b36", "external-id": "nsx-vlan-transportzone-447", "segmentation_id": 447, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7792be0d-d9", "ovs_interfaceid": "7792be0d-d99c-49ba-98bb-a2b56bf71b17", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, 
"delegate_create": true, "meta": {}}]| {{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 854.102565] env[61839]: DEBUG oslo_concurrency.lockutils [req-a9cf5b32-62f6-45d9-902b-8bb45c920c4b req-470e97d1-0cfe-4b1d-b619-0abafebe1464 service nova] Acquired lock "refresh_cache-c996d7db-4b73-4445-9989-4efb2cd852e8" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 854.102823] env[61839]: DEBUG nova.network.neutron [req-a9cf5b32-62f6-45d9-902b-8bb45c920c4b req-470e97d1-0cfe-4b1d-b619-0abafebe1464 service nova] [instance: c996d7db-4b73-4445-9989-4efb2cd852e8] Refreshing network info cache for port 7792be0d-d99c-49ba-98bb-a2b56bf71b17 {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 854.104047] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-50defe20-1e8f-44c8-9564-69613a54695f tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] [instance: c996d7db-4b73-4445-9989-4efb2cd852e8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fb:3f:21', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd1e1e320-ec56-4fcc-b6e9-30aa210d3b36', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7792be0d-d99c-49ba-98bb-a2b56bf71b17', 'vif_model': 'vmxnet3'}] {{(pid=61839) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 854.112028] env[61839]: DEBUG oslo.service.loopingcall [None req-50defe20-1e8f-44c8-9564-69613a54695f tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 854.112716] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c996d7db-4b73-4445-9989-4efb2cd852e8] Creating VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 854.112890] env[61839]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c278923f-0cf5-44ae-b7ac-b6b556014dbc {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.135489] env[61839]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 854.135489] env[61839]: value = "task-1314522" [ 854.135489] env[61839]: _type = "Task" [ 854.135489] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 854.145788] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314522, 'name': CreateVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.183345] env[61839]: DEBUG oslo_vmware.api [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]521b57b1-9300-a62f-d9d4-8d3e3f4c2e40, 'name': SearchDatastore_Task, 'duration_secs': 0.011033} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 854.184290] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e1fa4be9-5d47-4df8-8810-a759beb0f23b {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.190842] env[61839]: DEBUG oslo_vmware.api [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Waiting for the task: (returnval){ [ 854.190842] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]525a3093-24e6-025a-91d2-300c2fa62312" [ 854.190842] env[61839]: _type = "Task" [ 854.190842] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 854.200432] env[61839]: DEBUG oslo_vmware.api [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]525a3093-24e6-025a-91d2-300c2fa62312, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.291135] env[61839]: DEBUG oslo_concurrency.lockutils [None req-eaf3cde2-930c-48e4-9dfc-5ed5eef56c0b tempest-ServerMetadataNegativeTestJSON-1868427009 tempest-ServerMetadataNegativeTestJSON-1868427009-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.957s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 854.293666] env[61839]: DEBUG oslo_concurrency.lockutils [None req-e98b0865-779a-457f-bdd0-ebf545e9b50c tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.338s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 854.295345] env[61839]: INFO nova.compute.claims [None req-e98b0865-779a-457f-bdd0-ebf545e9b50c tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 56369316-a445-4a2a-a0a6-967074104e19] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 854.299803] env[61839]: DEBUG nova.compute.manager [req-d6af9ecd-7604-42e6-9f79-8fd76a5c6d03 req-72f61273-7efb-45ca-916e-806ccba35090 service nova] [instance: 62959833-5834-4c0a-bf4e-3ac1157b3b0c] Received event network-vif-deleted-8d3be36a-f8da-4212-9f18-edefa1681f82 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 854.300732] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Lock "ce59c937-fc0b-464f-baaa-461c6f6c2d57" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 45.715s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 854.311797] env[61839]: INFO nova.scheduler.client.report [None req-eaf3cde2-930c-48e4-9dfc-5ed5eef56c0b tempest-ServerMetadataNegativeTestJSON-1868427009 
tempest-ServerMetadataNegativeTestJSON-1868427009-project-member] Deleted allocations for instance 406da948-71c7-4c28-9ee3-10af64b1ab51 [ 854.350454] env[61839]: DEBUG nova.compute.manager [None req-6de79a71-94ce-468a-ba01-6c81e70cb056 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: ef49a6f5-27c3-4595-af65-d6a5aa47d4e4] Start spawning the instance on the hypervisor. {{(pid=61839) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 854.373523] env[61839]: DEBUG oslo_vmware.api [None req-0e959dc4-0080-49d1-93fb-cad2bd59d9d7 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]528da1c5-de98-2b97-4c74-a4ee27fa2258, 'name': SearchDatastore_Task, 'duration_secs': 0.013508} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 854.373863] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0e959dc4-0080-49d1-93fb-cad2bd59d9d7 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 854.374178] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-0e959dc4-0080-49d1-93fb-cad2bd59d9d7 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] [instance: 2cb53e37-8b0b-48b7-a973-061b91df46df] Processing image e497cc62-282a-4a70-9770-22d80d8a1013 {{(pid=61839) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 854.374427] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0e959dc4-0080-49d1-93fb-cad2bd59d9d7 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 854.385159] env[61839]: DEBUG nova.virt.hardware [None req-6de79a71-94ce-468a-ba01-6c81e70cb056 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-18T16:51:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-18T16:50:57Z,direct_url=,disk_format='vmdk',id=e497cc62-282a-4a70-9770-22d80d8a1013,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a98e034276e44534a9feace637762da7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-18T16:50:58Z,virtual_size=,visibility=), allow threads: False {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 854.385424] env[61839]: DEBUG nova.virt.hardware [None req-6de79a71-94ce-468a-ba01-6c81e70cb056 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Flavor limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 854.385588] 
env[61839]: DEBUG nova.virt.hardware [None req-6de79a71-94ce-468a-ba01-6c81e70cb056 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Image limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 854.385777] env[61839]: DEBUG nova.virt.hardware [None req-6de79a71-94ce-468a-ba01-6c81e70cb056 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Flavor pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 854.385934] env[61839]: DEBUG nova.virt.hardware [None req-6de79a71-94ce-468a-ba01-6c81e70cb056 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Image pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 854.386161] env[61839]: DEBUG nova.virt.hardware [None req-6de79a71-94ce-468a-ba01-6c81e70cb056 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 854.386396] env[61839]: DEBUG nova.virt.hardware [None req-6de79a71-94ce-468a-ba01-6c81e70cb056 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 854.386566] env[61839]: DEBUG nova.virt.hardware [None req-6de79a71-94ce-468a-ba01-6c81e70cb056 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 854.386739] env[61839]: DEBUG nova.virt.hardware [None req-6de79a71-94ce-468a-ba01-6c81e70cb056 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Got 1 possible topologies {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 854.386906] env[61839]: DEBUG nova.virt.hardware [None req-6de79a71-94ce-468a-ba01-6c81e70cb056 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 854.387107] env[61839]: DEBUG nova.virt.hardware [None req-6de79a71-94ce-468a-ba01-6c81e70cb056 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 854.388276] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e70c0a8b-8ca4-496b-879d-dafd3777e79b {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.397679] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-564b59be-ee0e-402f-92be-ec9b82634c87 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.647645] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314522, 'name': CreateVM_Task, 'duration_secs': 0.357553} completed 
successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 854.647859] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c996d7db-4b73-4445-9989-4efb2cd852e8] Created VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 854.648675] env[61839]: DEBUG oslo_concurrency.lockutils [None req-50defe20-1e8f-44c8-9564-69613a54695f tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 854.648742] env[61839]: DEBUG oslo_concurrency.lockutils [None req-50defe20-1e8f-44c8-9564-69613a54695f tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 854.649079] env[61839]: DEBUG oslo_concurrency.lockutils [None req-50defe20-1e8f-44c8-9564-69613a54695f tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 854.649375] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aecc85a2-961f-4c33-9c5b-43cfcce45c6c {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.655253] env[61839]: DEBUG oslo_vmware.api [None req-50defe20-1e8f-44c8-9564-69613a54695f tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Waiting for the task: (returnval){ [ 854.655253] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]529798f7-e665-4f90-3fe4-34481f7831ee" [ 854.655253] env[61839]: _type = "Task" [ 854.655253] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 854.664257] env[61839]: DEBUG oslo_vmware.api [None req-50defe20-1e8f-44c8-9564-69613a54695f tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]529798f7-e665-4f90-3fe4-34481f7831ee, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.668670] env[61839]: DEBUG oslo_concurrency.lockutils [None req-b23234c0-1dd9-455d-88df-914017787547 tempest-ServerActionsV293TestJSON-151230769 tempest-ServerActionsV293TestJSON-151230769-project-member] Acquiring lock "d187e75f-39a9-467b-b5ef-e2772d9b71af" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 854.668930] env[61839]: DEBUG oslo_concurrency.lockutils [None req-b23234c0-1dd9-455d-88df-914017787547 tempest-ServerActionsV293TestJSON-151230769 tempest-ServerActionsV293TestJSON-151230769-project-member] Lock "d187e75f-39a9-467b-b5ef-e2772d9b71af" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 854.702214] env[61839]: DEBUG oslo_vmware.api [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]525a3093-24e6-025a-91d2-300c2fa62312, 'name': SearchDatastore_Task, 'duration_secs': 0.014019} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 854.702507] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 854.702748] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk to [datastore2] c180cc04-79da-4529-a905-1985a85b7cf5/c180cc04-79da-4529-a905-1985a85b7cf5.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 854.703050] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0e959dc4-0080-49d1-93fb-cad2bd59d9d7 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 854.703208] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-0e959dc4-0080-49d1-93fb-cad2bd59d9d7 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 854.703437] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-05fe5f34-ce4e-4911-9c88-c53f0dbc23ff {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
854.705605] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-775cb66f-eec2-4034-a1db-fbed45290ea2 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.714080] env[61839]: DEBUG oslo_vmware.api [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Waiting for the task: (returnval){ [ 854.714080] env[61839]: value = "task-1314523" [ 854.714080] env[61839]: _type = "Task" [ 854.714080] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 854.718535] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-0e959dc4-0080-49d1-93fb-cad2bd59d9d7 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 854.718729] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-0e959dc4-0080-49d1-93fb-cad2bd59d9d7 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61839) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 854.719880] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-77a38b51-77ad-45de-83f1-61fd85920489 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.727037] env[61839]: DEBUG oslo_vmware.api [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Task: {'id': task-1314523, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.730454] env[61839]: DEBUG oslo_vmware.api [None req-0e959dc4-0080-49d1-93fb-cad2bd59d9d7 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] Waiting for the task: (returnval){ [ 854.730454] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52e7eb80-fd0b-ccbf-4803-7213319ae086" [ 854.730454] env[61839]: _type = "Task" [ 854.730454] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 854.739315] env[61839]: DEBUG oslo_vmware.api [None req-0e959dc4-0080-49d1-93fb-cad2bd59d9d7 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52e7eb80-fd0b-ccbf-4803-7213319ae086, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.744964] env[61839]: DEBUG nova.network.neutron [None req-6de79a71-94ce-468a-ba01-6c81e70cb056 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: ef49a6f5-27c3-4595-af65-d6a5aa47d4e4] Successfully updated port: b7032df2-ea05-48a6-9ba7-368194a96584 {{(pid=61839) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 854.820260] env[61839]: DEBUG oslo_concurrency.lockutils [None req-eaf3cde2-930c-48e4-9dfc-5ed5eef56c0b tempest-ServerMetadataNegativeTestJSON-1868427009 tempest-ServerMetadataNegativeTestJSON-1868427009-project-member] Lock "406da948-71c7-4c28-9ee3-10af64b1ab51" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 21.201s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 854.869408] env[61839]: DEBUG nova.network.neutron [req-6a7b3e1f-3766-4f90-86d8-26f32a4b8f62 req-be6a63c0-61c6-4f7a-ab80-728980ed564f service nova] [instance: 2cb53e37-8b0b-48b7-a973-061b91df46df] Updated VIF entry in instance network info cache for port aa6c596a-924c-4f3b-b846-88212a1fdbfe. {{(pid=61839) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 854.869851] env[61839]: DEBUG nova.network.neutron [req-6a7b3e1f-3766-4f90-86d8-26f32a4b8f62 req-be6a63c0-61c6-4f7a-ab80-728980ed564f service nova] [instance: 2cb53e37-8b0b-48b7-a973-061b91df46df] Updating instance_info_cache with network_info: [{"id": "9b5b64c2-c5e6-4188-918b-22b9150363f0", "address": "fa:16:3e:bb:14:9c", "network": {"id": "c9ba50a0-6002-4795-a200-f59936edfe3a", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1274347994", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.45", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b3d6228b03c4412695ef822ed618a27a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b4734e5e-2a76-4bda-8905-70c9bf9e007f", "external-id": "nsx-vlan-transportzone-122", "segmentation_id": 122, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9b5b64c2-c5", "ovs_interfaceid": "9b5b64c2-c5e6-4188-918b-22b9150363f0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "aa6c596a-924c-4f3b-b846-88212a1fdbfe", "address": "fa:16:3e:6e:bc:bf", "network": {"id": "3b0c3542-7683-45fe-9fca-11e563765d31", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-23398633", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.217", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "b3d6228b03c4412695ef822ed618a27a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": 
"b00fe87c-d828-442f-bd09-e9018c468557", "external-id": "nsx-vlan-transportzone-7", "segmentation_id": 7, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaa6c596a-92", "ovs_interfaceid": "aa6c596a-924c-4f3b-b846-88212a1fdbfe", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 855.168986] env[61839]: DEBUG oslo_vmware.api [None req-50defe20-1e8f-44c8-9564-69613a54695f tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]529798f7-e665-4f90-3fe4-34481f7831ee, 'name': SearchDatastore_Task, 'duration_secs': 0.027406} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 855.169698] env[61839]: DEBUG oslo_concurrency.lockutils [None req-50defe20-1e8f-44c8-9564-69613a54695f tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 855.170044] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-50defe20-1e8f-44c8-9564-69613a54695f tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] [instance: c996d7db-4b73-4445-9989-4efb2cd852e8] Processing image e497cc62-282a-4a70-9770-22d80d8a1013 {{(pid=61839) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 855.170300] env[61839]: DEBUG oslo_concurrency.lockutils [None req-50defe20-1e8f-44c8-9564-69613a54695f tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 855.170456] env[61839]: DEBUG oslo_concurrency.lockutils [None req-50defe20-1e8f-44c8-9564-69613a54695f tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 855.170716] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-50defe20-1e8f-44c8-9564-69613a54695f tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 855.171132] env[61839]: DEBUG nova.compute.manager [None req-b23234c0-1dd9-455d-88df-914017787547 tempest-ServerActionsV293TestJSON-151230769 tempest-ServerActionsV293TestJSON-151230769-project-member] [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af] Starting instance... 
{{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 855.173978] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-45f9f9c8-0c06-4bbc-a74b-05f5425e1837 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.193775] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-50defe20-1e8f-44c8-9564-69613a54695f tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 855.194084] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-50defe20-1e8f-44c8-9564-69613a54695f tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61839) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 855.194920] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eeee0681-6c20-4a42-b240-6c1b4285aa5b {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.202508] env[61839]: DEBUG nova.network.neutron [req-a9cf5b32-62f6-45d9-902b-8bb45c920c4b req-470e97d1-0cfe-4b1d-b619-0abafebe1464 service nova] [instance: c996d7db-4b73-4445-9989-4efb2cd852e8] Updated VIF entry in instance network info cache for port 7792be0d-d99c-49ba-98bb-a2b56bf71b17. {{(pid=61839) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 855.203183] env[61839]: DEBUG nova.network.neutron [req-a9cf5b32-62f6-45d9-902b-8bb45c920c4b req-470e97d1-0cfe-4b1d-b619-0abafebe1464 service nova] [instance: c996d7db-4b73-4445-9989-4efb2cd852e8] Updating instance_info_cache with network_info: [{"id": "7792be0d-d99c-49ba-98bb-a2b56bf71b17", "address": "fa:16:3e:fb:3f:21", "network": {"id": "8bda8ac4-b34c-4577-ae5e-07845e9e7428", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-74282814-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c7a300fe2748456bb4a522a4d7c0d0f4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d1e1e320-ec56-4fcc-b6e9-30aa210d3b36", "external-id": "nsx-vlan-transportzone-447", "segmentation_id": 447, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7792be0d-d9", "ovs_interfaceid": "7792be0d-d99c-49ba-98bb-a2b56bf71b17", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 855.205393] env[61839]: DEBUG oslo_vmware.api [None req-50defe20-1e8f-44c8-9564-69613a54695f tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Waiting for the task: (returnval){ [ 855.205393] env[61839]: value = 
"session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52728a7d-ead5-2ce5-6cfe-28e47fd05d32" [ 855.205393] env[61839]: _type = "Task" [ 855.205393] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 855.217138] env[61839]: DEBUG oslo_vmware.api [None req-50defe20-1e8f-44c8-9564-69613a54695f tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52728a7d-ead5-2ce5-6cfe-28e47fd05d32, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.228755] env[61839]: DEBUG oslo_vmware.api [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Task: {'id': task-1314523, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.242057] env[61839]: DEBUG oslo_vmware.api [None req-0e959dc4-0080-49d1-93fb-cad2bd59d9d7 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52e7eb80-fd0b-ccbf-4803-7213319ae086, 'name': SearchDatastore_Task, 'duration_secs': 0.011619} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 855.243365] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-349a4568-43a8-4ad0-9027-136c1c542013 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.247200] env[61839]: DEBUG oslo_concurrency.lockutils [None req-6de79a71-94ce-468a-ba01-6c81e70cb056 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Acquiring lock "refresh_cache-ef49a6f5-27c3-4595-af65-d6a5aa47d4e4" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 855.247373] env[61839]: DEBUG oslo_concurrency.lockutils [None req-6de79a71-94ce-468a-ba01-6c81e70cb056 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Acquired lock "refresh_cache-ef49a6f5-27c3-4595-af65-d6a5aa47d4e4" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 855.247535] env[61839]: DEBUG nova.network.neutron [None req-6de79a71-94ce-468a-ba01-6c81e70cb056 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: ef49a6f5-27c3-4595-af65-d6a5aa47d4e4] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 855.250644] env[61839]: DEBUG oslo_vmware.api [None req-0e959dc4-0080-49d1-93fb-cad2bd59d9d7 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] Waiting for the task: (returnval){ [ 855.250644] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52bbcf56-2134-fe0f-42db-c537f56b7337" [ 855.250644] env[61839]: _type = "Task" [ 855.250644] env[61839]: } to complete. 
{{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 855.262228] env[61839]: DEBUG oslo_vmware.api [None req-0e959dc4-0080-49d1-93fb-cad2bd59d9d7 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52bbcf56-2134-fe0f-42db-c537f56b7337, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.375136] env[61839]: DEBUG oslo_concurrency.lockutils [req-6a7b3e1f-3766-4f90-86d8-26f32a4b8f62 req-be6a63c0-61c6-4f7a-ab80-728980ed564f service nova] Releasing lock "refresh_cache-2cb53e37-8b0b-48b7-a973-061b91df46df" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 855.502341] env[61839]: DEBUG nova.compute.manager [req-9195ac2e-3671-4aab-b538-278d87d9aeb3 req-9fc4375c-305b-4459-a00a-7eea572eaafc service nova] [instance: ef49a6f5-27c3-4595-af65-d6a5aa47d4e4] Received event network-vif-plugged-b7032df2-ea05-48a6-9ba7-368194a96584 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 855.502341] env[61839]: DEBUG oslo_concurrency.lockutils [req-9195ac2e-3671-4aab-b538-278d87d9aeb3 req-9fc4375c-305b-4459-a00a-7eea572eaafc service nova] Acquiring lock "ef49a6f5-27c3-4595-af65-d6a5aa47d4e4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 855.502341] env[61839]: DEBUG oslo_concurrency.lockutils [req-9195ac2e-3671-4aab-b538-278d87d9aeb3 req-9fc4375c-305b-4459-a00a-7eea572eaafc service nova] Lock "ef49a6f5-27c3-4595-af65-d6a5aa47d4e4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 855.502915] env[61839]: DEBUG oslo_concurrency.lockutils [req-9195ac2e-3671-4aab-b538-278d87d9aeb3 req-9fc4375c-305b-4459-a00a-7eea572eaafc service nova] Lock "ef49a6f5-27c3-4595-af65-d6a5aa47d4e4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 855.503236] env[61839]: DEBUG nova.compute.manager [req-9195ac2e-3671-4aab-b538-278d87d9aeb3 req-9fc4375c-305b-4459-a00a-7eea572eaafc service nova] [instance: ef49a6f5-27c3-4595-af65-d6a5aa47d4e4] No waiting events found dispatching network-vif-plugged-b7032df2-ea05-48a6-9ba7-368194a96584 {{(pid=61839) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 855.503536] env[61839]: WARNING nova.compute.manager [req-9195ac2e-3671-4aab-b538-278d87d9aeb3 req-9fc4375c-305b-4459-a00a-7eea572eaafc service nova] [instance: ef49a6f5-27c3-4595-af65-d6a5aa47d4e4] Received unexpected event network-vif-plugged-b7032df2-ea05-48a6-9ba7-368194a96584 for instance with vm_state building and task_state spawning. 
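The entries just above show Nova's external-event handshake: Neutron reports network-vif-plugged / network-changed for port b7032df2-ea05-48a6-9ba7-368194a96584, and because the spawning thread has not registered a waiter for that event yet, pop_instance_event finds nothing and the event is logged as unexpected rather than dispatched. A minimal sketch of that pop-or-warn pattern follows; it is illustrative only, not Nova's actual code, and the class and function names in it are invented for the example.

    import threading

    class InstanceEvents:
        """Toy registry keyed by event name, e.g. 'network-vif-plugged-<port>'."""
        def __init__(self):
            self._lock = threading.Lock()   # stands in for the per-instance "-events" lock above
            self._waiters = {}              # event name -> threading.Event

        def prepare_for_event(self, name):
            # A thread that expects the event registers first, then blocks on waiter.wait().
            waiter = threading.Event()
            with self._lock:
                self._waiters[name] = waiter
            return waiter

        def pop_instance_event(self, name):
            with self._lock:
                return self._waiters.pop(name, None)

    events = InstanceEvents()

    def external_instance_event(name):
        waiter = events.pop_instance_event(name)
        if waiter is None:
            # Matches the WARNING above: the event arrived before anyone waited on it.
            print('WARNING: received unexpected event %s' % name)
        else:
            waiter.set()    # wakes the thread blocked on waiter.wait()

    external_instance_event('network-vif-plugged-b7032df2-ea05-48a6-9ba7-368194a96584')

During a build this is harmless, because the compute manager refreshes the instance network info cache in response to the network-changed event anyway, which is exactly what the following entries do.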
[ 855.504256] env[61839]: DEBUG nova.compute.manager [req-9195ac2e-3671-4aab-b538-278d87d9aeb3 req-9fc4375c-305b-4459-a00a-7eea572eaafc service nova] [instance: ef49a6f5-27c3-4595-af65-d6a5aa47d4e4] Received event network-changed-b7032df2-ea05-48a6-9ba7-368194a96584 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 855.504682] env[61839]: DEBUG nova.compute.manager [req-9195ac2e-3671-4aab-b538-278d87d9aeb3 req-9fc4375c-305b-4459-a00a-7eea572eaafc service nova] [instance: ef49a6f5-27c3-4595-af65-d6a5aa47d4e4] Refreshing instance network info cache due to event network-changed-b7032df2-ea05-48a6-9ba7-368194a96584. {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 855.504965] env[61839]: DEBUG oslo_concurrency.lockutils [req-9195ac2e-3671-4aab-b538-278d87d9aeb3 req-9fc4375c-305b-4459-a00a-7eea572eaafc service nova] Acquiring lock "refresh_cache-ef49a6f5-27c3-4595-af65-d6a5aa47d4e4" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 855.642728] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8601b080-688a-4b79-93cb-ddf059ae9b7d {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.653277] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3510d8f5-7709-4d19-9e69-e99e80e2285c {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.694865] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94d849e6-3d61-4258-9987-c9508f58d071 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.704434] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31328ad0-1056-4ddf-9621-4158a2b1df47 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.709382] env[61839]: DEBUG oslo_concurrency.lockutils [None req-b23234c0-1dd9-455d-88df-914017787547 tempest-ServerActionsV293TestJSON-151230769 tempest-ServerActionsV293TestJSON-151230769-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 855.709870] env[61839]: DEBUG oslo_concurrency.lockutils [req-a9cf5b32-62f6-45d9-902b-8bb45c920c4b req-470e97d1-0cfe-4b1d-b619-0abafebe1464 service nova] Releasing lock "refresh_cache-c996d7db-4b73-4445-9989-4efb2cd852e8" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 855.728316] env[61839]: DEBUG nova.compute.provider_tree [None req-e98b0865-779a-457f-bdd0-ebf545e9b50c tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 855.729480] env[61839]: DEBUG oslo_vmware.api [None req-50defe20-1e8f-44c8-9564-69613a54695f tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Task: {'id': 
session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52728a7d-ead5-2ce5-6cfe-28e47fd05d32, 'name': SearchDatastore_Task, 'duration_secs': 0.025683} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 855.734097] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-004c3635-ec74-4ef9-9dd4-17702142d3dd {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.744401] env[61839]: DEBUG oslo_vmware.api [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Task: {'id': task-1314523, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.687004} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 855.744848] env[61839]: DEBUG oslo_vmware.api [None req-50defe20-1e8f-44c8-9564-69613a54695f tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Waiting for the task: (returnval){ [ 855.744848] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]5298d24d-6c24-ea5d-baee-b6cd5fa7c009" [ 855.744848] env[61839]: _type = "Task" [ 855.744848] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 855.745642] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk to [datastore2] c180cc04-79da-4529-a905-1985a85b7cf5/c180cc04-79da-4529-a905-1985a85b7cf5.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 855.745642] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] [instance: c180cc04-79da-4529-a905-1985a85b7cf5] Extending root virtual disk to 1048576 {{(pid=61839) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 855.745878] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b7fdc5d4-8236-48f1-8878-cc1bf333bd72 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.760785] env[61839]: DEBUG oslo_vmware.api [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Waiting for the task: (returnval){ [ 855.760785] env[61839]: value = "task-1314524" [ 855.760785] env[61839]: _type = "Task" [ 855.760785] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 855.769450] env[61839]: DEBUG oslo_vmware.api [None req-50defe20-1e8f-44c8-9564-69613a54695f tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]5298d24d-6c24-ea5d-baee-b6cd5fa7c009, 'name': SearchDatastore_Task, 'duration_secs': 0.013063} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 855.769450] env[61839]: DEBUG oslo_vmware.api [None req-0e959dc4-0080-49d1-93fb-cad2bd59d9d7 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52bbcf56-2134-fe0f-42db-c537f56b7337, 'name': SearchDatastore_Task, 'duration_secs': 0.059978} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 855.772870] env[61839]: DEBUG oslo_concurrency.lockutils [None req-50defe20-1e8f-44c8-9564-69613a54695f tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 855.773160] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-50defe20-1e8f-44c8-9564-69613a54695f tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk to [datastore1] c996d7db-4b73-4445-9989-4efb2cd852e8/c996d7db-4b73-4445-9989-4efb2cd852e8.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 855.773452] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0e959dc4-0080-49d1-93fb-cad2bd59d9d7 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 855.773773] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e959dc4-0080-49d1-93fb-cad2bd59d9d7 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk to [datastore2] 2cb53e37-8b0b-48b7-a973-061b91df46df/2cb53e37-8b0b-48b7-a973-061b91df46df.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 855.773931] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-86a14a97-e4d8-4fdb-bbb4-724dc3a08332 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.776255] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-15bf9214-7561-4e82-8f62-8438a73c771d {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.784521] env[61839]: DEBUG oslo_vmware.api [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Task: {'id': task-1314524, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.787234] env[61839]: DEBUG oslo_vmware.api [None req-50defe20-1e8f-44c8-9564-69613a54695f tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Waiting for the task: (returnval){ [ 855.787234] env[61839]: value = "task-1314525" [ 855.787234] env[61839]: _type = "Task" [ 855.787234] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 855.787526] env[61839]: DEBUG oslo_vmware.api [None req-0e959dc4-0080-49d1-93fb-cad2bd59d9d7 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] Waiting for the task: (returnval){ [ 855.787526] env[61839]: value = "task-1314526" [ 855.787526] env[61839]: _type = "Task" [ 855.787526] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 855.802403] env[61839]: DEBUG oslo_vmware.api [None req-50defe20-1e8f-44c8-9564-69613a54695f tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Task: {'id': task-1314525, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.802731] env[61839]: DEBUG oslo_vmware.api [None req-0e959dc4-0080-49d1-93fb-cad2bd59d9d7 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] Task: {'id': task-1314526, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.845347] env[61839]: DEBUG nova.network.neutron [None req-6de79a71-94ce-468a-ba01-6c81e70cb056 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: ef49a6f5-27c3-4595-af65-d6a5aa47d4e4] Instance cache missing network info. 
{{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 856.086208] env[61839]: DEBUG nova.network.neutron [None req-6de79a71-94ce-468a-ba01-6c81e70cb056 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: ef49a6f5-27c3-4595-af65-d6a5aa47d4e4] Updating instance_info_cache with network_info: [{"id": "b7032df2-ea05-48a6-9ba7-368194a96584", "address": "fa:16:3e:9f:98:d1", "network": {"id": "8c9e6595-896e-44e0-996a-ff86bae5ad8e", "bridge": "br-int", "label": "tempest-ImagesTestJSON-788251660-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "251b0d2531ba4f14a2eb6ea75382c418", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ce17e10e-2fb0-4191-afee-e2b89fa15074", "external-id": "nsx-vlan-transportzone-352", "segmentation_id": 352, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb7032df2-ea", "ovs_interfaceid": "b7032df2-ea05-48a6-9ba7-368194a96584", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 856.237733] env[61839]: DEBUG nova.scheduler.client.report [None req-e98b0865-779a-457f-bdd0-ebf545e9b50c tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 856.282029] env[61839]: DEBUG oslo_vmware.api [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Task: {'id': task-1314524, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.095116} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 856.282029] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] [instance: c180cc04-79da-4529-a905-1985a85b7cf5] Extended root virtual disk {{(pid=61839) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 856.283256] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-702ca63e-e0dc-4573-9388-75318720f8d5 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.334530] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] [instance: c180cc04-79da-4529-a905-1985a85b7cf5] Reconfiguring VM instance instance-00000048 to attach disk [datastore2] c180cc04-79da-4529-a905-1985a85b7cf5/c180cc04-79da-4529-a905-1985a85b7cf5.vmdk or device None with type sparse {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 856.335833] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-212f4f26-9bbb-4c00-90c4-f1f99881d9ab {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.356905] env[61839]: DEBUG oslo_vmware.api [None req-50defe20-1e8f-44c8-9564-69613a54695f tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Task: {'id': task-1314525, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.357416] env[61839]: DEBUG oslo_vmware.api [None req-0e959dc4-0080-49d1-93fb-cad2bd59d9d7 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] Task: {'id': task-1314526, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.365485] env[61839]: DEBUG oslo_vmware.api [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Waiting for the task: (returnval){ [ 856.365485] env[61839]: value = "task-1314527" [ 856.365485] env[61839]: _type = "Task" [ 856.365485] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 856.375638] env[61839]: DEBUG oslo_vmware.api [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Task: {'id': task-1314527, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.590754] env[61839]: DEBUG oslo_concurrency.lockutils [None req-6de79a71-94ce-468a-ba01-6c81e70cb056 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Releasing lock "refresh_cache-ef49a6f5-27c3-4595-af65-d6a5aa47d4e4" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 856.591054] env[61839]: DEBUG nova.compute.manager [None req-6de79a71-94ce-468a-ba01-6c81e70cb056 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: ef49a6f5-27c3-4595-af65-d6a5aa47d4e4] Instance network_info: |[{"id": "b7032df2-ea05-48a6-9ba7-368194a96584", "address": "fa:16:3e:9f:98:d1", "network": {"id": "8c9e6595-896e-44e0-996a-ff86bae5ad8e", "bridge": "br-int", "label": "tempest-ImagesTestJSON-788251660-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "251b0d2531ba4f14a2eb6ea75382c418", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ce17e10e-2fb0-4191-afee-e2b89fa15074", "external-id": "nsx-vlan-transportzone-352", "segmentation_id": 352, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb7032df2-ea", "ovs_interfaceid": "b7032df2-ea05-48a6-9ba7-368194a96584", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 856.591407] env[61839]: DEBUG oslo_concurrency.lockutils [req-9195ac2e-3671-4aab-b538-278d87d9aeb3 req-9fc4375c-305b-4459-a00a-7eea572eaafc service nova] Acquired lock "refresh_cache-ef49a6f5-27c3-4595-af65-d6a5aa47d4e4" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 856.591593] env[61839]: DEBUG nova.network.neutron [req-9195ac2e-3671-4aab-b538-278d87d9aeb3 req-9fc4375c-305b-4459-a00a-7eea572eaafc service nova] [instance: ef49a6f5-27c3-4595-af65-d6a5aa47d4e4] Refreshing network info cache for port b7032df2-ea05-48a6-9ba7-368194a96584 {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 856.592842] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-6de79a71-94ce-468a-ba01-6c81e70cb056 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: ef49a6f5-27c3-4595-af65-d6a5aa47d4e4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9f:98:d1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ce17e10e-2fb0-4191-afee-e2b89fa15074', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b7032df2-ea05-48a6-9ba7-368194a96584', 'vif_model': 'vmxnet3'}] {{(pid=61839) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 856.606479] env[61839]: DEBUG oslo.service.loopingcall [None req-6de79a71-94ce-468a-ba01-6c81e70cb056 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to 
return. {{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 856.609806] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ef49a6f5-27c3-4595-af65-d6a5aa47d4e4] Creating VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 856.610422] env[61839]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1e0221c1-d2ed-463b-8880-ed019939f337 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.635785] env[61839]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 856.635785] env[61839]: value = "task-1314528" [ 856.635785] env[61839]: _type = "Task" [ 856.635785] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 856.646279] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314528, 'name': CreateVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.744546] env[61839]: DEBUG oslo_concurrency.lockutils [None req-e98b0865-779a-457f-bdd0-ebf545e9b50c tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.451s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 856.745669] env[61839]: DEBUG nova.compute.manager [None req-e98b0865-779a-457f-bdd0-ebf545e9b50c tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 56369316-a445-4a2a-a0a6-967074104e19] Start building networks asynchronously for instance. {{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 856.748519] env[61839]: DEBUG oslo_concurrency.lockutils [None req-29672a53-c7e3-4fb5-8183-aa50247b3c95 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 14.681s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 856.748764] env[61839]: DEBUG nova.objects.instance [None req-29672a53-c7e3-4fb5-8183-aa50247b3c95 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 047080fa-8781-47b1-89d8-2e4c8031b164] Trying to apply a migration context that does not seem to be set for this instance {{(pid=61839) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 856.817487] env[61839]: DEBUG oslo_vmware.api [None req-50defe20-1e8f-44c8-9564-69613a54695f tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Task: {'id': task-1314525, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.817866] env[61839]: DEBUG oslo_vmware.api [None req-0e959dc4-0080-49d1-93fb-cad2bd59d9d7 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] Task: {'id': task-1314526, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.884073] env[61839]: DEBUG oslo_vmware.api [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Task: {'id': task-1314527, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.126586] env[61839]: DEBUG nova.network.neutron [req-9195ac2e-3671-4aab-b538-278d87d9aeb3 req-9fc4375c-305b-4459-a00a-7eea572eaafc service nova] [instance: ef49a6f5-27c3-4595-af65-d6a5aa47d4e4] Updated VIF entry in instance network info cache for port b7032df2-ea05-48a6-9ba7-368194a96584. {{(pid=61839) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 857.127234] env[61839]: DEBUG nova.network.neutron [req-9195ac2e-3671-4aab-b538-278d87d9aeb3 req-9fc4375c-305b-4459-a00a-7eea572eaafc service nova] [instance: ef49a6f5-27c3-4595-af65-d6a5aa47d4e4] Updating instance_info_cache with network_info: [{"id": "b7032df2-ea05-48a6-9ba7-368194a96584", "address": "fa:16:3e:9f:98:d1", "network": {"id": "8c9e6595-896e-44e0-996a-ff86bae5ad8e", "bridge": "br-int", "label": "tempest-ImagesTestJSON-788251660-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "251b0d2531ba4f14a2eb6ea75382c418", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ce17e10e-2fb0-4191-afee-e2b89fa15074", "external-id": "nsx-vlan-transportzone-352", "segmentation_id": 352, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb7032df2-ea", "ovs_interfaceid": "b7032df2-ea05-48a6-9ba7-368194a96584", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 857.150826] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314528, 'name': CreateVM_Task} progress is 99%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.255028] env[61839]: DEBUG nova.compute.utils [None req-e98b0865-779a-457f-bdd0-ebf545e9b50c tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Using /dev/sd instead of None {{(pid=61839) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 857.259227] env[61839]: DEBUG nova.compute.manager [None req-e98b0865-779a-457f-bdd0-ebf545e9b50c tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 56369316-a445-4a2a-a0a6-967074104e19] Allocating IP information in the background. 
{{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 857.259227] env[61839]: DEBUG nova.network.neutron [None req-e98b0865-779a-457f-bdd0-ebf545e9b50c tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 56369316-a445-4a2a-a0a6-967074104e19] allocate_for_instance() {{(pid=61839) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 857.305379] env[61839]: DEBUG oslo_vmware.api [None req-50defe20-1e8f-44c8-9564-69613a54695f tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Task: {'id': task-1314525, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.390034} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 857.309010] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-50defe20-1e8f-44c8-9564-69613a54695f tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk to [datastore1] c996d7db-4b73-4445-9989-4efb2cd852e8/c996d7db-4b73-4445-9989-4efb2cd852e8.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 857.312023] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-50defe20-1e8f-44c8-9564-69613a54695f tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] [instance: c996d7db-4b73-4445-9989-4efb2cd852e8] Extending root virtual disk to 1048576 {{(pid=61839) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 857.312023] env[61839]: DEBUG oslo_vmware.api [None req-0e959dc4-0080-49d1-93fb-cad2bd59d9d7 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] Task: {'id': task-1314526, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.231376} completed successfully. 
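The CopyVirtualDisk records above show the image-cache layout this driver uses: a template VMDK sits at devstack-image-cache_base/<image-id>/<image-id>.vmdk and is copied to <instance-uuid>/<instance-uuid>.vmdk on the same datastore. A sketch of that path convention (the helper names are illustrative, not Nova's API):

    def cached_image_path(datastore, image_id):
        # Template disk kept in the per-datastore image cache.
        return "[%s] devstack-image-cache_base/%s/%s.vmdk" % (
            datastore, image_id, image_id)

    def instance_disk_path(datastore, instance_uuid):
        # Root-disk location for a spawned instance.
        return "[%s] %s/%s.vmdk" % (datastore, instance_uuid, instance_uuid)

    src = cached_image_path("datastore1", "e497cc62-282a-4a70-9770-22d80d8a1013")
    dst = instance_disk_path("datastore1", "c996d7db-4b73-4445-9989-4efb2cd852e8")
    print(src)  # matches the CopyVirtualDisk source above
    print(dst)  # matches the destination

Keeping one flat template per datastore is what lets the concurrent builds in this log skip the Glance download and go straight to an intra-datastore copy.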
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 857.312023] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3ed5b3fa-a5ff-4c89-9e84-eeb42d13394c {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.313191] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e959dc4-0080-49d1-93fb-cad2bd59d9d7 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk to [datastore2] 2cb53e37-8b0b-48b7-a973-061b91df46df/2cb53e37-8b0b-48b7-a973-061b91df46df.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 857.313828] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-0e959dc4-0080-49d1-93fb-cad2bd59d9d7 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] [instance: 2cb53e37-8b0b-48b7-a973-061b91df46df] Extending root virtual disk to 1048576 {{(pid=61839) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 857.314298] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ccf3618e-f516-4d0c-82a2-e65de15e20df {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.329476] env[61839]: DEBUG oslo_vmware.api [None req-0e959dc4-0080-49d1-93fb-cad2bd59d9d7 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] Waiting for the task: (returnval){ [ 857.329476] env[61839]: value = "task-1314530" [ 857.329476] env[61839]: _type = "Task" [ 857.329476] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 857.329923] env[61839]: DEBUG oslo_vmware.api [None req-50defe20-1e8f-44c8-9564-69613a54695f tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Waiting for the task: (returnval){ [ 857.329923] env[61839]: value = "task-1314529" [ 857.329923] env[61839]: _type = "Task" [ 857.329923] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 857.339653] env[61839]: DEBUG nova.policy [None req-e98b0865-779a-457f-bdd0-ebf545e9b50c tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5e327fdc367e49559dc1ef4862ca1e2c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '223d94c193814f649b5d1f35e3756071', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61839) authorize /opt/stack/nova/nova/policy.py:201}} [ 857.347893] env[61839]: DEBUG oslo_vmware.api [None req-50defe20-1e8f-44c8-9564-69613a54695f tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Task: {'id': task-1314529, 'name': ExtendVirtualDisk_Task} progress is 0%. 
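The nova.policy record above shows an authorization check for network:attach_external_network failing for a token carrying only the member and reader roles. A minimal oslo.policy sketch of the same kind of decision; the "role:admin" rule string is an assumption for illustration (Nova's actual default for this rule is defined in its policy files, not shown in this log):

    from oslo_config import cfg
    from oslo_policy import policy

    conf = cfg.ConfigOpts()
    enforcer = policy.Enforcer(conf)
    # Assumed rule, for illustration only.
    enforcer.register_default(
        policy.RuleDefault("network:attach_external_network", "role:admin"))

    creds = {"roles": ["member", "reader"],
             "project_id": "223d94c193814f649b5d1f35e3756071"}
    print(enforcer.enforce("network:attach_external_network", {}, creds))  # False

A False result here is not an error: the build simply proceeds on the project's own networks, as the later "Successfully created port" record for this instance shows.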
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.350806] env[61839]: DEBUG oslo_vmware.api [None req-0e959dc4-0080-49d1-93fb-cad2bd59d9d7 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] Task: {'id': task-1314530, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.378952] env[61839]: DEBUG oslo_vmware.api [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Task: {'id': task-1314527, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.630155] env[61839]: DEBUG oslo_concurrency.lockutils [req-9195ac2e-3671-4aab-b538-278d87d9aeb3 req-9fc4375c-305b-4459-a00a-7eea572eaafc service nova] Releasing lock "refresh_cache-ef49a6f5-27c3-4595-af65-d6a5aa47d4e4" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 857.647830] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314528, 'name': CreateVM_Task, 'duration_secs': 0.683299} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 857.648139] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ef49a6f5-27c3-4595-af65-d6a5aa47d4e4] Created VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 857.648944] env[61839]: DEBUG oslo_concurrency.lockutils [None req-6de79a71-94ce-468a-ba01-6c81e70cb056 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 857.649207] env[61839]: DEBUG oslo_concurrency.lockutils [None req-6de79a71-94ce-468a-ba01-6c81e70cb056 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 857.649613] env[61839]: DEBUG oslo_concurrency.lockutils [None req-6de79a71-94ce-468a-ba01-6c81e70cb056 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 857.649945] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0f5acc17-433d-4401-98b7-2172e5582927 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.660274] env[61839]: DEBUG oslo_vmware.api [None req-6de79a71-94ce-468a-ba01-6c81e70cb056 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Waiting for the task: (returnval){ [ 857.660274] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]522cb7ac-6ed9-a45e-77f6-002c321e7d5f" [ 857.660274] env[61839]: _type = "Task" [ 857.660274] env[61839]: } to complete. 
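Around the image cache the driver brackets every lookup with oslo.concurrency locks, visible above as the Acquiring/Acquired/Releasing records for "[datastore1] devstack-image-cache_base/...". A short sketch of the two locking forms seen in this log (requires oslo.concurrency; lock names copied from the records):

    from oslo_concurrency import lockutils

    cache_entry = ("[datastore1] devstack-image-cache_base/"
                   "e497cc62-282a-4a70-9770-22d80d8a1013")

    # Context-manager form, as used around cache inspection above.
    with lockutils.lock(cache_entry):
        pass  # inspect or populate this cache entry

    # Decorator form, as used for the 'compute_resources'
    # serialization seen throughout this log.
    @lockutils.synchronized("compute_resources")
    def instance_claim():
        pass

    instance_claim()

Locks like these serialize greenthreads within one nova-compute process; the "external semaphore" record above additionally guards the shared cache entry against other processes.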
{{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 857.666740] env[61839]: DEBUG oslo_vmware.api [None req-6de79a71-94ce-468a-ba01-6c81e70cb056 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]522cb7ac-6ed9-a45e-77f6-002c321e7d5f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.757509] env[61839]: DEBUG nova.network.neutron [None req-e98b0865-779a-457f-bdd0-ebf545e9b50c tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 56369316-a445-4a2a-a0a6-967074104e19] Successfully created port: ef2288b6-c4de-43f8-95c9-22511e164c36 {{(pid=61839) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 857.762405] env[61839]: DEBUG nova.compute.manager [None req-e98b0865-779a-457f-bdd0-ebf545e9b50c tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 56369316-a445-4a2a-a0a6-967074104e19] Start building block device mappings for instance. {{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 857.765703] env[61839]: DEBUG oslo_concurrency.lockutils [None req-29672a53-c7e3-4fb5-8183-aa50247b3c95 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.017s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 857.766796] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a6fa6e9f-3ab4-4eb2-8dbd-9e4dcff4ce69 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.213s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 857.768207] env[61839]: INFO nova.compute.claims [None req-a6fa6e9f-3ab4-4eb2-8dbd-9e4dcff4ce69 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: e47c08c6-5de3-48b0-8327-57ddb273555f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 857.846831] env[61839]: DEBUG oslo_vmware.api [None req-0e959dc4-0080-49d1-93fb-cad2bd59d9d7 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] Task: {'id': task-1314530, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.261208} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 857.847542] env[61839]: DEBUG oslo_vmware.api [None req-50defe20-1e8f-44c8-9564-69613a54695f tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Task: {'id': task-1314529, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.155541} completed successfully. 
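The "Claim successful on node domain-c8..." record above is the resource tracker accepting the build after checking it under the compute_resources lock. A toy version of that accept/deduct decision, with numbers taken from the inventory and m1.nano flavor records that appear later in this log:

    free = {"vcpus": 192, "memory_mb": 196078}   # capacity left on the node
    request = {"vcpus": 1, "memory_mb": 192}     # m1.nano

    def claim(free, request):
        # Accept only if every requested resource fits, then deduct.
        if all(free[r] >= request[r] for r in request):
            for r in request:
                free[r] -= request[r]
            return True
        return False

    print(claim(free, request))  # True -> 'Claim successful on node ...'

The real claim also covers disk, PCI and NUMA resources, and can be reverted if a later build step fails.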
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 857.847749] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-0e959dc4-0080-49d1-93fb-cad2bd59d9d7 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] [instance: 2cb53e37-8b0b-48b7-a973-061b91df46df] Extended root virtual disk {{(pid=61839) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 857.848172] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-50defe20-1e8f-44c8-9564-69613a54695f tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] [instance: c996d7db-4b73-4445-9989-4efb2cd852e8] Extended root virtual disk {{(pid=61839) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 857.848921] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f473bcd-33e5-4a55-8c4d-b59d561fc129 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.852157] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d849fdf-8c6c-487e-99c0-e7e9ae1b12bb {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.892127] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-0e959dc4-0080-49d1-93fb-cad2bd59d9d7 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] [instance: 2cb53e37-8b0b-48b7-a973-061b91df46df] Reconfiguring VM instance instance-00000046 to attach disk [datastore2] 2cb53e37-8b0b-48b7-a973-061b91df46df/2cb53e37-8b0b-48b7-a973-061b91df46df.vmdk or device None with type sparse {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 857.902772] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-50defe20-1e8f-44c8-9564-69613a54695f tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] [instance: c996d7db-4b73-4445-9989-4efb2cd852e8] Reconfiguring VM instance instance-00000049 to attach disk [datastore1] c996d7db-4b73-4445-9989-4efb2cd852e8/c996d7db-4b73-4445-9989-4efb2cd852e8.vmdk or device None with type sparse {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 857.906312] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cf65d3f6-e457-4a80-8ba6-1deae25d815f {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.922583] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5a2c2d95-971d-43db-9de5-927d3af0322d {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.946035] env[61839]: DEBUG oslo_vmware.api [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Task: {'id': task-1314527, 'name': ReconfigVM_Task, 'duration_secs': 1.34215} completed successfully. 
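Both ReconfigVM_Task records above attach a freshly copied root VMDK to its VM. The real driver builds SOAP objects through oslo.vmware/suds; the dict below only illustrates the shape of the device-change spec, and mapping the log's "type sparse" onto a thin-provisioned backing is an assumption:

    def attach_disk_spec(vmdk_path, controller_key=100, unit_number=0,
                         disk_type="sparse"):
        # Illustrative shape only, not Nova's API.
        return {
            "deviceChange": [{
                "operation": "add",
                "device": {
                    "controllerKey": controller_key,
                    "unitNumber": unit_number,
                    "backing": {
                        "fileName": vmdk_path,
                        "diskMode": "persistent",
                        "thinProvisioned": disk_type == "sparse",  # assumption
                    },
                },
            }],
        }

    spec = attach_disk_spec(
        "[datastore2] 2cb53e37-8b0b-48b7-a973-061b91df46df/"
        "2cb53e37-8b0b-48b7-a973-061b91df46df.vmdk")
    print(spec["deviceChange"][0]["device"]["backing"]["fileName"])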
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 857.949538] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] [instance: c180cc04-79da-4529-a905-1985a85b7cf5] Reconfigured VM instance instance-00000048 to attach disk [datastore2] c180cc04-79da-4529-a905-1985a85b7cf5/c180cc04-79da-4529-a905-1985a85b7cf5.vmdk or device None with type sparse {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 857.949538] env[61839]: DEBUG oslo_vmware.api [None req-50defe20-1e8f-44c8-9564-69613a54695f tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Waiting for the task: (returnval){ [ 857.949538] env[61839]: value = "task-1314532" [ 857.949538] env[61839]: _type = "Task" [ 857.949538] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 857.949929] env[61839]: DEBUG oslo_vmware.api [None req-0e959dc4-0080-49d1-93fb-cad2bd59d9d7 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] Waiting for the task: (returnval){ [ 857.949929] env[61839]: value = "task-1314531" [ 857.949929] env[61839]: _type = "Task" [ 857.949929] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 857.950079] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-94ed2bc2-485b-4d31-81a4-a984dce804b1 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.963226] env[61839]: DEBUG oslo_vmware.api [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Waiting for the task: (returnval){ [ 857.963226] env[61839]: value = "task-1314533" [ 857.963226] env[61839]: _type = "Task" [ 857.963226] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 857.969684] env[61839]: DEBUG oslo_vmware.api [None req-50defe20-1e8f-44c8-9564-69613a54695f tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Task: {'id': task-1314532, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.969999] env[61839]: DEBUG oslo_vmware.api [None req-0e959dc4-0080-49d1-93fb-cad2bd59d9d7 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] Task: {'id': task-1314531, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.981311] env[61839]: DEBUG oslo_vmware.api [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Task: {'id': task-1314533, 'name': Rename_Task} progress is 5%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.169042] env[61839]: DEBUG oslo_vmware.api [None req-6de79a71-94ce-468a-ba01-6c81e70cb056 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]522cb7ac-6ed9-a45e-77f6-002c321e7d5f, 'name': SearchDatastore_Task, 'duration_secs': 0.010868} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 858.169377] env[61839]: DEBUG oslo_concurrency.lockutils [None req-6de79a71-94ce-468a-ba01-6c81e70cb056 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 858.169624] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-6de79a71-94ce-468a-ba01-6c81e70cb056 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: ef49a6f5-27c3-4595-af65-d6a5aa47d4e4] Processing image e497cc62-282a-4a70-9770-22d80d8a1013 {{(pid=61839) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 858.169867] env[61839]: DEBUG oslo_concurrency.lockutils [None req-6de79a71-94ce-468a-ba01-6c81e70cb056 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 858.170034] env[61839]: DEBUG oslo_concurrency.lockutils [None req-6de79a71-94ce-468a-ba01-6c81e70cb056 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 858.170225] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-6de79a71-94ce-468a-ba01-6c81e70cb056 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 858.170495] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bae4310c-019a-4bf8-80d5-6ea061510313 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.180730] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-6de79a71-94ce-468a-ba01-6c81e70cb056 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 858.180955] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-6de79a71-94ce-468a-ba01-6c81e70cb056 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Folder [datastore1] devstack-image-cache_base created. 
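The records above trace _fetch_image_if_missing: take the per-image lock, search the datastore for the cached VMDK, and create the cache folder idempotently (the "Created directory" record). A local-filesystem analogue of that idempotent mkdir, assuming the driver swallows the already-exists fault the same way os.makedirs(exist_ok=True) does:

    import os

    def mkdir_if_missing(path):
        # Succeeds whether or not the directory already exists,
        # like FileManager.MakeDirectory with parent creation.
        os.makedirs(path, exist_ok=True)
        return path

    print(mkdir_if_missing("/tmp/devstack-image-cache_base"))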
{{(pid=61839) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 858.182021] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-99e9f38a-e60e-4e4e-b1eb-80f18ee58416 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.188859] env[61839]: DEBUG oslo_vmware.api [None req-6de79a71-94ce-468a-ba01-6c81e70cb056 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Waiting for the task: (returnval){ [ 858.188859] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52942b67-4e94-4b0d-1248-95e62a477f60" [ 858.188859] env[61839]: _type = "Task" [ 858.188859] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 858.197923] env[61839]: DEBUG oslo_vmware.api [None req-6de79a71-94ce-468a-ba01-6c81e70cb056 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52942b67-4e94-4b0d-1248-95e62a477f60, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.468028] env[61839]: DEBUG oslo_vmware.api [None req-50defe20-1e8f-44c8-9564-69613a54695f tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Task: {'id': task-1314532, 'name': ReconfigVM_Task, 'duration_secs': 0.361975} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 858.468171] env[61839]: DEBUG oslo_vmware.api [None req-0e959dc4-0080-49d1-93fb-cad2bd59d9d7 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] Task: {'id': task-1314531, 'name': ReconfigVM_Task, 'duration_secs': 0.387586} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 858.468344] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-50defe20-1e8f-44c8-9564-69613a54695f tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] [instance: c996d7db-4b73-4445-9989-4efb2cd852e8] Reconfigured VM instance instance-00000049 to attach disk [datastore1] c996d7db-4b73-4445-9989-4efb2cd852e8/c996d7db-4b73-4445-9989-4efb2cd852e8.vmdk or device None with type sparse {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 858.469013] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-0e959dc4-0080-49d1-93fb-cad2bd59d9d7 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] [instance: 2cb53e37-8b0b-48b7-a973-061b91df46df] Reconfigured VM instance instance-00000046 to attach disk [datastore2] 2cb53e37-8b0b-48b7-a973-061b91df46df/2cb53e37-8b0b-48b7-a973-061b91df46df.vmdk or device None with type sparse {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 858.469546] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7ffb6e61-e187-4d2a-bb35-f46eb87f8833 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.471130] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4abd280f-cc24-4650-b8b7-14643471aad9 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.481986] env[61839]: DEBUG oslo_vmware.api [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Task: {'id': task-1314533, 'name': Rename_Task, 'duration_secs': 0.240246} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 858.484260] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] [instance: c180cc04-79da-4529-a905-1985a85b7cf5] Powering on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 858.484614] env[61839]: DEBUG oslo_vmware.api [None req-0e959dc4-0080-49d1-93fb-cad2bd59d9d7 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] Waiting for the task: (returnval){ [ 858.484614] env[61839]: value = "task-1314535" [ 858.484614] env[61839]: _type = "Task" [ 858.484614] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 858.485142] env[61839]: DEBUG oslo_vmware.api [None req-50defe20-1e8f-44c8-9564-69613a54695f tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Waiting for the task: (returnval){ [ 858.485142] env[61839]: value = "task-1314534" [ 858.485142] env[61839]: _type = "Task" [ 858.485142] env[61839]: } to complete. 
{{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 858.485307] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-72119b92-d8d6-4b7a-9c6b-5f6a6d0a5b53 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.500865] env[61839]: DEBUG oslo_vmware.api [None req-50defe20-1e8f-44c8-9564-69613a54695f tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Task: {'id': task-1314534, 'name': Rename_Task} progress is 6%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.504917] env[61839]: DEBUG oslo_vmware.api [None req-0e959dc4-0080-49d1-93fb-cad2bd59d9d7 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] Task: {'id': task-1314535, 'name': Rename_Task} progress is 6%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.505239] env[61839]: DEBUG oslo_vmware.api [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Waiting for the task: (returnval){ [ 858.505239] env[61839]: value = "task-1314536" [ 858.505239] env[61839]: _type = "Task" [ 858.505239] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 858.513674] env[61839]: DEBUG oslo_vmware.api [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Task: {'id': task-1314536, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.702058] env[61839]: DEBUG oslo_vmware.api [None req-6de79a71-94ce-468a-ba01-6c81e70cb056 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52942b67-4e94-4b0d-1248-95e62a477f60, 'name': SearchDatastore_Task, 'duration_secs': 0.023399} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 858.702401] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b8c27a91-4240-4301-a070-fbdaa7c42d3e {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.708314] env[61839]: DEBUG oslo_vmware.api [None req-6de79a71-94ce-468a-ba01-6c81e70cb056 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Waiting for the task: (returnval){ [ 858.708314] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52314a3b-b6b0-d550-6bc5-f012bebe8a25" [ 858.708314] env[61839]: _type = "Task" [ 858.708314] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 858.716521] env[61839]: DEBUG oslo_vmware.api [None req-6de79a71-94ce-468a-ba01-6c81e70cb056 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52314a3b-b6b0-d550-6bc5-f012bebe8a25, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.776070] env[61839]: DEBUG nova.compute.manager [None req-e98b0865-779a-457f-bdd0-ebf545e9b50c tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 56369316-a445-4a2a-a0a6-967074104e19] Start spawning the instance on the hypervisor. {{(pid=61839) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 858.803765] env[61839]: DEBUG nova.virt.hardware [None req-e98b0865-779a-457f-bdd0-ebf545e9b50c tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-18T16:51:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-18T16:50:57Z,direct_url=,disk_format='vmdk',id=e497cc62-282a-4a70-9770-22d80d8a1013,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a98e034276e44534a9feace637762da7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-18T16:50:58Z,virtual_size=,visibility=), allow threads: False {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 858.804916] env[61839]: DEBUG nova.virt.hardware [None req-e98b0865-779a-457f-bdd0-ebf545e9b50c tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Flavor limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 858.804916] env[61839]: DEBUG nova.virt.hardware [None req-e98b0865-779a-457f-bdd0-ebf545e9b50c tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Image limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 858.804916] env[61839]: DEBUG nova.virt.hardware [None req-e98b0865-779a-457f-bdd0-ebf545e9b50c tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Flavor pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 858.804916] env[61839]: DEBUG nova.virt.hardware [None req-e98b0865-779a-457f-bdd0-ebf545e9b50c tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Image pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 858.804916] env[61839]: DEBUG nova.virt.hardware [None req-e98b0865-779a-457f-bdd0-ebf545e9b50c tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 858.804916] env[61839]: DEBUG nova.virt.hardware [None req-e98b0865-779a-457f-bdd0-ebf545e9b50c tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 858.805198] env[61839]: DEBUG nova.virt.hardware [None req-e98b0865-779a-457f-bdd0-ebf545e9b50c tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 858.805235] env[61839]: DEBUG nova.virt.hardware [None req-e98b0865-779a-457f-bdd0-ebf545e9b50c tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Got 1 possible topologies {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 858.805462] env[61839]: DEBUG nova.virt.hardware [None req-e98b0865-779a-457f-bdd0-ebf545e9b50c tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 858.805666] env[61839]: DEBUG nova.virt.hardware [None req-e98b0865-779a-457f-bdd0-ebf545e9b50c tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 858.806574] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-391d24bd-87df-419d-bb43-2913f230ef1a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.818011] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-189baa6e-ceba-4d31-825d-27902b73913d {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.003368] env[61839]: DEBUG oslo_vmware.api [None req-0e959dc4-0080-49d1-93fb-cad2bd59d9d7 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] Task: {'id': task-1314535, 'name': Rename_Task, 'duration_secs': 0.163958} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 859.006718] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e959dc4-0080-49d1-93fb-cad2bd59d9d7 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] [instance: 2cb53e37-8b0b-48b7-a973-061b91df46df] Powering on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 859.007133] env[61839]: DEBUG oslo_vmware.api [None req-50defe20-1e8f-44c8-9564-69613a54695f tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Task: {'id': task-1314534, 'name': Rename_Task, 'duration_secs': 0.184607} completed successfully. 
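The virt/hardware.py trace above shows the topology search: flavor and image both give limits of 0:0:0, so each dimension defaults to 65536, and for a 1-vCPU flavor the only factorization is sockets=1, cores=1, threads=1. An equivalent brute-force enumeration (Nova's actual implementation is structured differently but accepts the same triples):

    def possible_cpu_topologies(vcpus, max_sockets=65536, max_cores=65536,
                                max_threads=65536):
        # Every (sockets, cores, threads) whose product is the vCPU
        # count and which respects the per-dimension limits.
        found = []
        for s in range(1, min(vcpus, max_sockets) + 1):
            for c in range(1, min(vcpus, max_cores) + 1):
                for t in range(1, min(vcpus, max_threads) + 1):
                    if s * c * t == vcpus:
                        found.append((s, c, t))
        return found

    print(possible_cpu_topologies(1))  # [(1, 1, 1)] -- matches the trace
    print(possible_cpu_topologies(4))  # (1, 1, 4), (1, 2, 2), ... (4, 1, 1)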
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 859.009989] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c2afcc03-6788-4fd4-bb56-8981845782a8 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.014816] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-50defe20-1e8f-44c8-9564-69613a54695f tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] [instance: c996d7db-4b73-4445-9989-4efb2cd852e8] Powering on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 859.015549] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1433cb09-94c4-47d6-8372-a277014cf32d {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.024437] env[61839]: DEBUG oslo_vmware.api [None req-50defe20-1e8f-44c8-9564-69613a54695f tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Waiting for the task: (returnval){ [ 859.024437] env[61839]: value = "task-1314537" [ 859.024437] env[61839]: _type = "Task" [ 859.024437] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 859.030642] env[61839]: DEBUG oslo_vmware.api [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Task: {'id': task-1314536, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.031148] env[61839]: DEBUG oslo_vmware.api [None req-0e959dc4-0080-49d1-93fb-cad2bd59d9d7 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] Waiting for the task: (returnval){ [ 859.031148] env[61839]: value = "task-1314538" [ 859.031148] env[61839]: _type = "Task" [ 859.031148] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 859.052539] env[61839]: DEBUG oslo_vmware.api [None req-50defe20-1e8f-44c8-9564-69613a54695f tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Task: {'id': task-1314537, 'name': PowerOnVM_Task} progress is 33%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.061369] env[61839]: DEBUG oslo_vmware.api [None req-0e959dc4-0080-49d1-93fb-cad2bd59d9d7 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] Task: {'id': task-1314538, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.087698] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61898af3-8a3b-4701-b295-44bef5d38af5 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.095948] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e83e7e6b-145a-4db2-929e-454fd53c2879 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.133634] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba84ffa3-8a54-4561-8c7c-e350ac424f6a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.144325] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ba702c3-7c95-4cfd-91b1-b06558bb926d {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.159314] env[61839]: DEBUG nova.compute.provider_tree [None req-a6fa6e9f-3ab4-4eb2-8dbd-9e4dcff4ce69 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 859.220380] env[61839]: DEBUG oslo_vmware.api [None req-6de79a71-94ce-468a-ba01-6c81e70cb056 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52314a3b-b6b0-d550-6bc5-f012bebe8a25, 'name': SearchDatastore_Task, 'duration_secs': 0.07031} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 859.220694] env[61839]: DEBUG oslo_concurrency.lockutils [None req-6de79a71-94ce-468a-ba01-6c81e70cb056 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 859.220984] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-6de79a71-94ce-468a-ba01-6c81e70cb056 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk to [datastore1] ef49a6f5-27c3-4595-af65-d6a5aa47d4e4/ef49a6f5-27c3-4595-af65-d6a5aa47d4e4.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 859.221303] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ceff15ec-981c-4f57-b676-034709468622 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.231615] env[61839]: DEBUG oslo_vmware.api [None req-6de79a71-94ce-468a-ba01-6c81e70cb056 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Waiting for the task: (returnval){ [ 859.231615] env[61839]: value = "task-1314539" [ 859.231615] env[61839]: _type = "Task" [ 859.231615] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 859.239745] env[61839]: DEBUG oslo_vmware.api [None req-6de79a71-94ce-468a-ba01-6c81e70cb056 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Task: {'id': task-1314539, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.269929] env[61839]: DEBUG nova.compute.manager [req-6a3dd331-18da-4135-8ece-85436c9112de req-b9a181b9-58b3-4a64-83f7-db749d28f7e7 service nova] [instance: 56369316-a445-4a2a-a0a6-967074104e19] Received event network-vif-plugged-ef2288b6-c4de-43f8-95c9-22511e164c36 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 859.269929] env[61839]: DEBUG oslo_concurrency.lockutils [req-6a3dd331-18da-4135-8ece-85436c9112de req-b9a181b9-58b3-4a64-83f7-db749d28f7e7 service nova] Acquiring lock "56369316-a445-4a2a-a0a6-967074104e19-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 859.269929] env[61839]: DEBUG oslo_concurrency.lockutils [req-6a3dd331-18da-4135-8ece-85436c9112de req-b9a181b9-58b3-4a64-83f7-db749d28f7e7 service nova] Lock "56369316-a445-4a2a-a0a6-967074104e19-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 859.269929] env[61839]: DEBUG oslo_concurrency.lockutils [req-6a3dd331-18da-4135-8ece-85436c9112de req-b9a181b9-58b3-4a64-83f7-db749d28f7e7 service nova] Lock "56369316-a445-4a2a-a0a6-967074104e19-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 859.269929] env[61839]: DEBUG nova.compute.manager [req-6a3dd331-18da-4135-8ece-85436c9112de req-b9a181b9-58b3-4a64-83f7-db749d28f7e7 service nova] [instance: 56369316-a445-4a2a-a0a6-967074104e19] No waiting events found dispatching network-vif-plugged-ef2288b6-c4de-43f8-95c9-22511e164c36 {{(pid=61839) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 859.269929] env[61839]: WARNING nova.compute.manager [req-6a3dd331-18da-4135-8ece-85436c9112de req-b9a181b9-58b3-4a64-83f7-db749d28f7e7 service nova] [instance: 56369316-a445-4a2a-a0a6-967074104e19] Received unexpected event network-vif-plugged-ef2288b6-c4de-43f8-95c9-22511e164c36 for instance with vm_state building and task_state spawning. [ 859.448137] env[61839]: DEBUG nova.network.neutron [None req-e98b0865-779a-457f-bdd0-ebf545e9b50c tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 56369316-a445-4a2a-a0a6-967074104e19] Successfully updated port: ef2288b6-c4de-43f8-95c9-22511e164c36 {{(pid=61839) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 859.518985] env[61839]: DEBUG oslo_vmware.api [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Task: {'id': task-1314536, 'name': PowerOnVM_Task, 'duration_secs': 0.862662} completed successfully. 
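The req-6a3dd331 records above show the external-event handshake around port ef2288b6: Neutron reports network-vif-plugged, the compute manager pops the matching waiter, finds none ("No waiting events found dispatching"), and logs the "unexpected event" warning because the spawning thread has not registered yet. A minimal sketch of that register/dispatch pattern:

    import threading

    waiters = {}
    guard = threading.Lock()

    def prepare_for_event(tag):
        # Called by the spawning thread *before* plugging the VIF.
        with guard:
            waiters[tag] = threading.Event()

    def dispatch_event(tag):
        # Called when the event arrives from Neutron via nova-api.
        with guard:
            event = waiters.pop(tag, None)
        if event is None:
            print("WARNING: received unexpected event %s" % tag)
        else:
            event.set()

    tag = "network-vif-plugged-ef2288b6-c4de-43f8-95c9-22511e164c36"
    dispatch_event(tag)      # no waiter yet -> the warning seen above
    prepare_for_event(tag)
    dispatch_event(tag)      # waiter present -> it is woken instead

The warning is benign here: the instance is still in vm_state building / task_state spawning, so the plug event simply raced ahead of the wait.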
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 859.519322] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] [instance: c180cc04-79da-4529-a905-1985a85b7cf5] Powered on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 859.519541] env[61839]: INFO nova.compute.manager [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] [instance: c180cc04-79da-4529-a905-1985a85b7cf5] Took 10.05 seconds to spawn the instance on the hypervisor. [ 859.519725] env[61839]: DEBUG nova.compute.manager [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] [instance: c180cc04-79da-4529-a905-1985a85b7cf5] Checking state {{(pid=61839) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 859.520537] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57e81947-2afb-4723-bb4b-626eea1aaeb7 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.542896] env[61839]: DEBUG oslo_vmware.api [None req-50defe20-1e8f-44c8-9564-69613a54695f tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Task: {'id': task-1314537, 'name': PowerOnVM_Task} progress is 94%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.553244] env[61839]: DEBUG oslo_vmware.api [None req-0e959dc4-0080-49d1-93fb-cad2bd59d9d7 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] Task: {'id': task-1314538, 'name': PowerOnVM_Task} progress is 89%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.663197] env[61839]: DEBUG nova.scheduler.client.report [None req-a6fa6e9f-3ab4-4eb2-8dbd-9e4dcff4ce69 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 859.742584] env[61839]: DEBUG oslo_vmware.api [None req-6de79a71-94ce-468a-ba01-6c81e70cb056 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Task: {'id': task-1314539, 'name': CopyVirtualDisk_Task} progress is 4%. 
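The inventory record above fixes what the scheduler may place on provider cef329e6-1ccd-42a8-bbc4-109a06d1c908. Placement computes schedulable capacity as (total - reserved) * allocation_ratio; worked through with the logged numbers (dict trimmed to the fields used here):

    inventory = {
        "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
        "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0},
    }

    for rc, inv in inventory.items():
        capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
        print(rc, capacity)
    # VCPU 192.0       -- 48 cores oversubscribed 4x
    # MEMORY_MB 196078.0
    # DISK_GB 400.0

"Inventory has not changed" means the report client can skip the PUT to the placement service on this periodic update.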
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.950963] env[61839]: DEBUG oslo_concurrency.lockutils [None req-e98b0865-779a-457f-bdd0-ebf545e9b50c tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Acquiring lock "refresh_cache-56369316-a445-4a2a-a0a6-967074104e19" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 859.951424] env[61839]: DEBUG oslo_concurrency.lockutils [None req-e98b0865-779a-457f-bdd0-ebf545e9b50c tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Acquired lock "refresh_cache-56369316-a445-4a2a-a0a6-967074104e19" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 859.951424] env[61839]: DEBUG nova.network.neutron [None req-e98b0865-779a-457f-bdd0-ebf545e9b50c tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 56369316-a445-4a2a-a0a6-967074104e19] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 860.044146] env[61839]: DEBUG oslo_vmware.api [None req-50defe20-1e8f-44c8-9564-69613a54695f tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Task: {'id': task-1314537, 'name': PowerOnVM_Task, 'duration_secs': 0.840686} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 860.049190] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-50defe20-1e8f-44c8-9564-69613a54695f tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] [instance: c996d7db-4b73-4445-9989-4efb2cd852e8] Powered on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 860.049436] env[61839]: INFO nova.compute.manager [None req-50defe20-1e8f-44c8-9564-69613a54695f tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] [instance: c996d7db-4b73-4445-9989-4efb2cd852e8] Took 8.15 seconds to spawn the instance on the hypervisor. [ 860.049680] env[61839]: DEBUG nova.compute.manager [None req-50defe20-1e8f-44c8-9564-69613a54695f tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] [instance: c996d7db-4b73-4445-9989-4efb2cd852e8] Checking state {{(pid=61839) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 860.050157] env[61839]: INFO nova.compute.manager [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] [instance: c180cc04-79da-4529-a905-1985a85b7cf5] Took 28.40 seconds to build instance. [ 860.051838] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c102603-ea91-47de-81df-8bfa4ce28864 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.065881] env[61839]: DEBUG oslo_vmware.api [None req-0e959dc4-0080-49d1-93fb-cad2bd59d9d7 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] Task: {'id': task-1314538, 'name': PowerOnVM_Task, 'duration_secs': 0.590015} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 860.066784] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e959dc4-0080-49d1-93fb-cad2bd59d9d7 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] [instance: 2cb53e37-8b0b-48b7-a973-061b91df46df] Powered on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 860.067015] env[61839]: INFO nova.compute.manager [None req-0e959dc4-0080-49d1-93fb-cad2bd59d9d7 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] [instance: 2cb53e37-8b0b-48b7-a973-061b91df46df] Took 13.13 seconds to spawn the instance on the hypervisor. [ 860.067208] env[61839]: DEBUG nova.compute.manager [None req-0e959dc4-0080-49d1-93fb-cad2bd59d9d7 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] [instance: 2cb53e37-8b0b-48b7-a973-061b91df46df] Checking state {{(pid=61839) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 860.067960] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a6bd37a-5997-4416-a8ef-34daf6724fb8 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.168368] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a6fa6e9f-3ab4-4eb2-8dbd-9e4dcff4ce69 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.401s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 860.169047] env[61839]: DEBUG nova.compute.manager [None req-a6fa6e9f-3ab4-4eb2-8dbd-9e4dcff4ce69 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: e47c08c6-5de3-48b0-8327-57ddb273555f] Start building networks asynchronously for instance. {{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 860.171675] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0b723d18-6a91-468b-b8d0-3e444fa3d9c3 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.887s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 860.173179] env[61839]: INFO nova.compute.claims [None req-0b723d18-6a91-468b-b8d0-3e444fa3d9c3 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: a4463efc-ffca-4552-a072-cbf5fe062533] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 860.240467] env[61839]: DEBUG oslo_vmware.api [None req-6de79a71-94ce-468a-ba01-6c81e70cb056 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Task: {'id': task-1314539, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.81465} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 860.240770] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-6de79a71-94ce-468a-ba01-6c81e70cb056 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk to [datastore1] ef49a6f5-27c3-4595-af65-d6a5aa47d4e4/ef49a6f5-27c3-4595-af65-d6a5aa47d4e4.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 860.241046] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-6de79a71-94ce-468a-ba01-6c81e70cb056 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: ef49a6f5-27c3-4595-af65-d6a5aa47d4e4] Extending root virtual disk to 1048576 {{(pid=61839) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 860.241327] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c458018b-ced4-4c8a-9a16-2f3e9d16b27d {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.248417] env[61839]: DEBUG oslo_vmware.api [None req-6de79a71-94ce-468a-ba01-6c81e70cb056 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Waiting for the task: (returnval){ [ 860.248417] env[61839]: value = "task-1314540" [ 860.248417] env[61839]: _type = "Task" [ 860.248417] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 860.258658] env[61839]: DEBUG oslo_vmware.api [None req-6de79a71-94ce-468a-ba01-6c81e70cb056 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Task: {'id': task-1314540, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 860.509476] env[61839]: DEBUG nova.network.neutron [None req-e98b0865-779a-457f-bdd0-ebf545e9b50c tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 56369316-a445-4a2a-a0a6-967074104e19] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 860.557621] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0de8673c-e8e7-4f9c-9c62-36edbd7d903b tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Lock "c180cc04-79da-4529-a905-1985a85b7cf5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 51.943s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 860.574602] env[61839]: INFO nova.compute.manager [None req-50defe20-1e8f-44c8-9564-69613a54695f tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] [instance: c996d7db-4b73-4445-9989-4efb2cd852e8] Took 27.39 seconds to build instance. [ 860.588347] env[61839]: INFO nova.compute.manager [None req-0e959dc4-0080-49d1-93fb-cad2bd59d9d7 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] [instance: 2cb53e37-8b0b-48b7-a973-061b91df46df] Took 34.24 seconds to build instance. 
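The PowerOnVM_Task, CopyVirtualDisk_Task and ExtendVirtualDisk_Task entries above all follow oslo.vmware's task protocol: the asynchronous vSphere call returns a Task managed-object reference, and the session's wait_for_task polls it (the _poll_task "progress is N%" lines) until it completes, at which point the task dict is logged with its duration_secs. A minimal sketch of that pattern follows; the host, credentials and the moref id 'vm-123' are placeholders invented for illustration, not values from this deployment:

from oslo_vmware import api, vim_util

# Hypothetical session; the positional arguments are host, username,
# password, api_retry_count and task_poll_interval (seconds between
# polls of an outstanding task).
session = api.VMwareAPISession('vc.example.test', 'user', 'secret', 10, 0.5)

# Build a managed-object reference for the target VM (id is made up).
vm_ref = vim_util.get_moref('vm-123', 'VirtualMachine')

# Asynchronous vSphere call: returns a Task moref immediately.
task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)

# Blocks, re-reading the task state until it reaches 'success'
# (raising on 'error'); this polling is what produces the
# "progress is N%" and "completed successfully" entries above.
session.wait_for_task(task)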
[ 860.681224] env[61839]: DEBUG nova.compute.utils [None req-a6fa6e9f-3ab4-4eb2-8dbd-9e4dcff4ce69 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Using /dev/sd instead of None {{(pid=61839) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 860.682734] env[61839]: DEBUG nova.compute.manager [None req-a6fa6e9f-3ab4-4eb2-8dbd-9e4dcff4ce69 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: e47c08c6-5de3-48b0-8327-57ddb273555f] Allocating IP information in the background. {{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 860.682888] env[61839]: DEBUG nova.network.neutron [None req-a6fa6e9f-3ab4-4eb2-8dbd-9e4dcff4ce69 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: e47c08c6-5de3-48b0-8327-57ddb273555f] allocate_for_instance() {{(pid=61839) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 860.759070] env[61839]: DEBUG oslo_vmware.api [None req-6de79a71-94ce-468a-ba01-6c81e70cb056 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Task: {'id': task-1314540, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.078038} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 860.759372] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-6de79a71-94ce-468a-ba01-6c81e70cb056 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: ef49a6f5-27c3-4595-af65-d6a5aa47d4e4] Extended root virtual disk {{(pid=61839) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 860.760229] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79366424-ca84-46bc-a27b-5474b121c83f {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.784958] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-6de79a71-94ce-468a-ba01-6c81e70cb056 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: ef49a6f5-27c3-4595-af65-d6a5aa47d4e4] Reconfiguring VM instance instance-0000004a to attach disk [datastore1] ef49a6f5-27c3-4595-af65-d6a5aa47d4e4/ef49a6f5-27c3-4595-af65-d6a5aa47d4e4.vmdk or device None with type sparse {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 860.786674] env[61839]: DEBUG nova.policy [None req-a6fa6e9f-3ab4-4eb2-8dbd-9e4dcff4ce69 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7b9ca67c278b4cb9a83ec3c6ce42af5d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5aba1e066cb4400dbbacc92f393962e6', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61839) authorize /opt/stack/nova/nova/policy.py:201}} [ 860.788166] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-328b0511-7b2c-4d6a-ab7b-d2995e9ed3d8 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.810259] env[61839]: DEBUG 
oslo_vmware.api [None req-6de79a71-94ce-468a-ba01-6c81e70cb056 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Waiting for the task: (returnval){ [ 860.810259] env[61839]: value = "task-1314541" [ 860.810259] env[61839]: _type = "Task" [ 860.810259] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 860.817854] env[61839]: DEBUG nova.network.neutron [None req-e98b0865-779a-457f-bdd0-ebf545e9b50c tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 56369316-a445-4a2a-a0a6-967074104e19] Updating instance_info_cache with network_info: [{"id": "ef2288b6-c4de-43f8-95c9-22511e164c36", "address": "fa:16:3e:c7:71:6a", "network": {"id": "6f1d4bda-c333-4556-9530-df719c058a5b", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1248001983-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "223d94c193814f649b5d1f35e3756071", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2180b40f-2bb0-47da-ba80-c2fbe7f98af0", "external-id": "nsx-vlan-transportzone-970", "segmentation_id": 970, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapef2288b6-c4", "ovs_interfaceid": "ef2288b6-c4de-43f8-95c9-22511e164c36", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 860.823736] env[61839]: DEBUG oslo_vmware.api [None req-6de79a71-94ce-468a-ba01-6c81e70cb056 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Task: {'id': task-1314541, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.077016] env[61839]: DEBUG oslo_concurrency.lockutils [None req-50defe20-1e8f-44c8-9564-69613a54695f tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Lock "c996d7db-4b73-4445-9989-4efb2cd852e8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 43.333s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 861.091460] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0e959dc4-0080-49d1-93fb-cad2bd59d9d7 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] Lock "2cb53e37-8b0b-48b7-a973-061b91df46df" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 68.028s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 861.188343] env[61839]: DEBUG nova.compute.manager [None req-a6fa6e9f-3ab4-4eb2-8dbd-9e4dcff4ce69 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: e47c08c6-5de3-48b0-8327-57ddb273555f] Start building block device mappings for instance. {{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 861.315960] env[61839]: DEBUG nova.network.neutron [None req-a6fa6e9f-3ab4-4eb2-8dbd-9e4dcff4ce69 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: e47c08c6-5de3-48b0-8327-57ddb273555f] Successfully created port: c47afc9a-8a60-4c82-9548-b644e8e26492 {{(pid=61839) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 861.327155] env[61839]: DEBUG oslo_concurrency.lockutils [None req-e98b0865-779a-457f-bdd0-ebf545e9b50c tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Releasing lock "refresh_cache-56369316-a445-4a2a-a0a6-967074104e19" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 861.327628] env[61839]: DEBUG nova.compute.manager [None req-e98b0865-779a-457f-bdd0-ebf545e9b50c tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 56369316-a445-4a2a-a0a6-967074104e19] Instance network_info: |[{"id": "ef2288b6-c4de-43f8-95c9-22511e164c36", "address": "fa:16:3e:c7:71:6a", "network": {"id": "6f1d4bda-c333-4556-9530-df719c058a5b", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1248001983-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "223d94c193814f649b5d1f35e3756071", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2180b40f-2bb0-47da-ba80-c2fbe7f98af0", "external-id": "nsx-vlan-transportzone-970", "segmentation_id": 970, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapef2288b6-c4", "ovs_interfaceid": "ef2288b6-c4de-43f8-95c9-22511e164c36", "qbh_params": null, "qbg_params": null, "active": 
true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 861.327810] env[61839]: DEBUG oslo_vmware.api [None req-6de79a71-94ce-468a-ba01-6c81e70cb056 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Task: {'id': task-1314541, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.329088] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-e98b0865-779a-457f-bdd0-ebf545e9b50c tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 56369316-a445-4a2a-a0a6-967074104e19] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c7:71:6a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2180b40f-2bb0-47da-ba80-c2fbe7f98af0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ef2288b6-c4de-43f8-95c9-22511e164c36', 'vif_model': 'vmxnet3'}] {{(pid=61839) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 861.337248] env[61839]: DEBUG oslo.service.loopingcall [None req-e98b0865-779a-457f-bdd0-ebf545e9b50c tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 861.338432] env[61839]: DEBUG nova.compute.manager [req-09523185-7233-4ea1-a603-477fe42d3679 req-55e6d6b3-914c-4bd0-ab63-d0fdea966c51 service nova] [instance: 56369316-a445-4a2a-a0a6-967074104e19] Received event network-changed-ef2288b6-c4de-43f8-95c9-22511e164c36 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 861.338666] env[61839]: DEBUG nova.compute.manager [req-09523185-7233-4ea1-a603-477fe42d3679 req-55e6d6b3-914c-4bd0-ab63-d0fdea966c51 service nova] [instance: 56369316-a445-4a2a-a0a6-967074104e19] Refreshing instance network info cache due to event network-changed-ef2288b6-c4de-43f8-95c9-22511e164c36. 
{{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 861.338922] env[61839]: DEBUG oslo_concurrency.lockutils [req-09523185-7233-4ea1-a603-477fe42d3679 req-55e6d6b3-914c-4bd0-ab63-d0fdea966c51 service nova] Acquiring lock "refresh_cache-56369316-a445-4a2a-a0a6-967074104e19" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 861.339125] env[61839]: DEBUG oslo_concurrency.lockutils [req-09523185-7233-4ea1-a603-477fe42d3679 req-55e6d6b3-914c-4bd0-ab63-d0fdea966c51 service nova] Acquired lock "refresh_cache-56369316-a445-4a2a-a0a6-967074104e19" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 861.339334] env[61839]: DEBUG nova.network.neutron [req-09523185-7233-4ea1-a603-477fe42d3679 req-55e6d6b3-914c-4bd0-ab63-d0fdea966c51 service nova] [instance: 56369316-a445-4a2a-a0a6-967074104e19] Refreshing network info cache for port ef2288b6-c4de-43f8-95c9-22511e164c36 {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 861.343633] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 56369316-a445-4a2a-a0a6-967074104e19] Creating VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 861.344245] env[61839]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-801151cf-13f7-4c18-8bda-b78e7ab77278 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.361295] env[61839]: DEBUG oslo_concurrency.lockutils [None req-049d7e44-9b50-45d0-afa0-68a572684801 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Acquiring lock "c996d7db-4b73-4445-9989-4efb2cd852e8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 861.361800] env[61839]: DEBUG oslo_concurrency.lockutils [None req-049d7e44-9b50-45d0-afa0-68a572684801 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Lock "c996d7db-4b73-4445-9989-4efb2cd852e8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 861.362082] env[61839]: DEBUG oslo_concurrency.lockutils [None req-049d7e44-9b50-45d0-afa0-68a572684801 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Acquiring lock "c996d7db-4b73-4445-9989-4efb2cd852e8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 861.362329] env[61839]: DEBUG oslo_concurrency.lockutils [None req-049d7e44-9b50-45d0-afa0-68a572684801 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Lock "c996d7db-4b73-4445-9989-4efb2cd852e8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 861.362536] env[61839]: DEBUG oslo_concurrency.lockutils [None req-049d7e44-9b50-45d0-afa0-68a572684801 
tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Lock "c996d7db-4b73-4445-9989-4efb2cd852e8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 861.365204] env[61839]: INFO nova.compute.manager [None req-049d7e44-9b50-45d0-afa0-68a572684801 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] [instance: c996d7db-4b73-4445-9989-4efb2cd852e8] Terminating instance [ 861.366757] env[61839]: DEBUG nova.compute.manager [None req-049d7e44-9b50-45d0-afa0-68a572684801 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] [instance: c996d7db-4b73-4445-9989-4efb2cd852e8] Start destroying the instance on the hypervisor. {{(pid=61839) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 861.366984] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-049d7e44-9b50-45d0-afa0-68a572684801 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] [instance: c996d7db-4b73-4445-9989-4efb2cd852e8] Destroying instance {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 861.367990] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e4eb80a-5ab7-4206-bf66-34315946b440 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.372165] env[61839]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 861.372165] env[61839]: value = "task-1314542" [ 861.372165] env[61839]: _type = "Task" [ 861.372165] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 861.379905] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-049d7e44-9b50-45d0-afa0-68a572684801 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] [instance: c996d7db-4b73-4445-9989-4efb2cd852e8] Powering off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 861.380851] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f3ef40c2-bdc9-451c-b132-7d04c6f6e02b {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.385780] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314542, 'name': CreateVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.387849] env[61839]: DEBUG oslo_vmware.api [None req-049d7e44-9b50-45d0-afa0-68a572684801 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Waiting for the task: (returnval){ [ 861.387849] env[61839]: value = "task-1314543" [ 861.387849] env[61839]: _type = "Task" [ 861.387849] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 861.400352] env[61839]: DEBUG oslo_vmware.api [None req-049d7e44-9b50-45d0-afa0-68a572684801 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Task: {'id': task-1314543, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.548060] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb4768ef-c594-4fa5-ba9b-61bd1b018571 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.556463] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fc127d0-34f5-4245-8518-db51a19b310b {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.594194] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93764dd7-dfff-4584-ae08-8328200f5378 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.606596] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42a1a85f-f03e-434a-bc3f-e20b44f6ad18 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.624377] env[61839]: DEBUG nova.compute.provider_tree [None req-0b723d18-6a91-468b-b8d0-3e444fa3d9c3 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 861.711367] env[61839]: DEBUG oslo_concurrency.lockutils [None req-deec7b9d-0a44-4693-a38c-548a3860cdb1 tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Acquiring lock "ce59c937-fc0b-464f-baaa-461c6f6c2d57" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 861.711584] env[61839]: DEBUG oslo_concurrency.lockutils [None req-deec7b9d-0a44-4693-a38c-548a3860cdb1 tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Lock "ce59c937-fc0b-464f-baaa-461c6f6c2d57" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 861.711859] env[61839]: DEBUG oslo_concurrency.lockutils [None req-deec7b9d-0a44-4693-a38c-548a3860cdb1 tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Acquiring lock "ce59c937-fc0b-464f-baaa-461c6f6c2d57-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 861.711989] env[61839]: DEBUG oslo_concurrency.lockutils [None req-deec7b9d-0a44-4693-a38c-548a3860cdb1 tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Lock "ce59c937-fc0b-464f-baaa-461c6f6c2d57-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 861.712342] env[61839]: DEBUG oslo_concurrency.lockutils [None req-deec7b9d-0a44-4693-a38c-548a3860cdb1 
tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Lock "ce59c937-fc0b-464f-baaa-461c6f6c2d57-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 861.714864] env[61839]: INFO nova.compute.manager [None req-deec7b9d-0a44-4693-a38c-548a3860cdb1 tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] [instance: ce59c937-fc0b-464f-baaa-461c6f6c2d57] Terminating instance [ 861.722488] env[61839]: DEBUG nova.compute.manager [None req-deec7b9d-0a44-4693-a38c-548a3860cdb1 tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] [instance: ce59c937-fc0b-464f-baaa-461c6f6c2d57] Start destroying the instance on the hypervisor. {{(pid=61839) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 861.722488] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-deec7b9d-0a44-4693-a38c-548a3860cdb1 tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] [instance: ce59c937-fc0b-464f-baaa-461c6f6c2d57] Destroying instance {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 861.722687] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a81f55b-849b-4feb-8e1c-e801ecf0b854 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.731208] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-deec7b9d-0a44-4693-a38c-548a3860cdb1 tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] [instance: ce59c937-fc0b-464f-baaa-461c6f6c2d57] Powering off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 861.732020] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-33ad090a-1b2a-4139-aae6-1956142a9267 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.743030] env[61839]: DEBUG oslo_vmware.api [None req-deec7b9d-0a44-4693-a38c-548a3860cdb1 tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Waiting for the task: (returnval){ [ 861.743030] env[61839]: value = "task-1314544" [ 861.743030] env[61839]: _type = "Task" [ 861.743030] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 861.750825] env[61839]: DEBUG oslo_vmware.api [None req-deec7b9d-0a44-4693-a38c-548a3860cdb1 tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Task: {'id': task-1314544, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.790751] env[61839]: DEBUG oslo_concurrency.lockutils [None req-ddc0f150-48b1-405a-990f-af9b8e487cda tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Acquiring lock "c180cc04-79da-4529-a905-1985a85b7cf5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 861.790751] env[61839]: DEBUG oslo_concurrency.lockutils [None req-ddc0f150-48b1-405a-990f-af9b8e487cda tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Lock "c180cc04-79da-4529-a905-1985a85b7cf5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 861.790751] env[61839]: DEBUG oslo_concurrency.lockutils [None req-ddc0f150-48b1-405a-990f-af9b8e487cda tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Acquiring lock "c180cc04-79da-4529-a905-1985a85b7cf5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 861.790751] env[61839]: DEBUG oslo_concurrency.lockutils [None req-ddc0f150-48b1-405a-990f-af9b8e487cda tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Lock "c180cc04-79da-4529-a905-1985a85b7cf5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 861.790914] env[61839]: DEBUG oslo_concurrency.lockutils [None req-ddc0f150-48b1-405a-990f-af9b8e487cda tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Lock "c180cc04-79da-4529-a905-1985a85b7cf5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 861.793073] env[61839]: INFO nova.compute.manager [None req-ddc0f150-48b1-405a-990f-af9b8e487cda tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] [instance: c180cc04-79da-4529-a905-1985a85b7cf5] Terminating instance [ 861.794918] env[61839]: DEBUG nova.compute.manager [None req-ddc0f150-48b1-405a-990f-af9b8e487cda tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] [instance: c180cc04-79da-4529-a905-1985a85b7cf5] Start destroying the instance on the hypervisor. 
{{(pid=61839) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 861.795271] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-ddc0f150-48b1-405a-990f-af9b8e487cda tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] [instance: c180cc04-79da-4529-a905-1985a85b7cf5] Destroying instance {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 861.796068] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9711002b-9a0f-4112-b330-f4fa2cdcedfd {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.804621] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-ddc0f150-48b1-405a-990f-af9b8e487cda tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] [instance: c180cc04-79da-4529-a905-1985a85b7cf5] Powering off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 861.804915] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-92d3a140-0e67-4fa8-a53c-76429751a5fd {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.812315] env[61839]: DEBUG oslo_vmware.api [None req-ddc0f150-48b1-405a-990f-af9b8e487cda tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Waiting for the task: (returnval){ [ 861.812315] env[61839]: value = "task-1314545" [ 861.812315] env[61839]: _type = "Task" [ 861.812315] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 861.825255] env[61839]: DEBUG oslo_vmware.api [None req-6de79a71-94ce-468a-ba01-6c81e70cb056 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Task: {'id': task-1314541, 'name': ReconfigVM_Task, 'duration_secs': 0.716796} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 861.828549] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-6de79a71-94ce-468a-ba01-6c81e70cb056 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: ef49a6f5-27c3-4595-af65-d6a5aa47d4e4] Reconfigured VM instance instance-0000004a to attach disk [datastore1] ef49a6f5-27c3-4595-af65-d6a5aa47d4e4/ef49a6f5-27c3-4595-af65-d6a5aa47d4e4.vmdk or device None with type sparse {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 861.829370] env[61839]: DEBUG oslo_vmware.api [None req-ddc0f150-48b1-405a-990f-af9b8e487cda tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Task: {'id': task-1314545, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.829668] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a0c9d5f7-c484-431e-95eb-26dfaed7164b {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.838621] env[61839]: DEBUG oslo_vmware.api [None req-6de79a71-94ce-468a-ba01-6c81e70cb056 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Waiting for the task: (returnval){ [ 861.838621] env[61839]: value = "task-1314546" [ 861.838621] env[61839]: _type = "Task" [ 861.838621] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 861.850798] env[61839]: DEBUG oslo_vmware.api [None req-6de79a71-94ce-468a-ba01-6c81e70cb056 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Task: {'id': task-1314546, 'name': Rename_Task} progress is 5%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.885258] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314542, 'name': CreateVM_Task, 'duration_secs': 0.441835} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 861.886030] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 56369316-a445-4a2a-a0a6-967074104e19] Created VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 861.886212] env[61839]: DEBUG oslo_concurrency.lockutils [None req-e98b0865-779a-457f-bdd0-ebf545e9b50c tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 861.886479] env[61839]: DEBUG oslo_concurrency.lockutils [None req-e98b0865-779a-457f-bdd0-ebf545e9b50c tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 861.886860] env[61839]: DEBUG oslo_concurrency.lockutils [None req-e98b0865-779a-457f-bdd0-ebf545e9b50c tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 861.887227] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9b5ee603-ecf5-4fab-90e6-f55613aea0d6 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.899662] env[61839]: DEBUG oslo_vmware.api [None req-049d7e44-9b50-45d0-afa0-68a572684801 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Task: {'id': task-1314543, 'name': PowerOffVM_Task, 'duration_secs': 0.292115} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 861.901419] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-049d7e44-9b50-45d0-afa0-68a572684801 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] [instance: c996d7db-4b73-4445-9989-4efb2cd852e8] Powered off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 861.901659] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-049d7e44-9b50-45d0-afa0-68a572684801 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] [instance: c996d7db-4b73-4445-9989-4efb2cd852e8] Unregistering the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 861.902148] env[61839]: DEBUG oslo_vmware.api [None req-e98b0865-779a-457f-bdd0-ebf545e9b50c tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Waiting for the task: (returnval){ [ 861.902148] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]522ec16e-a725-3bbc-224b-b6c83c5046d8" [ 861.902148] env[61839]: _type = "Task" [ 861.902148] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 861.902439] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-edbe633b-64c2-40c8-a5b0-04a606ba5342 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.917373] env[61839]: DEBUG oslo_vmware.api [None req-e98b0865-779a-457f-bdd0-ebf545e9b50c tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]522ec16e-a725-3bbc-224b-b6c83c5046d8, 'name': SearchDatastore_Task, 'duration_secs': 0.015042} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 861.917728] env[61839]: DEBUG oslo_concurrency.lockutils [None req-e98b0865-779a-457f-bdd0-ebf545e9b50c tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 861.917971] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-e98b0865-779a-457f-bdd0-ebf545e9b50c tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 56369316-a445-4a2a-a0a6-967074104e19] Processing image e497cc62-282a-4a70-9770-22d80d8a1013 {{(pid=61839) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 861.918316] env[61839]: DEBUG oslo_concurrency.lockutils [None req-e98b0865-779a-457f-bdd0-ebf545e9b50c tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 861.918507] env[61839]: DEBUG oslo_concurrency.lockutils [None req-e98b0865-779a-457f-bdd0-ebf545e9b50c tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 861.918700] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-e98b0865-779a-457f-bdd0-ebf545e9b50c tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 861.918979] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5b5739b5-6cec-46be-9355-38775f45b25d {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.931513] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-e98b0865-779a-457f-bdd0-ebf545e9b50c tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 861.931513] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-e98b0865-779a-457f-bdd0-ebf545e9b50c tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61839) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 861.931905] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f7b2c1c4-cff1-4db3-a91b-1cadba90319c {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.939595] env[61839]: DEBUG oslo_vmware.api [None req-e98b0865-779a-457f-bdd0-ebf545e9b50c tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Waiting for the task: (returnval){ [ 861.939595] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52af938e-1f94-c3ba-7d94-2e2e5972c234" [ 861.939595] env[61839]: _type = "Task" [ 861.939595] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 861.951297] env[61839]: DEBUG oslo_vmware.api [None req-e98b0865-779a-457f-bdd0-ebf545e9b50c tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52af938e-1f94-c3ba-7d94-2e2e5972c234, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.988910] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-049d7e44-9b50-45d0-afa0-68a572684801 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] [instance: c996d7db-4b73-4445-9989-4efb2cd852e8] Unregistered the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 861.989297] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-049d7e44-9b50-45d0-afa0-68a572684801 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] [instance: c996d7db-4b73-4445-9989-4efb2cd852e8] Deleting contents of the VM from datastore datastore1 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 861.989591] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-049d7e44-9b50-45d0-afa0-68a572684801 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Deleting the datastore file [datastore1] c996d7db-4b73-4445-9989-4efb2cd852e8 {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 861.989945] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3cb5a990-9de5-4393-baa1-8ff5be1d30f7 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.999145] env[61839]: DEBUG oslo_vmware.api [None req-049d7e44-9b50-45d0-afa0-68a572684801 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Waiting for the task: (returnval){ [ 861.999145] env[61839]: value = "task-1314548" [ 861.999145] env[61839]: _type = "Task" [ 861.999145] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 862.013378] env[61839]: DEBUG oslo_vmware.api [None req-049d7e44-9b50-45d0-afa0-68a572684801 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Task: {'id': task-1314548, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.101230] env[61839]: DEBUG oslo_concurrency.lockutils [None req-1496383a-3c58-427a-8916-58f524015f47 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] Acquiring lock "2cb53e37-8b0b-48b7-a973-061b91df46df" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 862.101575] env[61839]: DEBUG oslo_concurrency.lockutils [None req-1496383a-3c58-427a-8916-58f524015f47 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] Lock "2cb53e37-8b0b-48b7-a973-061b91df46df" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 862.101772] env[61839]: DEBUG oslo_concurrency.lockutils [None req-1496383a-3c58-427a-8916-58f524015f47 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] Acquiring lock "2cb53e37-8b0b-48b7-a973-061b91df46df-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 862.101988] env[61839]: DEBUG oslo_concurrency.lockutils [None req-1496383a-3c58-427a-8916-58f524015f47 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] Lock "2cb53e37-8b0b-48b7-a973-061b91df46df-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 862.102265] env[61839]: DEBUG oslo_concurrency.lockutils [None req-1496383a-3c58-427a-8916-58f524015f47 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] Lock "2cb53e37-8b0b-48b7-a973-061b91df46df-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 862.104640] env[61839]: INFO nova.compute.manager [None req-1496383a-3c58-427a-8916-58f524015f47 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] [instance: 2cb53e37-8b0b-48b7-a973-061b91df46df] Terminating instance [ 862.109920] env[61839]: DEBUG nova.compute.manager [None req-1496383a-3c58-427a-8916-58f524015f47 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] [instance: 2cb53e37-8b0b-48b7-a973-061b91df46df] Start destroying the instance on the hypervisor. 
{{(pid=61839) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 862.109920] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-1496383a-3c58-427a-8916-58f524015f47 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] [instance: 2cb53e37-8b0b-48b7-a973-061b91df46df] Destroying instance {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 862.109920] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-baaa3020-f428-44ea-8d6b-2a998118abae {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.119605] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-1496383a-3c58-427a-8916-58f524015f47 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] [instance: 2cb53e37-8b0b-48b7-a973-061b91df46df] Powering off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 862.119859] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d1f2f098-b24f-4103-a6a0-22cc0ab5c575 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.127993] env[61839]: DEBUG nova.scheduler.client.report [None req-0b723d18-6a91-468b-b8d0-3e444fa3d9c3 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 862.131474] env[61839]: DEBUG oslo_vmware.api [None req-1496383a-3c58-427a-8916-58f524015f47 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] Waiting for the task: (returnval){ [ 862.131474] env[61839]: value = "task-1314549" [ 862.131474] env[61839]: _type = "Task" [ 862.131474] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 862.145861] env[61839]: DEBUG oslo_vmware.api [None req-1496383a-3c58-427a-8916-58f524015f47 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] Task: {'id': task-1314549, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.163249] env[61839]: DEBUG nova.network.neutron [req-09523185-7233-4ea1-a603-477fe42d3679 req-55e6d6b3-914c-4bd0-ab63-d0fdea966c51 service nova] [instance: 56369316-a445-4a2a-a0a6-967074104e19] Updated VIF entry in instance network info cache for port ef2288b6-c4de-43f8-95c9-22511e164c36. 
{{(pid=61839) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 862.163633] env[61839]: DEBUG nova.network.neutron [req-09523185-7233-4ea1-a603-477fe42d3679 req-55e6d6b3-914c-4bd0-ab63-d0fdea966c51 service nova] [instance: 56369316-a445-4a2a-a0a6-967074104e19] Updating instance_info_cache with network_info: [{"id": "ef2288b6-c4de-43f8-95c9-22511e164c36", "address": "fa:16:3e:c7:71:6a", "network": {"id": "6f1d4bda-c333-4556-9530-df719c058a5b", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1248001983-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "223d94c193814f649b5d1f35e3756071", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2180b40f-2bb0-47da-ba80-c2fbe7f98af0", "external-id": "nsx-vlan-transportzone-970", "segmentation_id": 970, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapef2288b6-c4", "ovs_interfaceid": "ef2288b6-c4de-43f8-95c9-22511e164c36", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 862.199142] env[61839]: DEBUG nova.compute.manager [None req-a6fa6e9f-3ab4-4eb2-8dbd-9e4dcff4ce69 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: e47c08c6-5de3-48b0-8327-57ddb273555f] Start spawning the instance on the hypervisor. 
{{(pid=61839) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 862.228314] env[61839]: DEBUG nova.virt.hardware [None req-a6fa6e9f-3ab4-4eb2-8dbd-9e4dcff4ce69 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-18T16:51:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-18T16:50:57Z,direct_url=,disk_format='vmdk',id=e497cc62-282a-4a70-9770-22d80d8a1013,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a98e034276e44534a9feace637762da7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-18T16:50:58Z,virtual_size=,visibility=), allow threads: False {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 862.228583] env[61839]: DEBUG nova.virt.hardware [None req-a6fa6e9f-3ab4-4eb2-8dbd-9e4dcff4ce69 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Flavor limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 862.228772] env[61839]: DEBUG nova.virt.hardware [None req-a6fa6e9f-3ab4-4eb2-8dbd-9e4dcff4ce69 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Image limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 862.228963] env[61839]: DEBUG nova.virt.hardware [None req-a6fa6e9f-3ab4-4eb2-8dbd-9e4dcff4ce69 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Flavor pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 862.229143] env[61839]: DEBUG nova.virt.hardware [None req-a6fa6e9f-3ab4-4eb2-8dbd-9e4dcff4ce69 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Image pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 862.229294] env[61839]: DEBUG nova.virt.hardware [None req-a6fa6e9f-3ab4-4eb2-8dbd-9e4dcff4ce69 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 862.229568] env[61839]: DEBUG nova.virt.hardware [None req-a6fa6e9f-3ab4-4eb2-8dbd-9e4dcff4ce69 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 862.229765] env[61839]: DEBUG nova.virt.hardware [None req-a6fa6e9f-3ab4-4eb2-8dbd-9e4dcff4ce69 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 862.229946] env[61839]: DEBUG nova.virt.hardware [None req-a6fa6e9f-3ab4-4eb2-8dbd-9e4dcff4ce69 tempest-ServersTestJSON-2052072083 
tempest-ServersTestJSON-2052072083-project-member] Got 1 possible topologies {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 862.230132] env[61839]: DEBUG nova.virt.hardware [None req-a6fa6e9f-3ab4-4eb2-8dbd-9e4dcff4ce69 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 862.230366] env[61839]: DEBUG nova.virt.hardware [None req-a6fa6e9f-3ab4-4eb2-8dbd-9e4dcff4ce69 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 862.231196] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fb1bc87-a166-493b-b5ba-1d836a06ac36 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.240272] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e78b875-df4d-49cb-9104-2f488e9253c8 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.260863] env[61839]: DEBUG oslo_vmware.api [None req-deec7b9d-0a44-4693-a38c-548a3860cdb1 tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Task: {'id': task-1314544, 'name': PowerOffVM_Task, 'duration_secs': 0.230852} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 862.261409] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-deec7b9d-0a44-4693-a38c-548a3860cdb1 tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] [instance: ce59c937-fc0b-464f-baaa-461c6f6c2d57] Powered off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 862.261592] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-deec7b9d-0a44-4693-a38c-548a3860cdb1 tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] [instance: ce59c937-fc0b-464f-baaa-461c6f6c2d57] Unregistering the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 862.261853] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5afd80c3-86fd-48b8-98dd-f7e12948cf80 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.327489] env[61839]: DEBUG oslo_vmware.api [None req-ddc0f150-48b1-405a-990f-af9b8e487cda tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Task: {'id': task-1314545, 'name': PowerOffVM_Task, 'duration_secs': 0.169801} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 862.327864] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-ddc0f150-48b1-405a-990f-af9b8e487cda tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] [instance: c180cc04-79da-4529-a905-1985a85b7cf5] Powered off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 862.328076] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-ddc0f150-48b1-405a-990f-af9b8e487cda tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] [instance: c180cc04-79da-4529-a905-1985a85b7cf5] Unregistering the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 862.328467] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c388edf4-35a0-460c-8727-ed78a57aaa40 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.336651] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-deec7b9d-0a44-4693-a38c-548a3860cdb1 tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] [instance: ce59c937-fc0b-464f-baaa-461c6f6c2d57] Unregistered the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 862.336913] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-deec7b9d-0a44-4693-a38c-548a3860cdb1 tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] [instance: ce59c937-fc0b-464f-baaa-461c6f6c2d57] Deleting contents of the VM from datastore datastore2 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 862.337133] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-deec7b9d-0a44-4693-a38c-548a3860cdb1 tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Deleting the datastore file [datastore2] ce59c937-fc0b-464f-baaa-461c6f6c2d57 {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 862.337427] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5cb58b2c-2d72-4e29-871f-00dd5dcc71b4 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.349921] env[61839]: DEBUG oslo_vmware.api [None req-6de79a71-94ce-468a-ba01-6c81e70cb056 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Task: {'id': task-1314546, 'name': Rename_Task, 'duration_secs': 0.19455} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 862.351292] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-6de79a71-94ce-468a-ba01-6c81e70cb056 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: ef49a6f5-27c3-4595-af65-d6a5aa47d4e4] Powering on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 862.351644] env[61839]: DEBUG oslo_vmware.api [None req-deec7b9d-0a44-4693-a38c-548a3860cdb1 tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Waiting for the task: (returnval){ [ 862.351644] env[61839]: value = "task-1314552" [ 862.351644] env[61839]: _type = "Task" [ 862.351644] env[61839]: } to complete. 
{{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 862.351848] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-536c1196-ff63-4747-947a-fbb54cd9a77a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.362313] env[61839]: DEBUG oslo_vmware.api [None req-deec7b9d-0a44-4693-a38c-548a3860cdb1 tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Task: {'id': task-1314552, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.363647] env[61839]: DEBUG oslo_vmware.api [None req-6de79a71-94ce-468a-ba01-6c81e70cb056 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Waiting for the task: (returnval){ [ 862.363647] env[61839]: value = "task-1314553" [ 862.363647] env[61839]: _type = "Task" [ 862.363647] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 862.371352] env[61839]: DEBUG oslo_vmware.api [None req-6de79a71-94ce-468a-ba01-6c81e70cb056 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Task: {'id': task-1314553, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.419830] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-ddc0f150-48b1-405a-990f-af9b8e487cda tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] [instance: c180cc04-79da-4529-a905-1985a85b7cf5] Unregistered the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 862.420187] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-ddc0f150-48b1-405a-990f-af9b8e487cda tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] [instance: c180cc04-79da-4529-a905-1985a85b7cf5] Deleting contents of the VM from datastore datastore2 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 862.420409] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-ddc0f150-48b1-405a-990f-af9b8e487cda tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Deleting the datastore file [datastore2] c180cc04-79da-4529-a905-1985a85b7cf5 {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 862.420737] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-375c9fe6-ff18-441e-80dd-4361a654b402 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.428777] env[61839]: DEBUG oslo_vmware.api [None req-ddc0f150-48b1-405a-990f-af9b8e487cda tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Waiting for the task: (returnval){ [ 862.428777] env[61839]: value = "task-1314554" [ 862.428777] env[61839]: _type = "Task" [ 862.428777] env[61839]: } to complete. 
{{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 862.437672] env[61839]: DEBUG oslo_vmware.api [None req-ddc0f150-48b1-405a-990f-af9b8e487cda tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Task: {'id': task-1314554, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.449090] env[61839]: DEBUG oslo_vmware.api [None req-e98b0865-779a-457f-bdd0-ebf545e9b50c tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52af938e-1f94-c3ba-7d94-2e2e5972c234, 'name': SearchDatastore_Task, 'duration_secs': 0.013873} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 862.449883] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-51df6648-a45a-40ac-baab-c1f07a2a9b64 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.455307] env[61839]: DEBUG oslo_vmware.api [None req-e98b0865-779a-457f-bdd0-ebf545e9b50c tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Waiting for the task: (returnval){ [ 862.455307] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]5244e62d-6905-f7e0-8a5c-fcaca703d286" [ 862.455307] env[61839]: _type = "Task" [ 862.455307] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 862.462872] env[61839]: DEBUG oslo_vmware.api [None req-e98b0865-779a-457f-bdd0-ebf545e9b50c tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]5244e62d-6905-f7e0-8a5c-fcaca703d286, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.513312] env[61839]: DEBUG oslo_vmware.api [None req-049d7e44-9b50-45d0-afa0-68a572684801 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Task: {'id': task-1314548, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.147566} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 862.513697] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-049d7e44-9b50-45d0-afa0-68a572684801 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Deleted the datastore file {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 862.513998] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-049d7e44-9b50-45d0-afa0-68a572684801 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] [instance: c996d7db-4b73-4445-9989-4efb2cd852e8] Deleted contents of the VM from datastore datastore1 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 862.514263] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-049d7e44-9b50-45d0-afa0-68a572684801 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] [instance: c996d7db-4b73-4445-9989-4efb2cd852e8] Instance destroyed {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 862.514519] env[61839]: INFO nova.compute.manager [None req-049d7e44-9b50-45d0-afa0-68a572684801 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] [instance: c996d7db-4b73-4445-9989-4efb2cd852e8] Took 1.15 seconds to destroy the instance on the hypervisor. [ 862.514828] env[61839]: DEBUG oslo.service.loopingcall [None req-049d7e44-9b50-45d0-afa0-68a572684801 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 862.515053] env[61839]: DEBUG nova.compute.manager [-] [instance: c996d7db-4b73-4445-9989-4efb2cd852e8] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 862.515162] env[61839]: DEBUG nova.network.neutron [-] [instance: c996d7db-4b73-4445-9989-4efb2cd852e8] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 862.563746] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 862.563994] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 862.636734] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0b723d18-6a91-468b-b8d0-3e444fa3d9c3 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.465s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 862.637296] env[61839]: DEBUG nova.compute.manager [None req-0b723d18-6a91-468b-b8d0-3e444fa3d9c3 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: a4463efc-ffca-4552-a072-cbf5fe062533] Start building networks asynchronously for instance. {{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 862.640992] env[61839]: DEBUG oslo_concurrency.lockutils [None req-4a30a124-7425-49b0-9db2-f6c7b005cfc6 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 16.058s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 862.640992] env[61839]: DEBUG nova.objects.instance [None req-4a30a124-7425-49b0-9db2-f6c7b005cfc6 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Lazy-loading 'resources' on Instance uuid 047080fa-8781-47b1-89d8-2e4c8031b164 {{(pid=61839) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 862.647533] env[61839]: DEBUG oslo_vmware.api [None req-1496383a-3c58-427a-8916-58f524015f47 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] Task: {'id': task-1314549, 'name': PowerOffVM_Task, 'duration_secs': 0.220448} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 862.647863] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-1496383a-3c58-427a-8916-58f524015f47 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] [instance: 2cb53e37-8b0b-48b7-a973-061b91df46df] Powered off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 862.647863] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-1496383a-3c58-427a-8916-58f524015f47 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] [instance: 2cb53e37-8b0b-48b7-a973-061b91df46df] Unregistering the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 862.648389] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e1a3d6fa-7152-4a75-93a3-8439d08cb65c {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.667684] env[61839]: DEBUG oslo_concurrency.lockutils [req-09523185-7233-4ea1-a603-477fe42d3679 req-55e6d6b3-914c-4bd0-ab63-d0fdea966c51 service nova] Releasing lock "refresh_cache-56369316-a445-4a2a-a0a6-967074104e19" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 862.768657] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-1496383a-3c58-427a-8916-58f524015f47 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] [instance: 2cb53e37-8b0b-48b7-a973-061b91df46df] Unregistered the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 862.768657] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-1496383a-3c58-427a-8916-58f524015f47 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] [instance: 2cb53e37-8b0b-48b7-a973-061b91df46df] Deleting contents of the VM from datastore datastore2 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 862.768657] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-1496383a-3c58-427a-8916-58f524015f47 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] Deleting the datastore file [datastore2] 2cb53e37-8b0b-48b7-a973-061b91df46df {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 862.768657] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6644b6b3-8550-4e92-9573-ca3c6c12b2bc {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.778034] env[61839]: DEBUG oslo_vmware.api [None req-1496383a-3c58-427a-8916-58f524015f47 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] Waiting for the task: (returnval){ [ 862.778034] env[61839]: value = "task-1314556" [ 862.778034] env[61839]: _type = "Task" [ 862.778034] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 862.787645] env[61839]: DEBUG oslo_vmware.api [None req-1496383a-3c58-427a-8916-58f524015f47 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] Task: {'id': task-1314556, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.869022] env[61839]: DEBUG oslo_vmware.api [None req-deec7b9d-0a44-4693-a38c-548a3860cdb1 tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Task: {'id': task-1314552, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.146218} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 862.869667] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-deec7b9d-0a44-4693-a38c-548a3860cdb1 tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Deleted the datastore file {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 862.870136] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-deec7b9d-0a44-4693-a38c-548a3860cdb1 tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] [instance: ce59c937-fc0b-464f-baaa-461c6f6c2d57] Deleted contents of the VM from datastore datastore2 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 862.870536] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-deec7b9d-0a44-4693-a38c-548a3860cdb1 tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] [instance: ce59c937-fc0b-464f-baaa-461c6f6c2d57] Instance destroyed {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 862.873104] env[61839]: INFO nova.compute.manager [None req-deec7b9d-0a44-4693-a38c-548a3860cdb1 tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] [instance: ce59c937-fc0b-464f-baaa-461c6f6c2d57] Took 1.15 seconds to destroy the instance on the hypervisor. [ 862.873104] env[61839]: DEBUG oslo.service.loopingcall [None req-deec7b9d-0a44-4693-a38c-548a3860cdb1 tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 862.873104] env[61839]: DEBUG nova.compute.manager [-] [instance: ce59c937-fc0b-464f-baaa-461c6f6c2d57] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 862.873104] env[61839]: DEBUG nova.network.neutron [-] [instance: ce59c937-fc0b-464f-baaa-461c6f6c2d57] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 862.879340] env[61839]: DEBUG oslo_vmware.api [None req-6de79a71-94ce-468a-ba01-6c81e70cb056 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Task: {'id': task-1314553, 'name': PowerOnVM_Task, 'duration_secs': 0.473827} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 862.879836] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-6de79a71-94ce-468a-ba01-6c81e70cb056 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: ef49a6f5-27c3-4595-af65-d6a5aa47d4e4] Powered on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 862.880307] env[61839]: INFO nova.compute.manager [None req-6de79a71-94ce-468a-ba01-6c81e70cb056 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: ef49a6f5-27c3-4595-af65-d6a5aa47d4e4] Took 8.53 seconds to spawn the instance on the hypervisor. [ 862.880697] env[61839]: DEBUG nova.compute.manager [None req-6de79a71-94ce-468a-ba01-6c81e70cb056 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: ef49a6f5-27c3-4595-af65-d6a5aa47d4e4] Checking state {{(pid=61839) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 862.881743] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5738cc1-c54d-453f-851f-c07179f8f313 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.939857] env[61839]: DEBUG oslo_vmware.api [None req-ddc0f150-48b1-405a-990f-af9b8e487cda tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Task: {'id': task-1314554, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.18105} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 862.940170] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-ddc0f150-48b1-405a-990f-af9b8e487cda tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Deleted the datastore file {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 862.940373] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-ddc0f150-48b1-405a-990f-af9b8e487cda tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] [instance: c180cc04-79da-4529-a905-1985a85b7cf5] Deleted contents of the VM from datastore datastore2 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 862.940558] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-ddc0f150-48b1-405a-990f-af9b8e487cda tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] [instance: c180cc04-79da-4529-a905-1985a85b7cf5] Instance destroyed {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 862.940737] env[61839]: INFO nova.compute.manager [None req-ddc0f150-48b1-405a-990f-af9b8e487cda tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] [instance: c180cc04-79da-4529-a905-1985a85b7cf5] Took 1.15 seconds to destroy the instance on the hypervisor. [ 862.940986] env[61839]: DEBUG oslo.service.loopingcall [None req-ddc0f150-48b1-405a-990f-af9b8e487cda tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 862.941465] env[61839]: DEBUG nova.compute.manager [-] [instance: c180cc04-79da-4529-a905-1985a85b7cf5] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 862.941566] env[61839]: DEBUG nova.network.neutron [-] [instance: c180cc04-79da-4529-a905-1985a85b7cf5] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 862.966186] env[61839]: DEBUG oslo_vmware.api [None req-e98b0865-779a-457f-bdd0-ebf545e9b50c tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]5244e62d-6905-f7e0-8a5c-fcaca703d286, 'name': SearchDatastore_Task, 'duration_secs': 0.019258} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 862.966901] env[61839]: DEBUG oslo_concurrency.lockutils [None req-e98b0865-779a-457f-bdd0-ebf545e9b50c tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 862.967254] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-e98b0865-779a-457f-bdd0-ebf545e9b50c tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk to [datastore1] 56369316-a445-4a2a-a0a6-967074104e19/56369316-a445-4a2a-a0a6-967074104e19.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 862.967599] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b88b0c44-9cb5-44ae-9eb7-78820fca025a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.976067] env[61839]: DEBUG oslo_vmware.api [None req-e98b0865-779a-457f-bdd0-ebf545e9b50c tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Waiting for the task: (returnval){ [ 862.976067] env[61839]: value = "task-1314557" [ 862.976067] env[61839]: _type = "Task" [ 862.976067] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 862.984774] env[61839]: DEBUG oslo_vmware.api [None req-e98b0865-779a-457f-bdd0-ebf545e9b50c tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Task: {'id': task-1314557, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.073290] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 863.073290] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Starting heal instance info cache {{(pid=61839) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 863.073290] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Rebuilding the list of instances to heal {{(pid=61839) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 863.147318] env[61839]: DEBUG nova.compute.utils [None req-0b723d18-6a91-468b-b8d0-3e444fa3d9c3 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Using /dev/sd instead of None {{(pid=61839) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 863.155179] env[61839]: DEBUG nova.compute.manager [None req-0b723d18-6a91-468b-b8d0-3e444fa3d9c3 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: a4463efc-ffca-4552-a072-cbf5fe062533] Allocating IP information in the background. {{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 863.155829] env[61839]: DEBUG nova.network.neutron [None req-0b723d18-6a91-468b-b8d0-3e444fa3d9c3 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: a4463efc-ffca-4552-a072-cbf5fe062533] allocate_for_instance() {{(pid=61839) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 863.293389] env[61839]: DEBUG oslo_vmware.api [None req-1496383a-3c58-427a-8916-58f524015f47 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] Task: {'id': task-1314556, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.153783} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 863.299389] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-1496383a-3c58-427a-8916-58f524015f47 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] Deleted the datastore file {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 863.299660] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-1496383a-3c58-427a-8916-58f524015f47 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] [instance: 2cb53e37-8b0b-48b7-a973-061b91df46df] Deleted contents of the VM from datastore datastore2 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 863.299865] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-1496383a-3c58-427a-8916-58f524015f47 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] [instance: 2cb53e37-8b0b-48b7-a973-061b91df46df] Instance destroyed {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 863.300213] env[61839]: INFO nova.compute.manager [None req-1496383a-3c58-427a-8916-58f524015f47 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] [instance: 2cb53e37-8b0b-48b7-a973-061b91df46df] Took 1.19 seconds to destroy the instance on the hypervisor. [ 863.300516] env[61839]: DEBUG oslo.service.loopingcall [None req-1496383a-3c58-427a-8916-58f524015f47 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 863.301300] env[61839]: DEBUG nova.compute.manager [-] [instance: 2cb53e37-8b0b-48b7-a973-061b91df46df] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 863.301407] env[61839]: DEBUG nova.network.neutron [-] [instance: 2cb53e37-8b0b-48b7-a973-061b91df46df] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 863.377755] env[61839]: DEBUG nova.policy [None req-0b723d18-6a91-468b-b8d0-3e444fa3d9c3 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '430b14eaa0e94ef39fb0f95269448ade', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '25686a503d044467a1d641f14e14c65c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61839) authorize /opt/stack/nova/nova/policy.py:201}} [ 863.411666] env[61839]: INFO nova.compute.manager [None req-6de79a71-94ce-468a-ba01-6c81e70cb056 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: ef49a6f5-27c3-4595-af65-d6a5aa47d4e4] Took 29.70 seconds to build instance. 
[ 863.494325] env[61839]: DEBUG oslo_vmware.api [None req-e98b0865-779a-457f-bdd0-ebf545e9b50c tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Task: {'id': task-1314557, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.536066] env[61839]: DEBUG nova.network.neutron [None req-a6fa6e9f-3ab4-4eb2-8dbd-9e4dcff4ce69 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: e47c08c6-5de3-48b0-8327-57ddb273555f] Successfully updated port: c47afc9a-8a60-4c82-9548-b644e8e26492 {{(pid=61839) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 863.574139] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d28b1094-980e-4c9a-9a08-fb426f1dbd25 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.585503] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] [instance: 2cb53e37-8b0b-48b7-a973-061b91df46df] Skipping network cache update for instance because it is being deleted. {{(pid=61839) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 863.585503] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] [instance: ce59c937-fc0b-464f-baaa-461c6f6c2d57] Skipping network cache update for instance because it is being deleted. {{(pid=61839) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 863.585503] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] [instance: c180cc04-79da-4529-a905-1985a85b7cf5] Skipping network cache update for instance because it is being deleted. {{(pid=61839) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 863.585503] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] [instance: c996d7db-4b73-4445-9989-4efb2cd852e8] Skipping network cache update for instance because it is being deleted. {{(pid=61839) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 863.585503] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] [instance: 56369316-a445-4a2a-a0a6-967074104e19] Skipping network cache update for instance because it is Building. {{(pid=61839) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 863.585735] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] [instance: e47c08c6-5de3-48b0-8327-57ddb273555f] Skipping network cache update for instance because it is Building. {{(pid=61839) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 863.585735] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] [instance: a4463efc-ffca-4552-a072-cbf5fe062533] Skipping network cache update for instance because it is Building. 
{{(pid=61839) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 863.595294] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5cd9a1b-7b1e-4381-a8d6-354dbbac893b {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.630156] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad4d5708-62d9-4fe2-9816-4c3f440f5ee5 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.639864] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4641af7-426e-46d1-90ad-69b6b806ab98 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.658239] env[61839]: DEBUG nova.compute.provider_tree [None req-4a30a124-7425-49b0-9db2-f6c7b005cfc6 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 863.659934] env[61839]: DEBUG nova.compute.manager [None req-0b723d18-6a91-468b-b8d0-3e444fa3d9c3 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: a4463efc-ffca-4552-a072-cbf5fe062533] Start building block device mappings for instance. {{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 863.676650] env[61839]: DEBUG oslo_concurrency.lockutils [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Acquiring lock "refresh_cache-5c29c188-a34b-4751-9f8b-166af7b15088" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 863.676799] env[61839]: DEBUG oslo_concurrency.lockutils [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Acquired lock "refresh_cache-5c29c188-a34b-4751-9f8b-166af7b15088" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 863.676950] env[61839]: DEBUG nova.network.neutron [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] [instance: 5c29c188-a34b-4751-9f8b-166af7b15088] Forcefully refreshing network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 863.677127] env[61839]: DEBUG nova.objects.instance [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Lazy-loading 'info_cache' on Instance uuid 5c29c188-a34b-4751-9f8b-166af7b15088 {{(pid=61839) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 863.718847] env[61839]: DEBUG nova.compute.manager [req-2d134ee4-9e27-4109-bdb3-cce9a5115869 req-4b350030-5d30-4cf0-9ff4-6c8972324868 service nova] [instance: e47c08c6-5de3-48b0-8327-57ddb273555f] Received event network-vif-plugged-c47afc9a-8a60-4c82-9548-b644e8e26492 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 863.719156] env[61839]: DEBUG oslo_concurrency.lockutils [req-2d134ee4-9e27-4109-bdb3-cce9a5115869 req-4b350030-5d30-4cf0-9ff4-6c8972324868 service nova] Acquiring lock "e47c08c6-5de3-48b0-8327-57ddb273555f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61839) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 863.719336] env[61839]: DEBUG oslo_concurrency.lockutils [req-2d134ee4-9e27-4109-bdb3-cce9a5115869 req-4b350030-5d30-4cf0-9ff4-6c8972324868 service nova] Lock "e47c08c6-5de3-48b0-8327-57ddb273555f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 863.719572] env[61839]: DEBUG oslo_concurrency.lockutils [req-2d134ee4-9e27-4109-bdb3-cce9a5115869 req-4b350030-5d30-4cf0-9ff4-6c8972324868 service nova] Lock "e47c08c6-5de3-48b0-8327-57ddb273555f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 863.719842] env[61839]: DEBUG nova.compute.manager [req-2d134ee4-9e27-4109-bdb3-cce9a5115869 req-4b350030-5d30-4cf0-9ff4-6c8972324868 service nova] [instance: e47c08c6-5de3-48b0-8327-57ddb273555f] No waiting events found dispatching network-vif-plugged-c47afc9a-8a60-4c82-9548-b644e8e26492 {{(pid=61839) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 863.720057] env[61839]: WARNING nova.compute.manager [req-2d134ee4-9e27-4109-bdb3-cce9a5115869 req-4b350030-5d30-4cf0-9ff4-6c8972324868 service nova] [instance: e47c08c6-5de3-48b0-8327-57ddb273555f] Received unexpected event network-vif-plugged-c47afc9a-8a60-4c82-9548-b644e8e26492 for instance with vm_state building and task_state spawning. [ 863.747820] env[61839]: DEBUG nova.compute.manager [req-434c5de3-d135-4e67-a602-1458f5b4a751 req-3476105f-0d48-4a23-94fd-4abd4b4a228c service nova] [instance: ce59c937-fc0b-464f-baaa-461c6f6c2d57] Received event network-vif-deleted-f7226659-074b-4580-8d02-5d637bbba250 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 863.748042] env[61839]: INFO nova.compute.manager [req-434c5de3-d135-4e67-a602-1458f5b4a751 req-3476105f-0d48-4a23-94fd-4abd4b4a228c service nova] [instance: ce59c937-fc0b-464f-baaa-461c6f6c2d57] Neutron deleted interface f7226659-074b-4580-8d02-5d637bbba250; detaching it from the instance and deleting it from the info cache [ 863.748273] env[61839]: DEBUG nova.network.neutron [req-434c5de3-d135-4e67-a602-1458f5b4a751 req-3476105f-0d48-4a23-94fd-4abd4b4a228c service nova] [instance: ce59c937-fc0b-464f-baaa-461c6f6c2d57] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 863.774646] env[61839]: DEBUG nova.compute.manager [req-cd3e5d6f-5fe8-4547-bcce-d9d8afc4fdcf req-a896ac00-2e61-4e5b-b077-254ac06989ce service nova] [instance: c996d7db-4b73-4445-9989-4efb2cd852e8] Received event network-vif-deleted-7792be0d-d99c-49ba-98bb-a2b56bf71b17 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 863.774646] env[61839]: INFO nova.compute.manager [req-cd3e5d6f-5fe8-4547-bcce-d9d8afc4fdcf req-a896ac00-2e61-4e5b-b077-254ac06989ce service nova] [instance: c996d7db-4b73-4445-9989-4efb2cd852e8] Neutron deleted interface 7792be0d-d99c-49ba-98bb-a2b56bf71b17; detaching it from the instance and deleting it from the info cache [ 863.774801] env[61839]: DEBUG nova.network.neutron [req-cd3e5d6f-5fe8-4547-bcce-d9d8afc4fdcf req-a896ac00-2e61-4e5b-b077-254ac06989ce service nova] [instance: c996d7db-4b73-4445-9989-4efb2cd852e8] Updating 
instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 863.849547] env[61839]: WARNING oslo_messaging._drivers.amqpdriver [-] Number of call queues is 11, greater than warning threshold: 10. There could be a leak. Increasing threshold to: 20 [ 863.875685] env[61839]: DEBUG nova.network.neutron [-] [instance: ce59c937-fc0b-464f-baaa-461c6f6c2d57] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 863.914499] env[61839]: DEBUG nova.network.neutron [-] [instance: c996d7db-4b73-4445-9989-4efb2cd852e8] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 863.916433] env[61839]: DEBUG oslo_concurrency.lockutils [None req-6de79a71-94ce-468a-ba01-6c81e70cb056 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Lock "ef49a6f5-27c3-4595-af65-d6a5aa47d4e4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 45.150s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 863.987863] env[61839]: DEBUG oslo_vmware.api [None req-e98b0865-779a-457f-bdd0-ebf545e9b50c tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Task: {'id': task-1314557, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.584156} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 863.988164] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-e98b0865-779a-457f-bdd0-ebf545e9b50c tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk to [datastore1] 56369316-a445-4a2a-a0a6-967074104e19/56369316-a445-4a2a-a0a6-967074104e19.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 863.988369] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-e98b0865-779a-457f-bdd0-ebf545e9b50c tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 56369316-a445-4a2a-a0a6-967074104e19] Extending root virtual disk to 1048576 {{(pid=61839) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 863.988630] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9a6194bd-a63f-4b2d-b855-3afd7abaace3 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.996979] env[61839]: DEBUG oslo_vmware.api [None req-e98b0865-779a-457f-bdd0-ebf545e9b50c tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Waiting for the task: (returnval){ [ 863.996979] env[61839]: value = "task-1314558" [ 863.996979] env[61839]: _type = "Task" [ 863.996979] env[61839]: } to complete. 
{{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 864.006079] env[61839]: DEBUG oslo_vmware.api [None req-e98b0865-779a-457f-bdd0-ebf545e9b50c tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Task: {'id': task-1314558, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 864.039443] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a6fa6e9f-3ab4-4eb2-8dbd-9e4dcff4ce69 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Acquiring lock "refresh_cache-e47c08c6-5de3-48b0-8327-57ddb273555f" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 864.039539] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a6fa6e9f-3ab4-4eb2-8dbd-9e4dcff4ce69 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Acquired lock "refresh_cache-e47c08c6-5de3-48b0-8327-57ddb273555f" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 864.039627] env[61839]: DEBUG nova.network.neutron [None req-a6fa6e9f-3ab4-4eb2-8dbd-9e4dcff4ce69 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: e47c08c6-5de3-48b0-8327-57ddb273555f] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 864.125360] env[61839]: DEBUG nova.network.neutron [None req-0b723d18-6a91-468b-b8d0-3e444fa3d9c3 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: a4463efc-ffca-4552-a072-cbf5fe062533] Successfully created port: c003fe42-e6b6-4c8d-bfa7-f1e28aadf9f8 {{(pid=61839) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 864.163438] env[61839]: DEBUG nova.scheduler.client.report [None req-4a30a124-7425-49b0-9db2-f6c7b005cfc6 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 864.253211] env[61839]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7e31a081-991b-4e23-bb8f-aba90bba99a3 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.267014] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dde35850-f369-4673-b267-0562b254e115 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.281621] env[61839]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d6216140-35ae-4686-93fd-baaefdb7ea1c {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.289477] env[61839]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7e911b1-7d19-42d6-96e7-e96a41bf2e03 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.309621] env[61839]: DEBUG nova.compute.manager [req-434c5de3-d135-4e67-a602-1458f5b4a751 req-3476105f-0d48-4a23-94fd-4abd4b4a228c service nova] [instance: ce59c937-fc0b-464f-baaa-461c6f6c2d57] Detach interface failed, port_id=f7226659-074b-4580-8d02-5d637bbba250, reason: Instance ce59c937-fc0b-464f-baaa-461c6f6c2d57 could not be found. {{(pid=61839) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 864.330974] env[61839]: DEBUG nova.network.neutron [-] [instance: 2cb53e37-8b0b-48b7-a973-061b91df46df] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 864.331754] env[61839]: DEBUG nova.compute.manager [req-cd3e5d6f-5fe8-4547-bcce-d9d8afc4fdcf req-a896ac00-2e61-4e5b-b077-254ac06989ce service nova] [instance: c996d7db-4b73-4445-9989-4efb2cd852e8] Detach interface failed, port_id=7792be0d-d99c-49ba-98bb-a2b56bf71b17, reason: Instance c996d7db-4b73-4445-9989-4efb2cd852e8 could not be found. {{(pid=61839) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 864.351410] env[61839]: DEBUG nova.network.neutron [-] [instance: c180cc04-79da-4529-a905-1985a85b7cf5] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 864.379840] env[61839]: INFO nova.compute.manager [-] [instance: ce59c937-fc0b-464f-baaa-461c6f6c2d57] Took 1.51 seconds to deallocate network for instance. [ 864.419206] env[61839]: INFO nova.compute.manager [-] [instance: c996d7db-4b73-4445-9989-4efb2cd852e8] Took 1.90 seconds to deallocate network for instance. [ 864.507378] env[61839]: DEBUG oslo_vmware.api [None req-e98b0865-779a-457f-bdd0-ebf545e9b50c tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Task: {'id': task-1314558, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.192858} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 864.507670] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-e98b0865-779a-457f-bdd0-ebf545e9b50c tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 56369316-a445-4a2a-a0a6-967074104e19] Extended root virtual disk {{(pid=61839) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 864.508500] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfef835c-66a0-4d75-b053-5ccdd331387c {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.532523] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-e98b0865-779a-457f-bdd0-ebf545e9b50c tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 56369316-a445-4a2a-a0a6-967074104e19] Reconfiguring VM instance instance-0000004c to attach disk [datastore1] 56369316-a445-4a2a-a0a6-967074104e19/56369316-a445-4a2a-a0a6-967074104e19.vmdk or device None with type sparse {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 864.534145] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-936c3a7a-28a1-4572-a29c-d6553e4d78ff {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.557900] env[61839]: DEBUG oslo_vmware.api [None req-e98b0865-779a-457f-bdd0-ebf545e9b50c tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Waiting for the task: (returnval){ [ 864.557900] env[61839]: value = "task-1314559" [ 864.557900] env[61839]: _type = "Task" [ 864.557900] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 864.569901] env[61839]: DEBUG oslo_vmware.api [None req-e98b0865-779a-457f-bdd0-ebf545e9b50c tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Task: {'id': task-1314559, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 864.598422] env[61839]: DEBUG nova.network.neutron [None req-a6fa6e9f-3ab4-4eb2-8dbd-9e4dcff4ce69 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: e47c08c6-5de3-48b0-8327-57ddb273555f] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 864.671656] env[61839]: DEBUG oslo_concurrency.lockutils [None req-4a30a124-7425-49b0-9db2-f6c7b005cfc6 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.031s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 864.675095] env[61839]: DEBUG nova.compute.manager [None req-0b723d18-6a91-468b-b8d0-3e444fa3d9c3 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: a4463efc-ffca-4552-a072-cbf5fe062533] Start spawning the instance on the hypervisor. 
{{(pid=61839) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 864.677445] env[61839]: DEBUG oslo_concurrency.lockutils [None req-d360fcd5-1768-4667-8854-1b10fb75b424 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 17.742s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 864.677687] env[61839]: DEBUG nova.objects.instance [None req-d360fcd5-1768-4667-8854-1b10fb75b424 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] Lazy-loading 'resources' on Instance uuid 49d4720b-83e3-47d9-b727-5bb255de2e7c {{(pid=61839) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 864.710075] env[61839]: INFO nova.scheduler.client.report [None req-4a30a124-7425-49b0-9db2-f6c7b005cfc6 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Deleted allocations for instance 047080fa-8781-47b1-89d8-2e4c8031b164 [ 864.729123] env[61839]: DEBUG nova.virt.hardware [None req-0b723d18-6a91-468b-b8d0-3e444fa3d9c3 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-18T16:51:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-18T16:50:57Z,direct_url=,disk_format='vmdk',id=e497cc62-282a-4a70-9770-22d80d8a1013,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a98e034276e44534a9feace637762da7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-18T16:50:58Z,virtual_size=,visibility=), allow threads: False {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 864.731253] env[61839]: DEBUG nova.virt.hardware [None req-0b723d18-6a91-468b-b8d0-3e444fa3d9c3 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Flavor limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 864.732053] env[61839]: DEBUG nova.virt.hardware [None req-0b723d18-6a91-468b-b8d0-3e444fa3d9c3 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Image limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 864.732293] env[61839]: DEBUG nova.virt.hardware [None req-0b723d18-6a91-468b-b8d0-3e444fa3d9c3 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Flavor pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 864.732454] env[61839]: DEBUG nova.virt.hardware [None req-0b723d18-6a91-468b-b8d0-3e444fa3d9c3 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Image pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 864.732618] env[61839]: DEBUG nova.virt.hardware [None req-0b723d18-6a91-468b-b8d0-3e444fa3d9c3 
tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 864.732841] env[61839]: DEBUG nova.virt.hardware [None req-0b723d18-6a91-468b-b8d0-3e444fa3d9c3 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 864.734178] env[61839]: DEBUG nova.virt.hardware [None req-0b723d18-6a91-468b-b8d0-3e444fa3d9c3 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 864.737155] env[61839]: DEBUG nova.virt.hardware [None req-0b723d18-6a91-468b-b8d0-3e444fa3d9c3 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Got 1 possible topologies {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 864.737916] env[61839]: DEBUG nova.virt.hardware [None req-0b723d18-6a91-468b-b8d0-3e444fa3d9c3 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 864.737916] env[61839]: DEBUG nova.virt.hardware [None req-0b723d18-6a91-468b-b8d0-3e444fa3d9c3 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 864.741248] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4c52305-6b07-418e-895e-bbcf6c7baa5a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.751760] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c94b2f9-332d-431d-8d60-5f05dd9153c9 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.836132] env[61839]: DEBUG nova.network.neutron [None req-a6fa6e9f-3ab4-4eb2-8dbd-9e4dcff4ce69 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: e47c08c6-5de3-48b0-8327-57ddb273555f] Updating instance_info_cache with network_info: [{"id": "c47afc9a-8a60-4c82-9548-b644e8e26492", "address": "fa:16:3e:68:19:2f", "network": {"id": "100bb090-83a3-4de8-bd2b-f65900cd5a3e", "bridge": "br-int", "label": "tempest-ServersTestJSON-901620943-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5aba1e066cb4400dbbacc92f393962e6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": 
"ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3b67e519-46cf-44ce-b670-4ba4c0c5b658", "external-id": "nsx-vlan-transportzone-110", "segmentation_id": 110, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc47afc9a-8a", "ovs_interfaceid": "c47afc9a-8a60-4c82-9548-b644e8e26492", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 864.836132] env[61839]: INFO nova.compute.manager [-] [instance: 2cb53e37-8b0b-48b7-a973-061b91df46df] Took 1.53 seconds to deallocate network for instance. [ 864.855048] env[61839]: INFO nova.compute.manager [-] [instance: c180cc04-79da-4529-a905-1985a85b7cf5] Took 1.91 seconds to deallocate network for instance. [ 864.885458] env[61839]: DEBUG oslo_concurrency.lockutils [None req-deec7b9d-0a44-4693-a38c-548a3860cdb1 tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 864.929087] env[61839]: DEBUG oslo_concurrency.lockutils [None req-049d7e44-9b50-45d0-afa0-68a572684801 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 865.069290] env[61839]: DEBUG oslo_vmware.api [None req-e98b0865-779a-457f-bdd0-ebf545e9b50c tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Task: {'id': task-1314559, 'name': ReconfigVM_Task, 'duration_secs': 0.293361} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 865.069636] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-e98b0865-779a-457f-bdd0-ebf545e9b50c tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 56369316-a445-4a2a-a0a6-967074104e19] Reconfigured VM instance instance-0000004c to attach disk [datastore1] 56369316-a445-4a2a-a0a6-967074104e19/56369316-a445-4a2a-a0a6-967074104e19.vmdk or device None with type sparse {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 865.070282] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-96649db3-96ba-4686-930b-9eaa1266e3c6 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.077400] env[61839]: DEBUG oslo_vmware.api [None req-e98b0865-779a-457f-bdd0-ebf545e9b50c tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Waiting for the task: (returnval){ [ 865.077400] env[61839]: value = "task-1314560" [ 865.077400] env[61839]: _type = "Task" [ 865.077400] env[61839]: } to complete. 
{{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 865.088628] env[61839]: DEBUG oslo_vmware.api [None req-e98b0865-779a-457f-bdd0-ebf545e9b50c tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Task: {'id': task-1314560, 'name': Rename_Task} progress is 5%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.224528] env[61839]: DEBUG oslo_concurrency.lockutils [None req-4a30a124-7425-49b0-9db2-f6c7b005cfc6 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Lock "047080fa-8781-47b1-89d8-2e4c8031b164" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 22.012s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 865.337024] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a6fa6e9f-3ab4-4eb2-8dbd-9e4dcff4ce69 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Releasing lock "refresh_cache-e47c08c6-5de3-48b0-8327-57ddb273555f" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 865.337346] env[61839]: DEBUG nova.compute.manager [None req-a6fa6e9f-3ab4-4eb2-8dbd-9e4dcff4ce69 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: e47c08c6-5de3-48b0-8327-57ddb273555f] Instance network_info: |[{"id": "c47afc9a-8a60-4c82-9548-b644e8e26492", "address": "fa:16:3e:68:19:2f", "network": {"id": "100bb090-83a3-4de8-bd2b-f65900cd5a3e", "bridge": "br-int", "label": "tempest-ServersTestJSON-901620943-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5aba1e066cb4400dbbacc92f393962e6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3b67e519-46cf-44ce-b670-4ba4c0c5b658", "external-id": "nsx-vlan-transportzone-110", "segmentation_id": 110, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc47afc9a-8a", "ovs_interfaceid": "c47afc9a-8a60-4c82-9548-b644e8e26492", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 865.337792] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-a6fa6e9f-3ab4-4eb2-8dbd-9e4dcff4ce69 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: e47c08c6-5de3-48b0-8327-57ddb273555f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:68:19:2f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3b67e519-46cf-44ce-b670-4ba4c0c5b658', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c47afc9a-8a60-4c82-9548-b644e8e26492', 'vif_model': 'vmxnet3'}] {{(pid=61839) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 865.347366] env[61839]: DEBUG oslo.service.loopingcall [None 
req-a6fa6e9f-3ab4-4eb2-8dbd-9e4dcff4ce69 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 865.351941] env[61839]: DEBUG oslo_concurrency.lockutils [None req-1496383a-3c58-427a-8916-58f524015f47 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 865.352264] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e47c08c6-5de3-48b0-8327-57ddb273555f] Creating VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 865.352873] env[61839]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fc09b44e-3051-4bbe-957b-083c306b787f {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.371163] env[61839]: DEBUG oslo_concurrency.lockutils [None req-ddc0f150-48b1-405a-990f-af9b8e487cda tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 865.371565] env[61839]: DEBUG nova.compute.manager [None req-460ca6f5-b390-4cca-bac6-7db949ba1471 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4] Checking state {{(pid=61839) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 865.373843] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f8d097b-97e1-4d94-b58b-c1988c43351d {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.387242] env[61839]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 865.387242] env[61839]: value = "task-1314561" [ 865.387242] env[61839]: _type = "Task" [ 865.387242] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 865.399605] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314561, 'name': CreateVM_Task} progress is 6%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.491197] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ce2c545-cf15-4588-9576-0ff845b21fa8 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.499955] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9aadef59-87a2-47f0-941d-19c74b22a521 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.534859] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb7e25b4-8e51-44b8-864a-b04e3b7f329a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.542922] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5bc8a97-ff0a-4eb6-b01b-229fb3ede588 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.559449] env[61839]: DEBUG nova.compute.manager [None req-fd02d93b-35a5-4563-a938-4a1df4c5d91d tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: ef49a6f5-27c3-4595-af65-d6a5aa47d4e4] Checking state {{(pid=61839) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 865.560036] env[61839]: DEBUG nova.compute.provider_tree [None req-d360fcd5-1768-4667-8854-1b10fb75b424 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 865.564966] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60b087e6-6ca6-4ada-8d21-d46563aa1830 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.586570] env[61839]: DEBUG oslo_vmware.api [None req-e98b0865-779a-457f-bdd0-ebf545e9b50c tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Task: {'id': task-1314560, 'name': Rename_Task, 'duration_secs': 0.215258} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 865.586905] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-e98b0865-779a-457f-bdd0-ebf545e9b50c tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 56369316-a445-4a2a-a0a6-967074104e19] Powering on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 865.587245] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b43d6e88-0df2-4b22-b63c-ddb7562a8626 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.594120] env[61839]: DEBUG oslo_vmware.api [None req-e98b0865-779a-457f-bdd0-ebf545e9b50c tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Waiting for the task: (returnval){ [ 865.594120] env[61839]: value = "task-1314562" [ 865.594120] env[61839]: _type = "Task" [ 865.594120] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 865.598965] env[61839]: DEBUG nova.network.neutron [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] [instance: 5c29c188-a34b-4751-9f8b-166af7b15088] Updating instance_info_cache with network_info: [{"id": "2d6e228a-75ff-4bff-bc8d-bdde3218cf40", "address": "fa:16:3e:5a:64:4f", "network": {"id": "6f1d4bda-c333-4556-9530-df719c058a5b", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1248001983-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "223d94c193814f649b5d1f35e3756071", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2180b40f-2bb0-47da-ba80-c2fbe7f98af0", "external-id": "nsx-vlan-transportzone-970", "segmentation_id": 970, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2d6e228a-75", "ovs_interfaceid": "2d6e228a-75ff-4bff-bc8d-bdde3218cf40", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 865.606337] env[61839]: DEBUG oslo_vmware.api [None req-e98b0865-779a-457f-bdd0-ebf545e9b50c tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Task: {'id': task-1314562, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.798763] env[61839]: DEBUG nova.compute.manager [req-ac0d9860-2c1a-402b-878f-8a5346b3c268 req-35861216-1a07-43e7-8838-02729ba891ba service nova] [instance: 2cb53e37-8b0b-48b7-a973-061b91df46df] Received event network-vif-deleted-aa6c596a-924c-4f3b-b846-88212a1fdbfe {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 865.798992] env[61839]: DEBUG nova.compute.manager [req-ac0d9860-2c1a-402b-878f-8a5346b3c268 req-35861216-1a07-43e7-8838-02729ba891ba service nova] [instance: 2cb53e37-8b0b-48b7-a973-061b91df46df] Received event network-vif-deleted-9b5b64c2-c5e6-4188-918b-22b9150363f0 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 865.817191] env[61839]: DEBUG nova.network.neutron [None req-0b723d18-6a91-468b-b8d0-3e444fa3d9c3 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: a4463efc-ffca-4552-a072-cbf5fe062533] Successfully updated port: c003fe42-e6b6-4c8d-bfa7-f1e28aadf9f8 {{(pid=61839) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 865.824991] env[61839]: DEBUG nova.compute.manager [req-bbe884db-84e2-4615-a2cc-0cfdb3cef52f req-67f1b699-47e8-4e07-a6ad-689aa9672bdb service nova] [instance: e47c08c6-5de3-48b0-8327-57ddb273555f] Received event network-changed-c47afc9a-8a60-4c82-9548-b644e8e26492 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 865.826495] env[61839]: DEBUG nova.compute.manager [req-bbe884db-84e2-4615-a2cc-0cfdb3cef52f req-67f1b699-47e8-4e07-a6ad-689aa9672bdb service nova] [instance: e47c08c6-5de3-48b0-8327-57ddb273555f] Refreshing instance network info cache due to event network-changed-c47afc9a-8a60-4c82-9548-b644e8e26492. 
{{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 865.826765] env[61839]: DEBUG oslo_concurrency.lockutils [req-bbe884db-84e2-4615-a2cc-0cfdb3cef52f req-67f1b699-47e8-4e07-a6ad-689aa9672bdb service nova] Acquiring lock "refresh_cache-e47c08c6-5de3-48b0-8327-57ddb273555f" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 865.826907] env[61839]: DEBUG oslo_concurrency.lockutils [req-bbe884db-84e2-4615-a2cc-0cfdb3cef52f req-67f1b699-47e8-4e07-a6ad-689aa9672bdb service nova] Acquired lock "refresh_cache-e47c08c6-5de3-48b0-8327-57ddb273555f" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 865.827094] env[61839]: DEBUG nova.network.neutron [req-bbe884db-84e2-4615-a2cc-0cfdb3cef52f req-67f1b699-47e8-4e07-a6ad-689aa9672bdb service nova] [instance: e47c08c6-5de3-48b0-8327-57ddb273555f] Refreshing network info cache for port c47afc9a-8a60-4c82-9548-b644e8e26492 {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 865.888807] env[61839]: INFO nova.compute.manager [None req-460ca6f5-b390-4cca-bac6-7db949ba1471 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4] instance snapshotting [ 865.889572] env[61839]: DEBUG nova.objects.instance [None req-460ca6f5-b390-4cca-bac6-7db949ba1471 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Lazy-loading 'flavor' on Instance uuid fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4 {{(pid=61839) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 865.900398] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314561, 'name': CreateVM_Task, 'duration_secs': 0.312763} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 865.900630] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e47c08c6-5de3-48b0-8327-57ddb273555f] Created VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 865.901489] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a6fa6e9f-3ab4-4eb2-8dbd-9e4dcff4ce69 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 865.901720] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a6fa6e9f-3ab4-4eb2-8dbd-9e4dcff4ce69 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 865.902075] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a6fa6e9f-3ab4-4eb2-8dbd-9e4dcff4ce69 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 865.902642] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4e4336a0-406b-4511-9b20-2e5956f7809e {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.907721] env[61839]: DEBUG oslo_vmware.api [None req-a6fa6e9f-3ab4-4eb2-8dbd-9e4dcff4ce69 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Waiting for the task: (returnval){ [ 865.907721] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52ec6c2f-e8ff-d1ca-a030-d81debcea001" [ 865.907721] env[61839]: _type = "Task" [ 865.907721] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 865.916106] env[61839]: DEBUG oslo_vmware.api [None req-a6fa6e9f-3ab4-4eb2-8dbd-9e4dcff4ce69 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52ec6c2f-e8ff-d1ca-a030-d81debcea001, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.066471] env[61839]: DEBUG nova.scheduler.client.report [None req-d360fcd5-1768-4667-8854-1b10fb75b424 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 866.073445] env[61839]: INFO nova.compute.manager [None req-fd02d93b-35a5-4563-a938-4a1df4c5d91d tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: ef49a6f5-27c3-4595-af65-d6a5aa47d4e4] instance snapshotting [ 866.076656] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ace913ba-5ea1-484f-8aba-95ff05916e96 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.102589] env[61839]: DEBUG oslo_concurrency.lockutils [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Releasing lock "refresh_cache-5c29c188-a34b-4751-9f8b-166af7b15088" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 866.102942] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] [instance: 5c29c188-a34b-4751-9f8b-166af7b15088] Updated the network info_cache for instance {{(pid=61839) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9999}} [ 866.108257] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5589c07-c148-4854-b446-14dd4b8f6828 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.112309] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 866.112570] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 866.113602] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 866.113826] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 866.114047] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running 
periodic task ComputeManager._poll_volume_usage {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 866.114592] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 866.114800] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61839) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 866.121555] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 866.123651] env[61839]: DEBUG oslo_vmware.api [None req-e98b0865-779a-457f-bdd0-ebf545e9b50c tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Task: {'id': task-1314562, 'name': PowerOnVM_Task, 'duration_secs': 0.469391} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 866.126467] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-e98b0865-779a-457f-bdd0-ebf545e9b50c tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 56369316-a445-4a2a-a0a6-967074104e19] Powered on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 866.126739] env[61839]: INFO nova.compute.manager [None req-e98b0865-779a-457f-bdd0-ebf545e9b50c tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 56369316-a445-4a2a-a0a6-967074104e19] Took 7.35 seconds to spawn the instance on the hypervisor. 
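Aside: the recurring wait_for_task / _poll_task entries above and below all come from one polling loop in oslo.vmware. What follows is a minimal, self-contained sketch of that pattern, illustration only — the _fake_task_info stub and its fields are assumptions standing in for the session's property read on the vCenter Task managed object, not the driver's code:

    # Illustration only -- not code from this log. Sketch of the loop behind
    # the "_poll_task ... progress is N%." (api.py:434) and
    # "... completed successfully" (api.py:444) entries: a fixed-interval
    # loop from oslo.service runs until the task leaves the 'running' state.
    from oslo_service import loopingcall

    _progress = {'pct': 0}

    def _fake_task_info():
        # Assumed stand-in for fetching the Task object's 'info' property.
        _progress['pct'] += 50
        state = 'success' if _progress['pct'] >= 100 else 'running'
        return {'state': state, 'progress': _progress['pct']}

    def _poll_task():
        info = _fake_task_info()
        if info['state'] == 'running':
            print('progress is %d%%.' % info['progress'])
        else:
            # Raising LoopingCallDone stops the loop and hands the final
            # task info back to the start().wait() caller below.
            raise loopingcall.LoopingCallDone(info)

    timer = loopingcall.FixedIntervalLoopingCall(_poll_task)
    result = timer.start(interval=0.5).wait()  # cf. task_poll_interval
    print('completed successfully: %s' % result)

Against a real session the loop body would read the task's info via the API session instead of the stub; that per-interval read is what produces the "progress is N%." cadence and the terminal "completed successfully" entries seen throughout this section.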
[ 866.127546] env[61839]: DEBUG nova.compute.manager [None req-e98b0865-779a-457f-bdd0-ebf545e9b50c tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 56369316-a445-4a2a-a0a6-967074104e19] Checking state {{(pid=61839) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 866.128180] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c57c2c90-8ee3-465f-bd76-dadaa1ed8842 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.319281] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0b723d18-6a91-468b-b8d0-3e444fa3d9c3 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Acquiring lock "refresh_cache-a4463efc-ffca-4552-a072-cbf5fe062533" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 866.319615] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0b723d18-6a91-468b-b8d0-3e444fa3d9c3 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Acquired lock "refresh_cache-a4463efc-ffca-4552-a072-cbf5fe062533" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 866.319615] env[61839]: DEBUG nova.network.neutron [None req-0b723d18-6a91-468b-b8d0-3e444fa3d9c3 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: a4463efc-ffca-4552-a072-cbf5fe062533] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 866.399529] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79874f2c-b3bd-496b-87df-82c36b6ea7a5 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.423252] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25440f6d-a251-4426-af0b-3885643afdbc {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.431655] env[61839]: DEBUG oslo_vmware.api [None req-a6fa6e9f-3ab4-4eb2-8dbd-9e4dcff4ce69 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52ec6c2f-e8ff-d1ca-a030-d81debcea001, 'name': SearchDatastore_Task, 'duration_secs': 0.010956} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 866.435759] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a6fa6e9f-3ab4-4eb2-8dbd-9e4dcff4ce69 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 866.436378] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-a6fa6e9f-3ab4-4eb2-8dbd-9e4dcff4ce69 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: e47c08c6-5de3-48b0-8327-57ddb273555f] Processing image e497cc62-282a-4a70-9770-22d80d8a1013 {{(pid=61839) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 866.436378] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a6fa6e9f-3ab4-4eb2-8dbd-9e4dcff4ce69 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 866.436378] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a6fa6e9f-3ab4-4eb2-8dbd-9e4dcff4ce69 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 866.436587] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-a6fa6e9f-3ab4-4eb2-8dbd-9e4dcff4ce69 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 866.438951] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-de2705fc-3b09-468f-a90a-cf8e98b1b99b {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.447219] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-a6fa6e9f-3ab4-4eb2-8dbd-9e4dcff4ce69 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 866.447406] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-a6fa6e9f-3ab4-4eb2-8dbd-9e4dcff4ce69 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61839) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 866.448225] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3272a6b0-b999-4045-b25c-529f2fb0a25f {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.453064] env[61839]: DEBUG oslo_vmware.api [None req-a6fa6e9f-3ab4-4eb2-8dbd-9e4dcff4ce69 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Waiting for the task: (returnval){ [ 866.453064] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]528c4700-ca1c-eead-9682-347bb2392e36" [ 866.453064] env[61839]: _type = "Task" [ 866.453064] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 866.463754] env[61839]: DEBUG oslo_vmware.api [None req-a6fa6e9f-3ab4-4eb2-8dbd-9e4dcff4ce69 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]528c4700-ca1c-eead-9682-347bb2392e36, 'name': SearchDatastore_Task, 'duration_secs': 0.008399} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 866.464521] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-429bb2ae-b99e-4b01-ad55-74358b133152 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.469892] env[61839]: DEBUG oslo_vmware.api [None req-a6fa6e9f-3ab4-4eb2-8dbd-9e4dcff4ce69 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Waiting for the task: (returnval){ [ 866.469892] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52507843-dd47-cb69-0055-6688eff9683c" [ 866.469892] env[61839]: _type = "Task" [ 866.469892] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 866.478645] env[61839]: DEBUG oslo_vmware.api [None req-a6fa6e9f-3ab4-4eb2-8dbd-9e4dcff4ce69 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52507843-dd47-cb69-0055-6688eff9683c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.574013] env[61839]: DEBUG oslo_concurrency.lockutils [None req-d360fcd5-1768-4667-8854-1b10fb75b424 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.897s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 866.577021] env[61839]: DEBUG oslo_concurrency.lockutils [None req-94f6b642-8cf2-4949-b1bb-e096f0676014 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.413s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 866.577887] env[61839]: INFO nova.compute.claims [None req-94f6b642-8cf2-4949-b1bb-e096f0676014 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: fa8a2265-291e-4424-bea1-72574e495a72] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 866.589450] env[61839]: DEBUG nova.network.neutron [req-bbe884db-84e2-4615-a2cc-0cfdb3cef52f req-67f1b699-47e8-4e07-a6ad-689aa9672bdb service nova] [instance: e47c08c6-5de3-48b0-8327-57ddb273555f] Updated VIF entry in instance network info cache for port c47afc9a-8a60-4c82-9548-b644e8e26492. {{(pid=61839) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 866.589795] env[61839]: DEBUG nova.network.neutron [req-bbe884db-84e2-4615-a2cc-0cfdb3cef52f req-67f1b699-47e8-4e07-a6ad-689aa9672bdb service nova] [instance: e47c08c6-5de3-48b0-8327-57ddb273555f] Updating instance_info_cache with network_info: [{"id": "c47afc9a-8a60-4c82-9548-b644e8e26492", "address": "fa:16:3e:68:19:2f", "network": {"id": "100bb090-83a3-4de8-bd2b-f65900cd5a3e", "bridge": "br-int", "label": "tempest-ServersTestJSON-901620943-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5aba1e066cb4400dbbacc92f393962e6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3b67e519-46cf-44ce-b670-4ba4c0c5b658", "external-id": "nsx-vlan-transportzone-110", "segmentation_id": 110, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc47afc9a-8a", "ovs_interfaceid": "c47afc9a-8a60-4c82-9548-b644e8e26492", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 866.597091] env[61839]: INFO nova.scheduler.client.report [None req-d360fcd5-1768-4667-8854-1b10fb75b424 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] Deleted allocations for instance 49d4720b-83e3-47d9-b727-5bb255de2e7c [ 866.627833] env[61839]: DEBUG oslo_concurrency.lockutils [None 
req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 866.628826] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-fd02d93b-35a5-4563-a938-4a1df4c5d91d tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: ef49a6f5-27c3-4595-af65-d6a5aa47d4e4] Creating Snapshot of the VM instance {{(pid=61839) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 866.629120] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-51415f85-6c22-4737-8519-645b9fb19b91 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.637549] env[61839]: DEBUG oslo_vmware.api [None req-fd02d93b-35a5-4563-a938-4a1df4c5d91d tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Waiting for the task: (returnval){ [ 866.637549] env[61839]: value = "task-1314563" [ 866.637549] env[61839]: _type = "Task" [ 866.637549] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 866.651677] env[61839]: DEBUG oslo_vmware.api [None req-fd02d93b-35a5-4563-a938-4a1df4c5d91d tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Task: {'id': task-1314563, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.652177] env[61839]: INFO nova.compute.manager [None req-e98b0865-779a-457f-bdd0-ebf545e9b50c tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 56369316-a445-4a2a-a0a6-967074104e19] Took 26.72 seconds to build instance. [ 866.854423] env[61839]: DEBUG nova.network.neutron [None req-0b723d18-6a91-468b-b8d0-3e444fa3d9c3 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: a4463efc-ffca-4552-a072-cbf5fe062533] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 866.943361] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-460ca6f5-b390-4cca-bac6-7db949ba1471 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4] Creating Snapshot of the VM instance {{(pid=61839) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 866.943687] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-6883f785-64cb-42e4-bf33-9392668fa19c {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.951384] env[61839]: DEBUG oslo_vmware.api [None req-460ca6f5-b390-4cca-bac6-7db949ba1471 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Waiting for the task: (returnval){ [ 866.951384] env[61839]: value = "task-1314564" [ 866.951384] env[61839]: _type = "Task" [ 866.951384] env[61839]: } to complete. 
{{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 866.959432] env[61839]: DEBUG oslo_vmware.api [None req-460ca6f5-b390-4cca-bac6-7db949ba1471 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1314564, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.987038] env[61839]: DEBUG oslo_vmware.api [None req-a6fa6e9f-3ab4-4eb2-8dbd-9e4dcff4ce69 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52507843-dd47-cb69-0055-6688eff9683c, 'name': SearchDatastore_Task, 'duration_secs': 0.008222} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 866.987306] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a6fa6e9f-3ab4-4eb2-8dbd-9e4dcff4ce69 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 866.987571] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-a6fa6e9f-3ab4-4eb2-8dbd-9e4dcff4ce69 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk to [datastore1] e47c08c6-5de3-48b0-8327-57ddb273555f/e47c08c6-5de3-48b0-8327-57ddb273555f.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 866.987846] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d2aa75ec-e08b-4c5d-8844-82d21e05f87e {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.994894] env[61839]: DEBUG oslo_vmware.api [None req-a6fa6e9f-3ab4-4eb2-8dbd-9e4dcff4ce69 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Waiting for the task: (returnval){ [ 866.994894] env[61839]: value = "task-1314565" [ 866.994894] env[61839]: _type = "Task" [ 866.994894] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 867.003051] env[61839]: DEBUG oslo_vmware.api [None req-a6fa6e9f-3ab4-4eb2-8dbd-9e4dcff4ce69 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': task-1314565, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.059213] env[61839]: DEBUG nova.network.neutron [None req-0b723d18-6a91-468b-b8d0-3e444fa3d9c3 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: a4463efc-ffca-4552-a072-cbf5fe062533] Updating instance_info_cache with network_info: [{"id": "c003fe42-e6b6-4c8d-bfa7-f1e28aadf9f8", "address": "fa:16:3e:91:e8:37", "network": {"id": "47b25b31-1262-485a-a847-2918f7fce488", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-914746239-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "25686a503d044467a1d641f14e14c65c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8c58d99d-ec12-4fc3-ab39-042b3f8cbb89", "external-id": "nsx-vlan-transportzone-44", "segmentation_id": 44, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc003fe42-e6", "ovs_interfaceid": "c003fe42-e6b6-4c8d-bfa7-f1e28aadf9f8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 867.092216] env[61839]: DEBUG oslo_concurrency.lockutils [req-bbe884db-84e2-4615-a2cc-0cfdb3cef52f req-67f1b699-47e8-4e07-a6ad-689aa9672bdb service nova] Releasing lock "refresh_cache-e47c08c6-5de3-48b0-8327-57ddb273555f" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 867.093030] env[61839]: DEBUG nova.compute.manager [req-bbe884db-84e2-4615-a2cc-0cfdb3cef52f req-67f1b699-47e8-4e07-a6ad-689aa9672bdb service nova] [instance: c180cc04-79da-4529-a905-1985a85b7cf5] Received event network-vif-deleted-38a6ad3a-a979-417b-a8ac-65232af41a58 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 867.093030] env[61839]: DEBUG nova.compute.manager [req-bbe884db-84e2-4615-a2cc-0cfdb3cef52f req-67f1b699-47e8-4e07-a6ad-689aa9672bdb service nova] [instance: a4463efc-ffca-4552-a072-cbf5fe062533] Received event network-vif-plugged-c003fe42-e6b6-4c8d-bfa7-f1e28aadf9f8 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 867.093030] env[61839]: DEBUG oslo_concurrency.lockutils [req-bbe884db-84e2-4615-a2cc-0cfdb3cef52f req-67f1b699-47e8-4e07-a6ad-689aa9672bdb service nova] Acquiring lock "a4463efc-ffca-4552-a072-cbf5fe062533-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 867.093205] env[61839]: DEBUG oslo_concurrency.lockutils [req-bbe884db-84e2-4615-a2cc-0cfdb3cef52f req-67f1b699-47e8-4e07-a6ad-689aa9672bdb service nova] Lock "a4463efc-ffca-4552-a072-cbf5fe062533-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 867.093420] env[61839]: DEBUG 
oslo_concurrency.lockutils [req-bbe884db-84e2-4615-a2cc-0cfdb3cef52f req-67f1b699-47e8-4e07-a6ad-689aa9672bdb service nova] Lock "a4463efc-ffca-4552-a072-cbf5fe062533-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 867.094037] env[61839]: DEBUG nova.compute.manager [req-bbe884db-84e2-4615-a2cc-0cfdb3cef52f req-67f1b699-47e8-4e07-a6ad-689aa9672bdb service nova] [instance: a4463efc-ffca-4552-a072-cbf5fe062533] No waiting events found dispatching network-vif-plugged-c003fe42-e6b6-4c8d-bfa7-f1e28aadf9f8 {{(pid=61839) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 867.094037] env[61839]: WARNING nova.compute.manager [req-bbe884db-84e2-4615-a2cc-0cfdb3cef52f req-67f1b699-47e8-4e07-a6ad-689aa9672bdb service nova] [instance: a4463efc-ffca-4552-a072-cbf5fe062533] Received unexpected event network-vif-plugged-c003fe42-e6b6-4c8d-bfa7-f1e28aadf9f8 for instance with vm_state building and task_state spawning. [ 867.105711] env[61839]: DEBUG oslo_concurrency.lockutils [None req-d360fcd5-1768-4667-8854-1b10fb75b424 tempest-DeleteServersAdminTestJSON-905313253 tempest-DeleteServersAdminTestJSON-905313253-project-member] Lock "49d4720b-83e3-47d9-b727-5bb255de2e7c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 23.365s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 867.148033] env[61839]: DEBUG oslo_vmware.api [None req-fd02d93b-35a5-4563-a938-4a1df4c5d91d tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Task: {'id': task-1314563, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.154236] env[61839]: DEBUG oslo_concurrency.lockutils [None req-e98b0865-779a-457f-bdd0-ebf545e9b50c tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Lock "56369316-a445-4a2a-a0a6-967074104e19" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 38.910s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 867.464684] env[61839]: DEBUG oslo_vmware.api [None req-460ca6f5-b390-4cca-bac6-7db949ba1471 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1314564, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.507026] env[61839]: DEBUG oslo_vmware.api [None req-a6fa6e9f-3ab4-4eb2-8dbd-9e4dcff4ce69 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': task-1314565, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.560936] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0b723d18-6a91-468b-b8d0-3e444fa3d9c3 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Releasing lock "refresh_cache-a4463efc-ffca-4552-a072-cbf5fe062533" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 867.561295] env[61839]: DEBUG nova.compute.manager [None req-0b723d18-6a91-468b-b8d0-3e444fa3d9c3 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: a4463efc-ffca-4552-a072-cbf5fe062533] Instance network_info: |[{"id": "c003fe42-e6b6-4c8d-bfa7-f1e28aadf9f8", "address": "fa:16:3e:91:e8:37", "network": {"id": "47b25b31-1262-485a-a847-2918f7fce488", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-914746239-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "25686a503d044467a1d641f14e14c65c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8c58d99d-ec12-4fc3-ab39-042b3f8cbb89", "external-id": "nsx-vlan-transportzone-44", "segmentation_id": 44, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc003fe42-e6", "ovs_interfaceid": "c003fe42-e6b6-4c8d-bfa7-f1e28aadf9f8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 867.561734] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-0b723d18-6a91-468b-b8d0-3e444fa3d9c3 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: a4463efc-ffca-4552-a072-cbf5fe062533] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:91:e8:37', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8c58d99d-ec12-4fc3-ab39-042b3f8cbb89', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c003fe42-e6b6-4c8d-bfa7-f1e28aadf9f8', 'vif_model': 'vmxnet3'}] {{(pid=61839) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 867.569625] env[61839]: DEBUG oslo.service.loopingcall [None req-0b723d18-6a91-468b-b8d0-3e444fa3d9c3 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 867.569872] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a4463efc-ffca-4552-a072-cbf5fe062533] Creating VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 867.570146] env[61839]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ffe69588-172a-4527-8a85-37d37065c55f {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.593336] env[61839]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 867.593336] env[61839]: value = "task-1314566" [ 867.593336] env[61839]: _type = "Task" [ 867.593336] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 867.601904] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314566, 'name': CreateVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.648043] env[61839]: DEBUG oslo_vmware.api [None req-fd02d93b-35a5-4563-a938-4a1df4c5d91d tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Task: {'id': task-1314563, 'name': CreateSnapshot_Task, 'duration_secs': 0.874296} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 867.648043] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-fd02d93b-35a5-4563-a938-4a1df4c5d91d tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: ef49a6f5-27c3-4595-af65-d6a5aa47d4e4] Created Snapshot of the VM instance {{(pid=61839) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 867.648927] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ee9352d-5396-4e67-a9ea-7314aa85fb0d {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.837509] env[61839]: DEBUG nova.compute.manager [req-569e8d86-6d12-424f-a0db-49bdb463a295 req-75504633-ed34-4d82-bf30-ccb3377150f2 service nova] [instance: 56369316-a445-4a2a-a0a6-967074104e19] Received event network-changed-ef2288b6-c4de-43f8-95c9-22511e164c36 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 867.837704] env[61839]: DEBUG nova.compute.manager [req-569e8d86-6d12-424f-a0db-49bdb463a295 req-75504633-ed34-4d82-bf30-ccb3377150f2 service nova] [instance: 56369316-a445-4a2a-a0a6-967074104e19] Refreshing instance network info cache due to event network-changed-ef2288b6-c4de-43f8-95c9-22511e164c36. 
{{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 867.837923] env[61839]: DEBUG oslo_concurrency.lockutils [req-569e8d86-6d12-424f-a0db-49bdb463a295 req-75504633-ed34-4d82-bf30-ccb3377150f2 service nova] Acquiring lock "refresh_cache-56369316-a445-4a2a-a0a6-967074104e19" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 867.839358] env[61839]: DEBUG oslo_concurrency.lockutils [req-569e8d86-6d12-424f-a0db-49bdb463a295 req-75504633-ed34-4d82-bf30-ccb3377150f2 service nova] Acquired lock "refresh_cache-56369316-a445-4a2a-a0a6-967074104e19" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 867.839358] env[61839]: DEBUG nova.network.neutron [req-569e8d86-6d12-424f-a0db-49bdb463a295 req-75504633-ed34-4d82-bf30-ccb3377150f2 service nova] [instance: 56369316-a445-4a2a-a0a6-967074104e19] Refreshing network info cache for port ef2288b6-c4de-43f8-95c9-22511e164c36 {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 867.859982] env[61839]: DEBUG nova.compute.manager [req-104f9144-6e8b-46fd-9897-91f1924e27ea req-9d84d5f7-05aa-4ebe-95bf-5f3e7c52d24f service nova] [instance: a4463efc-ffca-4552-a072-cbf5fe062533] Received event network-changed-c003fe42-e6b6-4c8d-bfa7-f1e28aadf9f8 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 867.859982] env[61839]: DEBUG nova.compute.manager [req-104f9144-6e8b-46fd-9897-91f1924e27ea req-9d84d5f7-05aa-4ebe-95bf-5f3e7c52d24f service nova] [instance: a4463efc-ffca-4552-a072-cbf5fe062533] Refreshing instance network info cache due to event network-changed-c003fe42-e6b6-4c8d-bfa7-f1e28aadf9f8. {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 867.860102] env[61839]: DEBUG oslo_concurrency.lockutils [req-104f9144-6e8b-46fd-9897-91f1924e27ea req-9d84d5f7-05aa-4ebe-95bf-5f3e7c52d24f service nova] Acquiring lock "refresh_cache-a4463efc-ffca-4552-a072-cbf5fe062533" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 867.860370] env[61839]: DEBUG oslo_concurrency.lockutils [req-104f9144-6e8b-46fd-9897-91f1924e27ea req-9d84d5f7-05aa-4ebe-95bf-5f3e7c52d24f service nova] Acquired lock "refresh_cache-a4463efc-ffca-4552-a072-cbf5fe062533" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 867.860640] env[61839]: DEBUG nova.network.neutron [req-104f9144-6e8b-46fd-9897-91f1924e27ea req-9d84d5f7-05aa-4ebe-95bf-5f3e7c52d24f service nova] [instance: a4463efc-ffca-4552-a072-cbf5fe062533] Refreshing network info cache for port c003fe42-e6b6-4c8d-bfa7-f1e28aadf9f8 {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 867.900334] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b382cab-f995-4fd5-947d-f92ae328e85f {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.908239] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b4f2bd8-8557-416f-bd59-bf89c378d680 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.947499] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-b7ac2461-36fa-4a4e-bdc4-db6f372e366c {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.959298] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f161fc0-2b1d-414b-b363-f9a82601c5d4 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.968992] env[61839]: DEBUG oslo_vmware.api [None req-460ca6f5-b390-4cca-bac6-7db949ba1471 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1314564, 'name': CreateSnapshot_Task, 'duration_secs': 0.93467} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 867.976708] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-460ca6f5-b390-4cca-bac6-7db949ba1471 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4] Created Snapshot of the VM instance {{(pid=61839) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 867.977249] env[61839]: DEBUG nova.compute.provider_tree [None req-94f6b642-8cf2-4949-b1bb-e096f0676014 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 867.978990] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a90d1df1-7e85-4fc9-aaed-2a2474431834 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.005767] env[61839]: DEBUG oslo_vmware.api [None req-a6fa6e9f-3ab4-4eb2-8dbd-9e4dcff4ce69 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': task-1314565, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.514387} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 868.006050] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-a6fa6e9f-3ab4-4eb2-8dbd-9e4dcff4ce69 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk to [datastore1] e47c08c6-5de3-48b0-8327-57ddb273555f/e47c08c6-5de3-48b0-8327-57ddb273555f.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 868.006270] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-a6fa6e9f-3ab4-4eb2-8dbd-9e4dcff4ce69 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: e47c08c6-5de3-48b0-8327-57ddb273555f] Extending root virtual disk to 1048576 {{(pid=61839) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 868.006523] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b9534b36-676b-4a38-ae48-121ae989dbb1 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.013715] env[61839]: DEBUG oslo_vmware.api [None req-a6fa6e9f-3ab4-4eb2-8dbd-9e4dcff4ce69 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Waiting for the task: (returnval){ [ 868.013715] env[61839]: value = "task-1314567" [ 868.013715] env[61839]: _type = "Task" [ 868.013715] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.021455] env[61839]: DEBUG oslo_vmware.api [None req-a6fa6e9f-3ab4-4eb2-8dbd-9e4dcff4ce69 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': task-1314567, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.103636] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314566, 'name': CreateVM_Task, 'duration_secs': 0.331572} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 868.104302] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a4463efc-ffca-4552-a072-cbf5fe062533] Created VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 868.104574] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0b723d18-6a91-468b-b8d0-3e444fa3d9c3 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 868.104746] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0b723d18-6a91-468b-b8d0-3e444fa3d9c3 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 868.105081] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0b723d18-6a91-468b-b8d0-3e444fa3d9c3 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 868.105340] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5794125f-c85b-4c0f-9ef6-7c7004f29e92 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.110017] env[61839]: DEBUG oslo_vmware.api [None req-0b723d18-6a91-468b-b8d0-3e444fa3d9c3 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Waiting for the task: (returnval){ [ 868.110017] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]5255c305-a7b5-78f3-89fe-4ed465f3f30c" [ 868.110017] env[61839]: _type = "Task" [ 868.110017] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.117520] env[61839]: DEBUG oslo_vmware.api [None req-0b723d18-6a91-468b-b8d0-3e444fa3d9c3 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]5255c305-a7b5-78f3-89fe-4ed465f3f30c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.170909] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-fd02d93b-35a5-4563-a938-4a1df4c5d91d tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: ef49a6f5-27c3-4595-af65-d6a5aa47d4e4] Creating linked-clone VM from snapshot {{(pid=61839) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 868.171257] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-7cd1ba17-4f98-452f-8c4c-f13767389713 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.180320] env[61839]: DEBUG oslo_vmware.api [None req-fd02d93b-35a5-4563-a938-4a1df4c5d91d tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Waiting for the task: (returnval){ [ 868.180320] env[61839]: value = "task-1314568" [ 868.180320] env[61839]: _type = "Task" [ 868.180320] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.188799] env[61839]: DEBUG oslo_vmware.api [None req-fd02d93b-35a5-4563-a938-4a1df4c5d91d tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Task: {'id': task-1314568, 'name': CloneVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.486025] env[61839]: DEBUG nova.scheduler.client.report [None req-94f6b642-8cf2-4949-b1bb-e096f0676014 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 868.501053] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-460ca6f5-b390-4cca-bac6-7db949ba1471 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4] Creating linked-clone VM from snapshot {{(pid=61839) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 868.504888] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-0eebd6b9-e5b4-4335-ba98-8156c950a362 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.518417] env[61839]: DEBUG oslo_vmware.api [None req-460ca6f5-b390-4cca-bac6-7db949ba1471 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Waiting for the task: (returnval){ [ 868.518417] env[61839]: value = "task-1314569" [ 868.518417] env[61839]: _type = "Task" [ 868.518417] env[61839]: } to complete. 
{{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.526351] env[61839]: DEBUG oslo_vmware.api [None req-a6fa6e9f-3ab4-4eb2-8dbd-9e4dcff4ce69 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': task-1314567, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.058394} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 868.526973] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-a6fa6e9f-3ab4-4eb2-8dbd-9e4dcff4ce69 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: e47c08c6-5de3-48b0-8327-57ddb273555f] Extended root virtual disk {{(pid=61839) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 868.527795] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acb23eed-3412-4938-96d0-2a31d3ea828e {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.534328] env[61839]: DEBUG oslo_vmware.api [None req-460ca6f5-b390-4cca-bac6-7db949ba1471 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1314569, 'name': CloneVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.556516] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-a6fa6e9f-3ab4-4eb2-8dbd-9e4dcff4ce69 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: e47c08c6-5de3-48b0-8327-57ddb273555f] Reconfiguring VM instance instance-0000004d to attach disk [datastore1] e47c08c6-5de3-48b0-8327-57ddb273555f/e47c08c6-5de3-48b0-8327-57ddb273555f.vmdk or device None with type sparse {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 868.561167] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-65ba8991-e8ce-472e-8c68-4b80e30058bb {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.581782] env[61839]: DEBUG oslo_vmware.api [None req-a6fa6e9f-3ab4-4eb2-8dbd-9e4dcff4ce69 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Waiting for the task: (returnval){ [ 868.581782] env[61839]: value = "task-1314570" [ 868.581782] env[61839]: _type = "Task" [ 868.581782] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.590329] env[61839]: DEBUG oslo_vmware.api [None req-a6fa6e9f-3ab4-4eb2-8dbd-9e4dcff4ce69 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': task-1314570, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.620829] env[61839]: DEBUG oslo_vmware.api [None req-0b723d18-6a91-468b-b8d0-3e444fa3d9c3 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]5255c305-a7b5-78f3-89fe-4ed465f3f30c, 'name': SearchDatastore_Task, 'duration_secs': 0.008618} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 868.621162] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0b723d18-6a91-468b-b8d0-3e444fa3d9c3 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 868.621406] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-0b723d18-6a91-468b-b8d0-3e444fa3d9c3 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: a4463efc-ffca-4552-a072-cbf5fe062533] Processing image e497cc62-282a-4a70-9770-22d80d8a1013 {{(pid=61839) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 868.621669] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0b723d18-6a91-468b-b8d0-3e444fa3d9c3 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 868.621798] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0b723d18-6a91-468b-b8d0-3e444fa3d9c3 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 868.622014] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-0b723d18-6a91-468b-b8d0-3e444fa3d9c3 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 868.622337] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-22b308f5-6e69-49d7-b683-6a6be610d0dd {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.633362] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-0b723d18-6a91-468b-b8d0-3e444fa3d9c3 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 868.633507] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-0b723d18-6a91-468b-b8d0-3e444fa3d9c3 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61839) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 868.636679] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ce369076-fe22-463d-9596-12f26d5c2a58 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.642727] env[61839]: DEBUG oslo_vmware.api [None req-0b723d18-6a91-468b-b8d0-3e444fa3d9c3 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Waiting for the task: (returnval){ [ 868.642727] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52d36477-47e5-5e5e-4d0c-892a27a56975" [ 868.642727] env[61839]: _type = "Task" [ 868.642727] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.651075] env[61839]: DEBUG oslo_vmware.api [None req-0b723d18-6a91-468b-b8d0-3e444fa3d9c3 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52d36477-47e5-5e5e-4d0c-892a27a56975, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.690334] env[61839]: DEBUG oslo_vmware.api [None req-fd02d93b-35a5-4563-a938-4a1df4c5d91d tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Task: {'id': task-1314568, 'name': CloneVM_Task} progress is 94%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.848171] env[61839]: DEBUG nova.network.neutron [req-104f9144-6e8b-46fd-9897-91f1924e27ea req-9d84d5f7-05aa-4ebe-95bf-5f3e7c52d24f service nova] [instance: a4463efc-ffca-4552-a072-cbf5fe062533] Updated VIF entry in instance network info cache for port c003fe42-e6b6-4c8d-bfa7-f1e28aadf9f8. 
{{(pid=61839) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 868.849024] env[61839]: DEBUG nova.network.neutron [req-104f9144-6e8b-46fd-9897-91f1924e27ea req-9d84d5f7-05aa-4ebe-95bf-5f3e7c52d24f service nova] [instance: a4463efc-ffca-4552-a072-cbf5fe062533] Updating instance_info_cache with network_info: [{"id": "c003fe42-e6b6-4c8d-bfa7-f1e28aadf9f8", "address": "fa:16:3e:91:e8:37", "network": {"id": "47b25b31-1262-485a-a847-2918f7fce488", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-914746239-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "25686a503d044467a1d641f14e14c65c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8c58d99d-ec12-4fc3-ab39-042b3f8cbb89", "external-id": "nsx-vlan-transportzone-44", "segmentation_id": 44, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc003fe42-e6", "ovs_interfaceid": "c003fe42-e6b6-4c8d-bfa7-f1e28aadf9f8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 868.853184] env[61839]: DEBUG nova.network.neutron [req-569e8d86-6d12-424f-a0db-49bdb463a295 req-75504633-ed34-4d82-bf30-ccb3377150f2 service nova] [instance: 56369316-a445-4a2a-a0a6-967074104e19] Updated VIF entry in instance network info cache for port ef2288b6-c4de-43f8-95c9-22511e164c36. 
{{(pid=61839) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 868.853748] env[61839]: DEBUG nova.network.neutron [req-569e8d86-6d12-424f-a0db-49bdb463a295 req-75504633-ed34-4d82-bf30-ccb3377150f2 service nova] [instance: 56369316-a445-4a2a-a0a6-967074104e19] Updating instance_info_cache with network_info: [{"id": "ef2288b6-c4de-43f8-95c9-22511e164c36", "address": "fa:16:3e:c7:71:6a", "network": {"id": "6f1d4bda-c333-4556-9530-df719c058a5b", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1248001983-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.162", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "223d94c193814f649b5d1f35e3756071", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2180b40f-2bb0-47da-ba80-c2fbe7f98af0", "external-id": "nsx-vlan-transportzone-970", "segmentation_id": 970, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapef2288b6-c4", "ovs_interfaceid": "ef2288b6-c4de-43f8-95c9-22511e164c36", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 868.989203] env[61839]: DEBUG oslo_concurrency.lockutils [None req-94f6b642-8cf2-4949-b1bb-e096f0676014 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.413s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 868.989894] env[61839]: DEBUG nova.compute.manager [None req-94f6b642-8cf2-4949-b1bb-e096f0676014 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: fa8a2265-291e-4424-bea1-72574e495a72] Start building networks asynchronously for instance. {{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 868.992866] env[61839]: DEBUG oslo_concurrency.lockutils [None req-24cdc7ff-c17f-45b0-adfd-87c2d87da1aa tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 15.090s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 868.993174] env[61839]: DEBUG nova.objects.instance [None req-24cdc7ff-c17f-45b0-adfd-87c2d87da1aa tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] Lazy-loading 'resources' on Instance uuid 62959833-5834-4c0a-bf4e-3ac1157b3b0c {{(pid=61839) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 869.029668] env[61839]: DEBUG oslo_vmware.api [None req-460ca6f5-b390-4cca-bac6-7db949ba1471 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1314569, 'name': CloneVM_Task} progress is 94%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.091976] env[61839]: DEBUG oslo_vmware.api [None req-a6fa6e9f-3ab4-4eb2-8dbd-9e4dcff4ce69 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': task-1314570, 'name': ReconfigVM_Task, 'duration_secs': 0.402695} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 869.092341] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-a6fa6e9f-3ab4-4eb2-8dbd-9e4dcff4ce69 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: e47c08c6-5de3-48b0-8327-57ddb273555f] Reconfigured VM instance instance-0000004d to attach disk [datastore1] e47c08c6-5de3-48b0-8327-57ddb273555f/e47c08c6-5de3-48b0-8327-57ddb273555f.vmdk or device None with type sparse {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 869.092981] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3b043f6c-4b26-4033-8543-7093c9123fd5 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.099463] env[61839]: DEBUG oslo_vmware.api [None req-a6fa6e9f-3ab4-4eb2-8dbd-9e4dcff4ce69 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Waiting for the task: (returnval){ [ 869.099463] env[61839]: value = "task-1314571" [ 869.099463] env[61839]: _type = "Task" [ 869.099463] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.106930] env[61839]: DEBUG oslo_vmware.api [None req-a6fa6e9f-3ab4-4eb2-8dbd-9e4dcff4ce69 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': task-1314571, 'name': Rename_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.152387] env[61839]: DEBUG oslo_vmware.api [None req-0b723d18-6a91-468b-b8d0-3e444fa3d9c3 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52d36477-47e5-5e5e-4d0c-892a27a56975, 'name': SearchDatastore_Task, 'duration_secs': 0.009672} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 869.153238] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4bed32d8-8f17-4a4a-931b-de27b644e0a3 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.158743] env[61839]: DEBUG oslo_vmware.api [None req-0b723d18-6a91-468b-b8d0-3e444fa3d9c3 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Waiting for the task: (returnval){ [ 869.158743] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52ae6f01-9a44-6ab5-5cf8-71f46248e46f" [ 869.158743] env[61839]: _type = "Task" [ 869.158743] env[61839]: } to complete. 
{{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.169228] env[61839]: DEBUG oslo_vmware.api [None req-0b723d18-6a91-468b-b8d0-3e444fa3d9c3 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52ae6f01-9a44-6ab5-5cf8-71f46248e46f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.190404] env[61839]: DEBUG oslo_vmware.api [None req-fd02d93b-35a5-4563-a938-4a1df4c5d91d tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Task: {'id': task-1314568, 'name': CloneVM_Task} progress is 94%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.353581] env[61839]: DEBUG oslo_concurrency.lockutils [req-104f9144-6e8b-46fd-9897-91f1924e27ea req-9d84d5f7-05aa-4ebe-95bf-5f3e7c52d24f service nova] Releasing lock "refresh_cache-a4463efc-ffca-4552-a072-cbf5fe062533" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 869.358114] env[61839]: DEBUG oslo_concurrency.lockutils [req-569e8d86-6d12-424f-a0db-49bdb463a295 req-75504633-ed34-4d82-bf30-ccb3377150f2 service nova] Releasing lock "refresh_cache-56369316-a445-4a2a-a0a6-967074104e19" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 869.496220] env[61839]: DEBUG nova.compute.utils [None req-94f6b642-8cf2-4949-b1bb-e096f0676014 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Using /dev/sd instead of None {{(pid=61839) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 869.497752] env[61839]: DEBUG nova.compute.manager [None req-94f6b642-8cf2-4949-b1bb-e096f0676014 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: fa8a2265-291e-4424-bea1-72574e495a72] Allocating IP information in the background. {{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 869.497927] env[61839]: DEBUG nova.network.neutron [None req-94f6b642-8cf2-4949-b1bb-e096f0676014 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: fa8a2265-291e-4424-bea1-72574e495a72] allocate_for_instance() {{(pid=61839) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 869.529309] env[61839]: DEBUG oslo_vmware.api [None req-460ca6f5-b390-4cca-bac6-7db949ba1471 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1314569, 'name': CloneVM_Task} progress is 94%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.541313] env[61839]: DEBUG nova.policy [None req-94f6b642-8cf2-4949-b1bb-e096f0676014 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8677a31386a54087b2328734c2eadeb3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8935bcc7ee644cb7a2a33557a708189c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61839) authorize /opt/stack/nova/nova/policy.py:201}} [ 869.613164] env[61839]: DEBUG oslo_vmware.api [None req-a6fa6e9f-3ab4-4eb2-8dbd-9e4dcff4ce69 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': task-1314571, 'name': Rename_Task, 'duration_secs': 0.169512} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 869.613469] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-a6fa6e9f-3ab4-4eb2-8dbd-9e4dcff4ce69 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: e47c08c6-5de3-48b0-8327-57ddb273555f] Powering on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 869.613739] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6f4703e7-a12e-48bc-b69e-ad789978b1fe {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.622691] env[61839]: DEBUG oslo_vmware.api [None req-a6fa6e9f-3ab4-4eb2-8dbd-9e4dcff4ce69 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Waiting for the task: (returnval){ [ 869.622691] env[61839]: value = "task-1314572" [ 869.622691] env[61839]: _type = "Task" [ 869.622691] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.630690] env[61839]: DEBUG oslo_vmware.api [None req-a6fa6e9f-3ab4-4eb2-8dbd-9e4dcff4ce69 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': task-1314572, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.674539] env[61839]: DEBUG oslo_vmware.api [None req-0b723d18-6a91-468b-b8d0-3e444fa3d9c3 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52ae6f01-9a44-6ab5-5cf8-71f46248e46f, 'name': SearchDatastore_Task, 'duration_secs': 0.010453} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 869.674846] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0b723d18-6a91-468b-b8d0-3e444fa3d9c3 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 869.675368] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b723d18-6a91-468b-b8d0-3e444fa3d9c3 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk to [datastore1] a4463efc-ffca-4552-a072-cbf5fe062533/a4463efc-ffca-4552-a072-cbf5fe062533.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 869.675621] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7c8e9e43-cf51-415b-b7bc-16a7f402317f {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.686425] env[61839]: DEBUG oslo_vmware.api [None req-0b723d18-6a91-468b-b8d0-3e444fa3d9c3 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Waiting for the task: (returnval){ [ 869.686425] env[61839]: value = "task-1314573" [ 869.686425] env[61839]: _type = "Task" [ 869.686425] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.693631] env[61839]: DEBUG oslo_vmware.api [None req-fd02d93b-35a5-4563-a938-4a1df4c5d91d tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Task: {'id': task-1314568, 'name': CloneVM_Task} progress is 95%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.701332] env[61839]: DEBUG oslo_vmware.api [None req-0b723d18-6a91-468b-b8d0-3e444fa3d9c3 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': task-1314573, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.823219] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fd7e919-3133-4be2-9acf-58f852ebb882 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.832728] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42d26a63-92f2-4db1-9986-f1a6bc6be5b5 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.868200] env[61839]: DEBUG nova.network.neutron [None req-94f6b642-8cf2-4949-b1bb-e096f0676014 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: fa8a2265-291e-4424-bea1-72574e495a72] Successfully created port: 471fcd5e-1ea3-4791-9a4d-b68197f8068e {{(pid=61839) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 869.871255] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-351f9042-1401-4d3c-8c66-fdefac0ec130 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.882532] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4fdda73-0e13-43f9-8593-9865df27875b {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.897987] env[61839]: DEBUG nova.compute.provider_tree [None req-24cdc7ff-c17f-45b0-adfd-87c2d87da1aa tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 870.004471] env[61839]: DEBUG nova.compute.manager [None req-94f6b642-8cf2-4949-b1bb-e096f0676014 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: fa8a2265-291e-4424-bea1-72574e495a72] Start building block device mappings for instance. {{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 870.036215] env[61839]: DEBUG oslo_vmware.api [None req-460ca6f5-b390-4cca-bac6-7db949ba1471 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1314569, 'name': CloneVM_Task} progress is 100%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.136110] env[61839]: DEBUG oslo_vmware.api [None req-a6fa6e9f-3ab4-4eb2-8dbd-9e4dcff4ce69 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': task-1314572, 'name': PowerOnVM_Task} progress is 89%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.194870] env[61839]: DEBUG oslo_vmware.api [None req-fd02d93b-35a5-4563-a938-4a1df4c5d91d tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Task: {'id': task-1314568, 'name': CloneVM_Task, 'duration_secs': 1.833807} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.195661] env[61839]: INFO nova.virt.vmwareapi.vmops [None req-fd02d93b-35a5-4563-a938-4a1df4c5d91d tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: ef49a6f5-27c3-4595-af65-d6a5aa47d4e4] Created linked-clone VM from snapshot [ 870.196595] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3576ec59-a98e-4384-91ce-809396408851 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.207995] env[61839]: DEBUG oslo_vmware.api [None req-0b723d18-6a91-468b-b8d0-3e444fa3d9c3 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': task-1314573, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.216145] env[61839]: DEBUG nova.virt.vmwareapi.images [None req-fd02d93b-35a5-4563-a938-4a1df4c5d91d tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: ef49a6f5-27c3-4595-af65-d6a5aa47d4e4] Uploading image 47c13bd8-eb3a-4bae-9aff-f4a48e5fa7b6 {{(pid=61839) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 870.249173] env[61839]: DEBUG oslo_vmware.rw_handles [None req-fd02d93b-35a5-4563-a938-4a1df4c5d91d tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 870.249173] env[61839]: value = "vm-281379" [ 870.249173] env[61839]: _type = "VirtualMachine" [ 870.249173] env[61839]: }. {{(pid=61839) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 870.249630] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-ef2505ea-478a-4093-bf58-135c740e71db {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.261719] env[61839]: DEBUG oslo_vmware.rw_handles [None req-fd02d93b-35a5-4563-a938-4a1df4c5d91d tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Lease: (returnval){ [ 870.261719] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]528cbdfd-0679-4d40-89fc-04b524ae85f9" [ 870.261719] env[61839]: _type = "HttpNfcLease" [ 870.261719] env[61839]: } obtained for exporting VM: (result){ [ 870.261719] env[61839]: value = "vm-281379" [ 870.261719] env[61839]: _type = "VirtualMachine" [ 870.261719] env[61839]: }. {{(pid=61839) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 870.262175] env[61839]: DEBUG oslo_vmware.api [None req-fd02d93b-35a5-4563-a938-4a1df4c5d91d tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Waiting for the lease: (returnval){ [ 870.262175] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]528cbdfd-0679-4d40-89fc-04b524ae85f9" [ 870.262175] env[61839]: _type = "HttpNfcLease" [ 870.262175] env[61839]: } to be ready. 
{{(pid=61839) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 870.268232] env[61839]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 870.268232] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]528cbdfd-0679-4d40-89fc-04b524ae85f9" [ 870.268232] env[61839]: _type = "HttpNfcLease" [ 870.268232] env[61839]: } is initializing. {{(pid=61839) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 870.403944] env[61839]: DEBUG nova.scheduler.client.report [None req-24cdc7ff-c17f-45b0-adfd-87c2d87da1aa tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 870.533254] env[61839]: DEBUG oslo_vmware.api [None req-460ca6f5-b390-4cca-bac6-7db949ba1471 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1314569, 'name': CloneVM_Task, 'duration_secs': 1.566246} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.533640] env[61839]: INFO nova.virt.vmwareapi.vmops [None req-460ca6f5-b390-4cca-bac6-7db949ba1471 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4] Created linked-clone VM from snapshot [ 870.534766] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24d1b05c-a5ca-45ee-b1aa-ee9d05f0d7ce {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.543572] env[61839]: DEBUG nova.virt.vmwareapi.images [None req-460ca6f5-b390-4cca-bac6-7db949ba1471 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4] Uploading image f5330b3b-767a-4697-b50e-19123c586f85 {{(pid=61839) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 870.569233] env[61839]: DEBUG oslo_vmware.rw_handles [None req-460ca6f5-b390-4cca-bac6-7db949ba1471 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 870.569233] env[61839]: value = "vm-281380" [ 870.569233] env[61839]: _type = "VirtualMachine" [ 870.569233] env[61839]: }. 
{{(pid=61839) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 870.569233] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-1c6ae09b-86e9-44b1-8b68-b216b7636214 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.573962] env[61839]: DEBUG oslo_vmware.rw_handles [None req-460ca6f5-b390-4cca-bac6-7db949ba1471 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Lease: (returnval){ [ 870.573962] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52ecd149-ca2e-7366-b78b-b6c79f5eb58c" [ 870.573962] env[61839]: _type = "HttpNfcLease" [ 870.573962] env[61839]: } obtained for exporting VM: (result){ [ 870.573962] env[61839]: value = "vm-281380" [ 870.573962] env[61839]: _type = "VirtualMachine" [ 870.573962] env[61839]: }. {{(pid=61839) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 870.574246] env[61839]: DEBUG oslo_vmware.api [None req-460ca6f5-b390-4cca-bac6-7db949ba1471 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Waiting for the lease: (returnval){ [ 870.574246] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52ecd149-ca2e-7366-b78b-b6c79f5eb58c" [ 870.574246] env[61839]: _type = "HttpNfcLease" [ 870.574246] env[61839]: } to be ready. {{(pid=61839) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 870.581392] env[61839]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 870.581392] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52ecd149-ca2e-7366-b78b-b6c79f5eb58c" [ 870.581392] env[61839]: _type = "HttpNfcLease" [ 870.581392] env[61839]: } is initializing. {{(pid=61839) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 870.634093] env[61839]: DEBUG oslo_vmware.api [None req-a6fa6e9f-3ab4-4eb2-8dbd-9e4dcff4ce69 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': task-1314572, 'name': PowerOnVM_Task, 'duration_secs': 0.57385} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.634426] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-a6fa6e9f-3ab4-4eb2-8dbd-9e4dcff4ce69 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: e47c08c6-5de3-48b0-8327-57ddb273555f] Powered on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 870.634860] env[61839]: INFO nova.compute.manager [None req-a6fa6e9f-3ab4-4eb2-8dbd-9e4dcff4ce69 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: e47c08c6-5de3-48b0-8327-57ddb273555f] Took 8.44 seconds to spawn the instance on the hypervisor. 
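A note on the recurring "Waiting for the task ... progress is N% ... completed successfully" records above: they come from oslo.vmware's `wait_for_task`/`_poll_task` machinery (api.py:397/434/444 in the trace tails), which repeatedly reads task state from vCenter until the task reaches a terminal state. The following is a minimal, self-contained sketch of that polling pattern, not oslo.vmware's actual code; the `get_task_info` callable, the state names, and the poll interval are assumptions made for illustration only.

    import time

    def wait_for_task(get_task_info, interval=0.5, timeout=300.0):
        # Poll a caller-supplied callable until the task reaches a
        # terminal state, echoing progress the way the "progress is N%"
        # DEBUG lines in the log above do.
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = get_task_info()   # e.g. {'state': 'running', 'progress': 42}
            state = info.get('state')
            if state == 'success':
                return info          # terminal; caller can read duration etc.
            if state == 'error':
                raise RuntimeError(info.get('message', 'task failed'))
            print('progress is %s%%' % info.get('progress', 0))
            time.sleep(interval)
        raise TimeoutError('task did not complete within %.0fs' % timeout)

    # Example: a stub task that succeeds on the third poll.
    calls = iter([{'state': 'running', 'progress': 0},
                  {'state': 'running', 'progress': 94},
                  {'state': 'success'}])
    wait_for_task(lambda: next(calls), interval=0.01)

This also explains why a task such as CloneVM_Task is logged several times at 94% before its "completed successfully" record: each DEBUG line is one iteration of the poll loop, and the final record carries the measured `duration_secs`.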
[ 870.635076] env[61839]: DEBUG nova.compute.manager [None req-a6fa6e9f-3ab4-4eb2-8dbd-9e4dcff4ce69 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: e47c08c6-5de3-48b0-8327-57ddb273555f] Checking state {{(pid=61839) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 870.636066] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60ac3543-e6b3-4226-a517-821500fc0adc {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.698013] env[61839]: DEBUG oslo_vmware.api [None req-0b723d18-6a91-468b-b8d0-3e444fa3d9c3 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': task-1314573, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.52935} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.698353] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b723d18-6a91-468b-b8d0-3e444fa3d9c3 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk to [datastore1] a4463efc-ffca-4552-a072-cbf5fe062533/a4463efc-ffca-4552-a072-cbf5fe062533.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 870.698602] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-0b723d18-6a91-468b-b8d0-3e444fa3d9c3 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: a4463efc-ffca-4552-a072-cbf5fe062533] Extending root virtual disk to 1048576 {{(pid=61839) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 870.698853] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-246ffd92-7c30-4cd1-bb65-6a15497da30f {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.705193] env[61839]: DEBUG oslo_vmware.api [None req-0b723d18-6a91-468b-b8d0-3e444fa3d9c3 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Waiting for the task: (returnval){ [ 870.705193] env[61839]: value = "task-1314576" [ 870.705193] env[61839]: _type = "Task" [ 870.705193] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.712617] env[61839]: DEBUG oslo_vmware.api [None req-0b723d18-6a91-468b-b8d0-3e444fa3d9c3 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': task-1314576, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.772067] env[61839]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 870.772067] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]528cbdfd-0679-4d40-89fc-04b524ae85f9" [ 870.772067] env[61839]: _type = "HttpNfcLease" [ 870.772067] env[61839]: } is ready. 
{{(pid=61839) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 870.772067] env[61839]: DEBUG oslo_vmware.rw_handles [None req-fd02d93b-35a5-4563-a938-4a1df4c5d91d tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 870.772067] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]528cbdfd-0679-4d40-89fc-04b524ae85f9" [ 870.772067] env[61839]: _type = "HttpNfcLease" [ 870.772067] env[61839]: }. {{(pid=61839) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 870.772067] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a3c7fda-99b6-474a-8dac-c75e39d8a66c {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.779888] env[61839]: DEBUG oslo_vmware.rw_handles [None req-fd02d93b-35a5-4563-a938-4a1df4c5d91d tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/528083bf-421c-87e9-3472-206f7a9a3b69/disk-0.vmdk from lease info. {{(pid=61839) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 870.780261] env[61839]: DEBUG oslo_vmware.rw_handles [None req-fd02d93b-35a5-4563-a938-4a1df4c5d91d tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/528083bf-421c-87e9-3472-206f7a9a3b69/disk-0.vmdk for reading. {{(pid=61839) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 870.894627] env[61839]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-2dbe6c4e-f44f-44d9-a701-3dbb1f8d5434 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.910589] env[61839]: DEBUG oslo_concurrency.lockutils [None req-24cdc7ff-c17f-45b0-adfd-87c2d87da1aa tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.917s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 870.912606] env[61839]: DEBUG oslo_concurrency.lockutils [None req-b23234c0-1dd9-455d-88df-914017787547 tempest-ServerActionsV293TestJSON-151230769 tempest-ServerActionsV293TestJSON-151230769-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.203s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 870.915611] env[61839]: INFO nova.compute.claims [None req-b23234c0-1dd9-455d-88df-914017787547 tempest-ServerActionsV293TestJSON-151230769 tempest-ServerActionsV293TestJSON-151230769-project-member] [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 870.941481] env[61839]: INFO nova.scheduler.client.report [None req-24cdc7ff-c17f-45b0-adfd-87c2d87da1aa tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] Deleted allocations for instance 62959833-5834-4c0a-bf4e-3ac1157b3b0c [ 871.020154] env[61839]: DEBUG 
nova.compute.manager [None req-94f6b642-8cf2-4949-b1bb-e096f0676014 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: fa8a2265-291e-4424-bea1-72574e495a72] Start spawning the instance on the hypervisor. {{(pid=61839) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 871.048707] env[61839]: DEBUG nova.virt.hardware [None req-94f6b642-8cf2-4949-b1bb-e096f0676014 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-18T16:51:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-18T16:50:57Z,direct_url=,disk_format='vmdk',id=e497cc62-282a-4a70-9770-22d80d8a1013,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a98e034276e44534a9feace637762da7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-18T16:50:58Z,virtual_size=,visibility=), allow threads: False {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 871.049443] env[61839]: DEBUG nova.virt.hardware [None req-94f6b642-8cf2-4949-b1bb-e096f0676014 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Flavor limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 871.049443] env[61839]: DEBUG nova.virt.hardware [None req-94f6b642-8cf2-4949-b1bb-e096f0676014 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Image limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 871.049443] env[61839]: DEBUG nova.virt.hardware [None req-94f6b642-8cf2-4949-b1bb-e096f0676014 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Flavor pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 871.049443] env[61839]: DEBUG nova.virt.hardware [None req-94f6b642-8cf2-4949-b1bb-e096f0676014 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Image pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 871.049624] env[61839]: DEBUG nova.virt.hardware [None req-94f6b642-8cf2-4949-b1bb-e096f0676014 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 871.049890] env[61839]: DEBUG nova.virt.hardware [None req-94f6b642-8cf2-4949-b1bb-e096f0676014 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 871.050086] env[61839]: DEBUG nova.virt.hardware [None req-94f6b642-8cf2-4949-b1bb-e096f0676014 
tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 871.050445] env[61839]: DEBUG nova.virt.hardware [None req-94f6b642-8cf2-4949-b1bb-e096f0676014 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Got 1 possible topologies {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 871.050445] env[61839]: DEBUG nova.virt.hardware [None req-94f6b642-8cf2-4949-b1bb-e096f0676014 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 871.050607] env[61839]: DEBUG nova.virt.hardware [None req-94f6b642-8cf2-4949-b1bb-e096f0676014 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 871.051504] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-916086f0-d89e-4692-a0b5-ff7e817c7cb7 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.060446] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3e716b3-16a7-41b3-a3d9-a8abd58a1a37 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.085317] env[61839]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 871.085317] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52ecd149-ca2e-7366-b78b-b6c79f5eb58c" [ 871.085317] env[61839]: _type = "HttpNfcLease" [ 871.085317] env[61839]: } is ready. {{(pid=61839) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 871.085317] env[61839]: DEBUG oslo_vmware.rw_handles [None req-460ca6f5-b390-4cca-bac6-7db949ba1471 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 871.085317] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52ecd149-ca2e-7366-b78b-b6c79f5eb58c" [ 871.085317] env[61839]: _type = "HttpNfcLease" [ 871.085317] env[61839]: }. {{(pid=61839) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 871.085687] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35ee5f8d-5e69-4e56-914d-0a71005cde97 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.093017] env[61839]: DEBUG oslo_vmware.rw_handles [None req-460ca6f5-b390-4cca-bac6-7db949ba1471 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52e27b49-ecc1-7327-1a64-b8749fbcb3c2/disk-0.vmdk from lease info. 
{{(pid=61839) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 871.093216] env[61839]: DEBUG oslo_vmware.rw_handles [None req-460ca6f5-b390-4cca-bac6-7db949ba1471 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52e27b49-ecc1-7327-1a64-b8749fbcb3c2/disk-0.vmdk for reading. {{(pid=61839) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 871.161088] env[61839]: INFO nova.compute.manager [None req-a6fa6e9f-3ab4-4eb2-8dbd-9e4dcff4ce69 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: e47c08c6-5de3-48b0-8327-57ddb273555f] Took 27.62 seconds to build instance. [ 871.215721] env[61839]: DEBUG oslo_vmware.api [None req-0b723d18-6a91-468b-b8d0-3e444fa3d9c3 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': task-1314576, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.173568} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 871.216028] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-0b723d18-6a91-468b-b8d0-3e444fa3d9c3 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: a4463efc-ffca-4552-a072-cbf5fe062533] Extended root virtual disk {{(pid=61839) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 871.216846] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8dce8bbc-ed7a-4021-b853-f1f7e21e6e37 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.242599] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-0b723d18-6a91-468b-b8d0-3e444fa3d9c3 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: a4463efc-ffca-4552-a072-cbf5fe062533] Reconfiguring VM instance instance-0000004e to attach disk [datastore1] a4463efc-ffca-4552-a072-cbf5fe062533/a4463efc-ffca-4552-a072-cbf5fe062533.vmdk or device None with type sparse {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 871.245313] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-95589ec3-833f-41d1-ba56-a8de6711b8ad {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.259958] env[61839]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-b427fdbc-7a17-4415-a325-d88f01a04ea5 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.270965] env[61839]: DEBUG oslo_vmware.api [None req-0b723d18-6a91-468b-b8d0-3e444fa3d9c3 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Waiting for the task: (returnval){ [ 871.270965] env[61839]: value = "task-1314577" [ 871.270965] env[61839]: _type = "Task" [ 871.270965] env[61839]: } to complete. 
{{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 871.281058] env[61839]: DEBUG oslo_vmware.api [None req-0b723d18-6a91-468b-b8d0-3e444fa3d9c3 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': task-1314577, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.460052] env[61839]: DEBUG oslo_concurrency.lockutils [None req-24cdc7ff-c17f-45b0-adfd-87c2d87da1aa tempest-SecurityGroupsTestJSON-191177858 tempest-SecurityGroupsTestJSON-191177858-project-member] Lock "62959833-5834-4c0a-bf4e-3ac1157b3b0c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 20.813s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 871.663700] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a6fa6e9f-3ab4-4eb2-8dbd-9e4dcff4ce69 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Lock "e47c08c6-5de3-48b0-8327-57ddb273555f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 42.551s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 871.781720] env[61839]: DEBUG oslo_vmware.api [None req-0b723d18-6a91-468b-b8d0-3e444fa3d9c3 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': task-1314577, 'name': ReconfigVM_Task, 'duration_secs': 0.473607} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 871.782725] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-0b723d18-6a91-468b-b8d0-3e444fa3d9c3 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: a4463efc-ffca-4552-a072-cbf5fe062533] Reconfigured VM instance instance-0000004e to attach disk [datastore1] a4463efc-ffca-4552-a072-cbf5fe062533/a4463efc-ffca-4552-a072-cbf5fe062533.vmdk or device None with type sparse {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 871.784114] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a9f790f9-5b7f-4044-aab6-6cd7e969fa4c {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.791486] env[61839]: DEBUG oslo_vmware.api [None req-0b723d18-6a91-468b-b8d0-3e444fa3d9c3 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Waiting for the task: (returnval){ [ 871.791486] env[61839]: value = "task-1314578" [ 871.791486] env[61839]: _type = "Task" [ 871.791486] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 871.801614] env[61839]: DEBUG oslo_vmware.api [None req-0b723d18-6a91-468b-b8d0-3e444fa3d9c3 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': task-1314578, 'name': Rename_Task} progress is 5%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.848081] env[61839]: DEBUG nova.compute.manager [req-5dc8ca0c-2997-4d67-bbc3-068b7542d8dc req-9558f684-71c9-4d2f-8ae6-a6549d9e2dc4 service nova] [instance: fa8a2265-291e-4424-bea1-72574e495a72] Received event network-vif-plugged-471fcd5e-1ea3-4791-9a4d-b68197f8068e {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 871.850509] env[61839]: DEBUG oslo_concurrency.lockutils [req-5dc8ca0c-2997-4d67-bbc3-068b7542d8dc req-9558f684-71c9-4d2f-8ae6-a6549d9e2dc4 service nova] Acquiring lock "fa8a2265-291e-4424-bea1-72574e495a72-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 871.851444] env[61839]: DEBUG oslo_concurrency.lockutils [req-5dc8ca0c-2997-4d67-bbc3-068b7542d8dc req-9558f684-71c9-4d2f-8ae6-a6549d9e2dc4 service nova] Lock "fa8a2265-291e-4424-bea1-72574e495a72-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 871.852951] env[61839]: DEBUG oslo_concurrency.lockutils [req-5dc8ca0c-2997-4d67-bbc3-068b7542d8dc req-9558f684-71c9-4d2f-8ae6-a6549d9e2dc4 service nova] Lock "fa8a2265-291e-4424-bea1-72574e495a72-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.002s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 871.854713] env[61839]: DEBUG nova.compute.manager [req-5dc8ca0c-2997-4d67-bbc3-068b7542d8dc req-9558f684-71c9-4d2f-8ae6-a6549d9e2dc4 service nova] [instance: fa8a2265-291e-4424-bea1-72574e495a72] No waiting events found dispatching network-vif-plugged-471fcd5e-1ea3-4791-9a4d-b68197f8068e {{(pid=61839) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 871.855147] env[61839]: WARNING nova.compute.manager [req-5dc8ca0c-2997-4d67-bbc3-068b7542d8dc req-9558f684-71c9-4d2f-8ae6-a6549d9e2dc4 service nova] [instance: fa8a2265-291e-4424-bea1-72574e495a72] Received unexpected event network-vif-plugged-471fcd5e-1ea3-4791-9a4d-b68197f8068e for instance with vm_state building and task_state spawning. [ 872.143930] env[61839]: DEBUG nova.network.neutron [None req-94f6b642-8cf2-4949-b1bb-e096f0676014 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: fa8a2265-291e-4424-bea1-72574e495a72] Successfully updated port: 471fcd5e-1ea3-4791-9a4d-b68197f8068e {{(pid=61839) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 872.307139] env[61839]: DEBUG oslo_vmware.api [None req-0b723d18-6a91-468b-b8d0-3e444fa3d9c3 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': task-1314578, 'name': Rename_Task, 'duration_secs': 0.193056} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 872.307646] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b723d18-6a91-468b-b8d0-3e444fa3d9c3 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: a4463efc-ffca-4552-a072-cbf5fe062533] Powering on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 872.308057] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-51db1080-8383-4fe8-8fb3-401d2a59ec57 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.321358] env[61839]: DEBUG oslo_vmware.api [None req-0b723d18-6a91-468b-b8d0-3e444fa3d9c3 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Waiting for the task: (returnval){ [ 872.321358] env[61839]: value = "task-1314579" [ 872.321358] env[61839]: _type = "Task" [ 872.321358] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 872.334560] env[61839]: DEBUG oslo_vmware.api [None req-0b723d18-6a91-468b-b8d0-3e444fa3d9c3 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': task-1314579, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.352219] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a2122c7-3150-4a16-aeb1-1708c9bf0e2b {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.361377] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74d48826-fd30-4791-af40-c4d3dc17fc82 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.403101] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3977463-55e9-45b0-83dc-6cc42abafdaf {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.411876] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-048de829-3c58-4d80-bb9d-f6e728991907 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.426525] env[61839]: DEBUG nova.compute.provider_tree [None req-b23234c0-1dd9-455d-88df-914017787547 tempest-ServerActionsV293TestJSON-151230769 tempest-ServerActionsV293TestJSON-151230769-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 872.542132] env[61839]: DEBUG oslo_concurrency.lockutils [None req-30d9488d-cb37-4568-96d9-8af15aa81167 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Acquiring lock "e47c08c6-5de3-48b0-8327-57ddb273555f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 872.545049] env[61839]: DEBUG oslo_concurrency.lockutils [None 
req-30d9488d-cb37-4568-96d9-8af15aa81167 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Lock "e47c08c6-5de3-48b0-8327-57ddb273555f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 872.545049] env[61839]: DEBUG oslo_concurrency.lockutils [None req-30d9488d-cb37-4568-96d9-8af15aa81167 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Acquiring lock "e47c08c6-5de3-48b0-8327-57ddb273555f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 872.545049] env[61839]: DEBUG oslo_concurrency.lockutils [None req-30d9488d-cb37-4568-96d9-8af15aa81167 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Lock "e47c08c6-5de3-48b0-8327-57ddb273555f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 872.545049] env[61839]: DEBUG oslo_concurrency.lockutils [None req-30d9488d-cb37-4568-96d9-8af15aa81167 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Lock "e47c08c6-5de3-48b0-8327-57ddb273555f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 872.545554] env[61839]: INFO nova.compute.manager [None req-30d9488d-cb37-4568-96d9-8af15aa81167 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: e47c08c6-5de3-48b0-8327-57ddb273555f] Terminating instance [ 872.550537] env[61839]: DEBUG nova.compute.manager [None req-30d9488d-cb37-4568-96d9-8af15aa81167 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: e47c08c6-5de3-48b0-8327-57ddb273555f] Start destroying the instance on the hypervisor. 
{{(pid=61839) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 872.550537] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-30d9488d-cb37-4568-96d9-8af15aa81167 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: e47c08c6-5de3-48b0-8327-57ddb273555f] Destroying instance {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 872.551175] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-219a4563-ed00-408c-95e5-c59973c86a5c {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.559682] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-30d9488d-cb37-4568-96d9-8af15aa81167 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: e47c08c6-5de3-48b0-8327-57ddb273555f] Powering off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 872.560079] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-050b1e20-8269-4912-a14a-7f5a52e7c418 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.568927] env[61839]: DEBUG oslo_vmware.api [None req-30d9488d-cb37-4568-96d9-8af15aa81167 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Waiting for the task: (returnval){ [ 872.568927] env[61839]: value = "task-1314580" [ 872.568927] env[61839]: _type = "Task" [ 872.568927] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 872.581378] env[61839]: DEBUG oslo_vmware.api [None req-30d9488d-cb37-4568-96d9-8af15aa81167 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': task-1314580, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.646693] env[61839]: DEBUG oslo_concurrency.lockutils [None req-94f6b642-8cf2-4949-b1bb-e096f0676014 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Acquiring lock "refresh_cache-fa8a2265-291e-4424-bea1-72574e495a72" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 872.646693] env[61839]: DEBUG oslo_concurrency.lockutils [None req-94f6b642-8cf2-4949-b1bb-e096f0676014 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Acquired lock "refresh_cache-fa8a2265-291e-4424-bea1-72574e495a72" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 872.646693] env[61839]: DEBUG nova.network.neutron [None req-94f6b642-8cf2-4949-b1bb-e096f0676014 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: fa8a2265-291e-4424-bea1-72574e495a72] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 872.835912] env[61839]: DEBUG oslo_vmware.api [None req-0b723d18-6a91-468b-b8d0-3e444fa3d9c3 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': task-1314579, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.935061] env[61839]: DEBUG nova.scheduler.client.report [None req-b23234c0-1dd9-455d-88df-914017787547 tempest-ServerActionsV293TestJSON-151230769 tempest-ServerActionsV293TestJSON-151230769-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 873.081085] env[61839]: DEBUG oslo_vmware.api [None req-30d9488d-cb37-4568-96d9-8af15aa81167 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': task-1314580, 'name': PowerOffVM_Task, 'duration_secs': 0.199546} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 873.081545] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-30d9488d-cb37-4568-96d9-8af15aa81167 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: e47c08c6-5de3-48b0-8327-57ddb273555f] Powered off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 873.081545] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-30d9488d-cb37-4568-96d9-8af15aa81167 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: e47c08c6-5de3-48b0-8327-57ddb273555f] Unregistering the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 873.082185] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7fb8d725-0318-4cdd-9b9e-f2f2f19be77e {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.153425] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-30d9488d-cb37-4568-96d9-8af15aa81167 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: e47c08c6-5de3-48b0-8327-57ddb273555f] Unregistered the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 873.153763] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-30d9488d-cb37-4568-96d9-8af15aa81167 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: e47c08c6-5de3-48b0-8327-57ddb273555f] Deleting contents of the VM from datastore datastore1 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 873.154246] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-30d9488d-cb37-4568-96d9-8af15aa81167 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Deleting the datastore file [datastore1] e47c08c6-5de3-48b0-8327-57ddb273555f {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 873.155034] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-06c41cae-8e06-4b6f-8d65-23c0062447d7 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.162202] 
env[61839]: DEBUG oslo_vmware.api [None req-30d9488d-cb37-4568-96d9-8af15aa81167 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Waiting for the task: (returnval){ [ 873.162202] env[61839]: value = "task-1314582" [ 873.162202] env[61839]: _type = "Task" [ 873.162202] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 873.172957] env[61839]: DEBUG oslo_vmware.api [None req-30d9488d-cb37-4568-96d9-8af15aa81167 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': task-1314582, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.202700] env[61839]: DEBUG nova.network.neutron [None req-94f6b642-8cf2-4949-b1bb-e096f0676014 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: fa8a2265-291e-4424-bea1-72574e495a72] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 873.332873] env[61839]: DEBUG oslo_vmware.api [None req-0b723d18-6a91-468b-b8d0-3e444fa3d9c3 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': task-1314579, 'name': PowerOnVM_Task, 'duration_secs': 0.674577} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 873.333220] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b723d18-6a91-468b-b8d0-3e444fa3d9c3 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: a4463efc-ffca-4552-a072-cbf5fe062533] Powered on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 873.333474] env[61839]: INFO nova.compute.manager [None req-0b723d18-6a91-468b-b8d0-3e444fa3d9c3 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: a4463efc-ffca-4552-a072-cbf5fe062533] Took 8.66 seconds to spawn the instance on the hypervisor. 
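Task task-1314579 above follows the same poll loop every vCenter operation in this log uses: invoke the *_Task method, then poll progress until wait_for_task reports completion. A sketch of the power-on step under that pattern; vm_ref is a hypothetical managed-object reference, not one taken from this run:

    def power_on(session, vm_ref):
        # PowerOnVM_Task is asynchronous; invoke_api returns a Task
        # reference right away.
        task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
        # wait_for_task drives the "progress is 0%" through "completed
        # successfully" polling seen in the entries above.
        session.wait_for_task(task)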
[ 873.333683] env[61839]: DEBUG nova.compute.manager [None req-0b723d18-6a91-468b-b8d0-3e444fa3d9c3 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: a4463efc-ffca-4552-a072-cbf5fe062533] Checking state {{(pid=61839) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 873.334609] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5f6cf93-c8e3-43b9-a76b-9506990596c0 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.440596] env[61839]: DEBUG oslo_concurrency.lockutils [None req-b23234c0-1dd9-455d-88df-914017787547 tempest-ServerActionsV293TestJSON-151230769 tempest-ServerActionsV293TestJSON-151230769-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.528s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 873.441348] env[61839]: DEBUG nova.compute.manager [None req-b23234c0-1dd9-455d-88df-914017787547 tempest-ServerActionsV293TestJSON-151230769 tempest-ServerActionsV293TestJSON-151230769-project-member] [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af] Start building networks asynchronously for instance. {{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 873.447869] env[61839]: DEBUG nova.network.neutron [None req-94f6b642-8cf2-4949-b1bb-e096f0676014 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: fa8a2265-291e-4424-bea1-72574e495a72] Updating instance_info_cache with network_info: [{"id": "471fcd5e-1ea3-4791-9a4d-b68197f8068e", "address": "fa:16:3e:48:06:f8", "network": {"id": "2334f355-8fd7-4df2-ba1c-a28b5fde97f2", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-202913707-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8935bcc7ee644cb7a2a33557a708189c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "37434b93-dfdc-4a3f-bf5a-9f2cbe25a754", "external-id": "nsx-vlan-transportzone-676", "segmentation_id": 676, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap471fcd5e-1e", "ovs_interfaceid": "471fcd5e-1ea3-4791-9a4d-b68197f8068e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 873.449559] env[61839]: DEBUG oslo_concurrency.lockutils [None req-deec7b9d-0a44-4693-a38c-548a3860cdb1 tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 8.564s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 873.449873] env[61839]: DEBUG nova.objects.instance [None req-deec7b9d-0a44-4693-a38c-548a3860cdb1 
tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Lazy-loading 'resources' on Instance uuid ce59c937-fc0b-464f-baaa-461c6f6c2d57 {{(pid=61839) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 873.673186] env[61839]: DEBUG oslo_vmware.api [None req-30d9488d-cb37-4568-96d9-8af15aa81167 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': task-1314582, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.17518} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 873.673510] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-30d9488d-cb37-4568-96d9-8af15aa81167 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Deleted the datastore file {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 873.673707] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-30d9488d-cb37-4568-96d9-8af15aa81167 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: e47c08c6-5de3-48b0-8327-57ddb273555f] Deleted contents of the VM from datastore datastore1 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 873.673894] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-30d9488d-cb37-4568-96d9-8af15aa81167 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: e47c08c6-5de3-48b0-8327-57ddb273555f] Instance destroyed {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 873.674100] env[61839]: INFO nova.compute.manager [None req-30d9488d-cb37-4568-96d9-8af15aa81167 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: e47c08c6-5de3-48b0-8327-57ddb273555f] Took 1.12 seconds to destroy the instance on the hypervisor. [ 873.674398] env[61839]: DEBUG oslo.service.loopingcall [None req-30d9488d-cb37-4568-96d9-8af15aa81167 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 873.674729] env[61839]: DEBUG nova.compute.manager [-] [instance: e47c08c6-5de3-48b0-8327-57ddb273555f] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 873.674824] env[61839]: DEBUG nova.network.neutron [-] [instance: e47c08c6-5de3-48b0-8327-57ddb273555f] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 873.853562] env[61839]: INFO nova.compute.manager [None req-0b723d18-6a91-468b-b8d0-3e444fa3d9c3 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: a4463efc-ffca-4552-a072-cbf5fe062533] Took 29.59 seconds to build instance. 
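The teardown of e47c08c6 that completes above is the three-step destroy path: power off the VM, unregister it, then delete its directory from the datastore. A sketch of those calls, with vm_ref, ds_path, and dc_ref as hypothetical stand-ins for the real managed-object references and the "[datastore1] <instance-uuid>" path:

    def destroy_instance(session, vm_ref, ds_path, dc_ref):
        # Power off first; block on the task as usual.
        task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
        session.wait_for_task(task)
        # UnregisterVM is a plain call, not a task; the log accordingly
        # shows no task polling after the Unregister invocation.
        session.invoke_api(session.vim, 'UnregisterVM', vm_ref)
        # Remove the VM's files; DeleteDatastoreFile_Task takes the
        # FileManager, the datastore path, and the datacenter.
        file_mgr = session.vim.service_content.fileManager
        task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                                  file_mgr, name=ds_path, datacenter=dc_ref)
        session.wait_for_task(task)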
[ 873.953060] env[61839]: DEBUG nova.compute.utils [None req-b23234c0-1dd9-455d-88df-914017787547 tempest-ServerActionsV293TestJSON-151230769 tempest-ServerActionsV293TestJSON-151230769-project-member] Using /dev/sd instead of None {{(pid=61839) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 873.954662] env[61839]: DEBUG oslo_concurrency.lockutils [None req-94f6b642-8cf2-4949-b1bb-e096f0676014 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Releasing lock "refresh_cache-fa8a2265-291e-4424-bea1-72574e495a72" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 873.954943] env[61839]: DEBUG nova.compute.manager [None req-94f6b642-8cf2-4949-b1bb-e096f0676014 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: fa8a2265-291e-4424-bea1-72574e495a72] Instance network_info: |[{"id": "471fcd5e-1ea3-4791-9a4d-b68197f8068e", "address": "fa:16:3e:48:06:f8", "network": {"id": "2334f355-8fd7-4df2-ba1c-a28b5fde97f2", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-202913707-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8935bcc7ee644cb7a2a33557a708189c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "37434b93-dfdc-4a3f-bf5a-9f2cbe25a754", "external-id": "nsx-vlan-transportzone-676", "segmentation_id": 676, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap471fcd5e-1e", "ovs_interfaceid": "471fcd5e-1ea3-4791-9a4d-b68197f8068e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 873.956081] env[61839]: DEBUG nova.compute.manager [None req-b23234c0-1dd9-455d-88df-914017787547 tempest-ServerActionsV293TestJSON-151230769 tempest-ServerActionsV293TestJSON-151230769-project-member] [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af] Allocating IP information in the background. 
{{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 873.956381] env[61839]: DEBUG nova.network.neutron [None req-b23234c0-1dd9-455d-88df-914017787547 tempest-ServerActionsV293TestJSON-151230769 tempest-ServerActionsV293TestJSON-151230769-project-member] [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af] allocate_for_instance() {{(pid=61839) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 873.958405] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-94f6b642-8cf2-4949-b1bb-e096f0676014 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: fa8a2265-291e-4424-bea1-72574e495a72] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:48:06:f8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '37434b93-dfdc-4a3f-bf5a-9f2cbe25a754', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '471fcd5e-1ea3-4791-9a4d-b68197f8068e', 'vif_model': 'vmxnet3'}] {{(pid=61839) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 873.966211] env[61839]: DEBUG oslo.service.loopingcall [None req-94f6b642-8cf2-4949-b1bb-e096f0676014 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 873.969248] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fa8a2265-291e-4424-bea1-72574e495a72] Creating VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 873.971781] env[61839]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1eff7760-2c06-4a85-9b60-3db53ef126b2 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.992736] env[61839]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 873.992736] env[61839]: value = "task-1314583" [ 873.992736] env[61839]: _type = "Task" [ 873.992736] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 874.002110] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314583, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.023567] env[61839]: DEBUG nova.policy [None req-b23234c0-1dd9-455d-88df-914017787547 tempest-ServerActionsV293TestJSON-151230769 tempest-ServerActionsV293TestJSON-151230769-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fa847c225cb74169add166317c4d8eba', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '766a6b1d62ed40bfa5fe23e553b1d0f6', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61839) authorize /opt/stack/nova/nova/policy.py:201}} [ 874.269233] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13e394df-432b-4b04-a75f-eee8165e0580 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.280610] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e030a6a-83e2-4a17-9c68-5baaf02dbff3 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.313616] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0ea2c1e-bbc5-4045-b126-836ff2e20a1b {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.321816] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0df6375d-8950-43f8-a1f5-1521837e995a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.338278] env[61839]: DEBUG nova.compute.provider_tree [None req-deec7b9d-0a44-4693-a38c-548a3860cdb1 tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 874.340425] env[61839]: DEBUG nova.network.neutron [None req-b23234c0-1dd9-455d-88df-914017787547 tempest-ServerActionsV293TestJSON-151230769 tempest-ServerActionsV293TestJSON-151230769-project-member] [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af] Successfully created port: 7dee6a06-890e-4663-a919-d96beac69d5d {{(pid=61839) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 874.356034] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0b723d18-6a91-468b-b8d0-3e444fa3d9c3 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Lock "a4463efc-ffca-4552-a072-cbf5fe062533" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 32.939s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 874.431681] env[61839]: DEBUG nova.network.neutron [-] [instance: e47c08c6-5de3-48b0-8327-57ddb273555f] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 874.456963] env[61839]: DEBUG nova.compute.manager [None req-b23234c0-1dd9-455d-88df-914017787547 
tempest-ServerActionsV293TestJSON-151230769 tempest-ServerActionsV293TestJSON-151230769-project-member] [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af] Start building block device mappings for instance. {{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 874.503046] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314583, 'name': CreateVM_Task, 'duration_secs': 0.437264} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 874.503278] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fa8a2265-291e-4424-bea1-72574e495a72] Created VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 874.503989] env[61839]: DEBUG oslo_concurrency.lockutils [None req-94f6b642-8cf2-4949-b1bb-e096f0676014 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 874.504204] env[61839]: DEBUG oslo_concurrency.lockutils [None req-94f6b642-8cf2-4949-b1bb-e096f0676014 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 874.504564] env[61839]: DEBUG oslo_concurrency.lockutils [None req-94f6b642-8cf2-4949-b1bb-e096f0676014 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 874.504830] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ce61cf60-735b-46f1-8599-f59b315d26cb {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.509862] env[61839]: DEBUG oslo_vmware.api [None req-94f6b642-8cf2-4949-b1bb-e096f0676014 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Waiting for the task: (returnval){ [ 874.509862] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52dfd9c9-bde5-5e5b-5990-1b3c1fcbce43" [ 874.509862] env[61839]: _type = "Task" [ 874.509862] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 874.518844] env[61839]: DEBUG oslo_vmware.api [None req-94f6b642-8cf2-4949-b1bb-e096f0676014 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52dfd9c9-bde5-5e5b-5990-1b3c1fcbce43, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.844407] env[61839]: DEBUG nova.scheduler.client.report [None req-deec7b9d-0a44-4693-a38c-548a3860cdb1 tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 874.936769] env[61839]: INFO nova.compute.manager [-] [instance: e47c08c6-5de3-48b0-8327-57ddb273555f] Took 1.26 seconds to deallocate network for instance. [ 874.966026] env[61839]: INFO nova.virt.block_device [None req-b23234c0-1dd9-455d-88df-914017787547 tempest-ServerActionsV293TestJSON-151230769 tempest-ServerActionsV293TestJSON-151230769-project-member] [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af] Booting with volume b82148bd-7b88-45c7-b95b-5f60f19c65e5 at /dev/sda [ 875.009805] env[61839]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-510b2300-f8a9-447f-b4b9-6b143a1604e0 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.023848] env[61839]: DEBUG oslo_vmware.api [None req-94f6b642-8cf2-4949-b1bb-e096f0676014 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52dfd9c9-bde5-5e5b-5990-1b3c1fcbce43, 'name': SearchDatastore_Task, 'duration_secs': 0.010356} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 875.025603] env[61839]: DEBUG oslo_concurrency.lockutils [None req-94f6b642-8cf2-4949-b1bb-e096f0676014 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 875.025882] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-94f6b642-8cf2-4949-b1bb-e096f0676014 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: fa8a2265-291e-4424-bea1-72574e495a72] Processing image e497cc62-282a-4a70-9770-22d80d8a1013 {{(pid=61839) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 875.026147] env[61839]: DEBUG oslo_concurrency.lockutils [None req-94f6b642-8cf2-4949-b1bb-e096f0676014 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 875.026521] env[61839]: DEBUG oslo_concurrency.lockutils [None req-94f6b642-8cf2-4949-b1bb-e096f0676014 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 875.026755] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-94f6b642-8cf2-4949-b1bb-e096f0676014 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 875.027093] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a8769859-5c82-4e79-b066-62bc001626b9 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.032819] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97656d68-dceb-4056-9d96-0a65b57de439 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.071499] env[61839]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-cbfa27a6-6105-4431-adc7-35a54950ca4d {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.073749] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-94f6b642-8cf2-4949-b1bb-e096f0676014 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 875.073980] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-94f6b642-8cf2-4949-b1bb-e096f0676014 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Folder [datastore2] devstack-image-cache_base created. 
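
The acquired/released lines above come from lockutils' timing wrapper around named locks; here the image-cache path serializes concurrent fetches of the same image. A stdlib sketch of the same waited/held reporting (illustrative, not the oslo.concurrency code):

import contextlib
import threading
import time

_named_locks = {}

@contextlib.contextmanager
def timed_lock(name):
    # One lock per name, reused across callers in this process.
    lock = _named_locks.setdefault(name, threading.Lock())
    t0 = time.monotonic()
    lock.acquire()
    print(f'Lock "{name}" acquired :: waited {time.monotonic() - t0:.3f}s')
    t1 = time.monotonic()
    try:
        yield
    finally:
        lock.release()
        print(f'Lock "{name}" released :: held {time.monotonic() - t1:.3f}s')

# usage, mirroring the image-cache critical section above:
with timed_lock('devstack-image-cache_base'):
    pass  # fetch-or-reuse the cached image while the lock is held
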
{{(pid=61839) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 875.074773] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b472d57b-56f5-4d30-bffa-816990ce4787 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.081201] env[61839]: DEBUG oslo_vmware.api [None req-94f6b642-8cf2-4949-b1bb-e096f0676014 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Waiting for the task: (returnval){ [ 875.081201] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]522de802-6439-d21e-fca5-83aa86058966" [ 875.081201] env[61839]: _type = "Task" [ 875.081201] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 875.091923] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92a45f71-203a-4145-bb7f-bc568b8e0042 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.111726] env[61839]: DEBUG oslo_vmware.api [None req-94f6b642-8cf2-4949-b1bb-e096f0676014 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]522de802-6439-d21e-fca5-83aa86058966, 'name': SearchDatastore_Task, 'duration_secs': 0.010607} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 875.112628] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-86dd19e6-d490-4415-a544-c7627d47f69a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.128722] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-367c0522-eac8-4a7d-b32d-59407a8dbf72 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.133058] env[61839]: DEBUG oslo_vmware.api [None req-94f6b642-8cf2-4949-b1bb-e096f0676014 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Waiting for the task: (returnval){ [ 875.133058] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]522430fc-2feb-ac98-65d9-0809498ae371" [ 875.133058] env[61839]: _type = "Task" [ 875.133058] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 875.140386] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37eda367-3beb-4c13-a37c-83f007ff9b01 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.146054] env[61839]: DEBUG oslo_vmware.api [None req-94f6b642-8cf2-4949-b1bb-e096f0676014 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]522430fc-2feb-ac98-65d9-0809498ae371, 'name': SearchDatastore_Task} progress is 0%. 
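
The SearchDatastore_Task round-trips above are a fetch-if-missing probe: the driver looks for the image's VMDK in the cache folder before deciding whether to pull it from Glance. A local-filesystem stand-in for that check (cached_image is a hypothetical helper, not the datastore API):

from pathlib import Path

def cached_image(cache_root, image_id):
    """Return the cached VMDK path if present, else None (download needed)."""
    candidate = Path(cache_root) / image_id / f'{image_id}.vmdk'
    return candidate if candidate.exists() else None
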
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.157660] env[61839]: DEBUG nova.virt.block_device [None req-b23234c0-1dd9-455d-88df-914017787547 tempest-ServerActionsV293TestJSON-151230769 tempest-ServerActionsV293TestJSON-151230769-project-member] [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af] Updating existing volume attachment record: 5a3a14f5-1ade-4b57-ba19-f2a08e1c8738 {{(pid=61839) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 875.350156] env[61839]: DEBUG oslo_concurrency.lockutils [None req-deec7b9d-0a44-4693-a38c-548a3860cdb1 tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.901s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 875.353374] env[61839]: DEBUG oslo_concurrency.lockutils [None req-049d7e44-9b50-45d0-afa0-68a572684801 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 10.424s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 875.353374] env[61839]: DEBUG nova.objects.instance [None req-049d7e44-9b50-45d0-afa0-68a572684801 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Lazy-loading 'resources' on Instance uuid c996d7db-4b73-4445-9989-4efb2cd852e8 {{(pid=61839) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 875.373416] env[61839]: INFO nova.scheduler.client.report [None req-deec7b9d-0a44-4693-a38c-548a3860cdb1 tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Deleted allocations for instance ce59c937-fc0b-464f-baaa-461c6f6c2d57 [ 875.443776] env[61839]: DEBUG oslo_concurrency.lockutils [None req-30d9488d-cb37-4568-96d9-8af15aa81167 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 875.645119] env[61839]: DEBUG oslo_vmware.api [None req-94f6b642-8cf2-4949-b1bb-e096f0676014 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]522430fc-2feb-ac98-65d9-0809498ae371, 'name': SearchDatastore_Task, 'duration_secs': 0.015114} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 875.645520] env[61839]: DEBUG oslo_concurrency.lockutils [None req-94f6b642-8cf2-4949-b1bb-e096f0676014 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 875.645790] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-94f6b642-8cf2-4949-b1bb-e096f0676014 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk to [datastore2] fa8a2265-291e-4424-bea1-72574e495a72/fa8a2265-291e-4424-bea1-72574e495a72.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 875.646103] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f44258bf-80a7-445d-8d45-053d0068a68c {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.652811] env[61839]: DEBUG oslo_vmware.api [None req-94f6b642-8cf2-4949-b1bb-e096f0676014 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Waiting for the task: (returnval){ [ 875.652811] env[61839]: value = "task-1314584" [ 875.652811] env[61839]: _type = "Task" [ 875.652811] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 875.662814] env[61839]: DEBUG oslo_vmware.api [None req-94f6b642-8cf2-4949-b1bb-e096f0676014 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Task: {'id': task-1314584, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.875669] env[61839]: DEBUG nova.network.neutron [None req-b23234c0-1dd9-455d-88df-914017787547 tempest-ServerActionsV293TestJSON-151230769 tempest-ServerActionsV293TestJSON-151230769-project-member] [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af] Successfully updated port: 7dee6a06-890e-4663-a919-d96beac69d5d {{(pid=61839) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 875.884482] env[61839]: DEBUG oslo_concurrency.lockutils [None req-deec7b9d-0a44-4693-a38c-548a3860cdb1 tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Lock "ce59c937-fc0b-464f-baaa-461c6f6c2d57" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 14.172s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 876.165915] env[61839]: DEBUG oslo_vmware.api [None req-94f6b642-8cf2-4949-b1bb-e096f0676014 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Task: {'id': task-1314584, 'name': CopyVirtualDisk_Task} progress is 89%. 
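
The copy above turns a cache hit into a per-instance root disk: the cached VMDK is cloned within the datastore, so repeated boots from the same image pay only a datastore-local copy rather than another Glance download. A plain-file sketch of the step, with shutil standing in for VirtualDiskManager.CopyVirtualDisk_Task:

import shutil
from pathlib import Path

def clone_root_disk(cache_vmdk, instance_dir, instance_uuid):
    dst_dir = Path(instance_dir)
    dst_dir.mkdir(parents=True, exist_ok=True)
    dst = dst_dir / f'{instance_uuid}.vmdk'
    # the real driver issues CopyVirtualDisk_Task against the datastore
    shutil.copyfile(cache_vmdk, dst)
    return dst
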
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.180608] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1198d72d-7f8c-4419-91f6-9bf18166cbda {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.188215] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa069302-fbdc-489b-a4d0-e506e1e4cf2e {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.218904] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57fc4334-c086-4227-bc6e-90de4049aa21 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.229571] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6769d60c-4015-44d2-8201-4c2539c7d17d {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.244357] env[61839]: DEBUG nova.compute.provider_tree [None req-049d7e44-9b50-45d0-afa0-68a572684801 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 876.379053] env[61839]: DEBUG oslo_concurrency.lockutils [None req-b23234c0-1dd9-455d-88df-914017787547 tempest-ServerActionsV293TestJSON-151230769 tempest-ServerActionsV293TestJSON-151230769-project-member] Acquiring lock "refresh_cache-d187e75f-39a9-467b-b5ef-e2772d9b71af" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 876.379269] env[61839]: DEBUG oslo_concurrency.lockutils [None req-b23234c0-1dd9-455d-88df-914017787547 tempest-ServerActionsV293TestJSON-151230769 tempest-ServerActionsV293TestJSON-151230769-project-member] Acquired lock "refresh_cache-d187e75f-39a9-467b-b5ef-e2772d9b71af" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 876.379410] env[61839]: DEBUG nova.network.neutron [None req-b23234c0-1dd9-455d-88df-914017787547 tempest-ServerActionsV293TestJSON-151230769 tempest-ServerActionsV293TestJSON-151230769-project-member] [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 876.664145] env[61839]: DEBUG oslo_vmware.api [None req-94f6b642-8cf2-4949-b1bb-e096f0676014 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Task: {'id': task-1314584, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.521516} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 876.664491] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-94f6b642-8cf2-4949-b1bb-e096f0676014 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk to [datastore2] fa8a2265-291e-4424-bea1-72574e495a72/fa8a2265-291e-4424-bea1-72574e495a72.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 876.664719] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-94f6b642-8cf2-4949-b1bb-e096f0676014 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: fa8a2265-291e-4424-bea1-72574e495a72] Extending root virtual disk to 1048576 {{(pid=61839) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 876.664986] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-388f8125-ef25-4cdc-af8e-f49c60d3a077 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.671642] env[61839]: DEBUG oslo_vmware.api [None req-94f6b642-8cf2-4949-b1bb-e096f0676014 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Waiting for the task: (returnval){ [ 876.671642] env[61839]: value = "task-1314585" [ 876.671642] env[61839]: _type = "Task" [ 876.671642] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 876.681369] env[61839]: DEBUG oslo_vmware.api [None req-94f6b642-8cf2-4949-b1bb-e096f0676014 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Task: {'id': task-1314585, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.748156] env[61839]: DEBUG nova.scheduler.client.report [None req-049d7e44-9b50-45d0-afa0-68a572684801 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 876.917943] env[61839]: DEBUG nova.network.neutron [None req-b23234c0-1dd9-455d-88df-914017787547 tempest-ServerActionsV293TestJSON-151230769 tempest-ServerActionsV293TestJSON-151230769-project-member] [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af] Instance cache missing network info. 
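
The "Extending root virtual disk to 1048576" entry above is the flavor's 1 GB root disk expressed in KiB, the unit the extend task takes here: 1 * 1024 * 1024 = 1048576. In sketch form:

def root_disk_kib(root_gb):
    # 1 GiB = 1024 MiB = 1024 * 1024 KiB
    return root_gb * 1024 * 1024

assert root_disk_kib(1) == 1048576  # matches the task parameters above
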
{{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 877.073171] env[61839]: DEBUG nova.network.neutron [None req-b23234c0-1dd9-455d-88df-914017787547 tempest-ServerActionsV293TestJSON-151230769 tempest-ServerActionsV293TestJSON-151230769-project-member] [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af] Updating instance_info_cache with network_info: [{"id": "7dee6a06-890e-4663-a919-d96beac69d5d", "address": "fa:16:3e:ff:68:87", "network": {"id": "daab8b4e-d8f9-408d-874d-b68ea25fd50c", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-1447004582-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "766a6b1d62ed40bfa5fe23e553b1d0f6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f3a80436-f7a9-431a-acec-aca3d76e3f9b", "external-id": "cl2-zone-339", "segmentation_id": 339, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7dee6a06-89", "ovs_interfaceid": "7dee6a06-890e-4663-a919-d96beac69d5d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 877.182943] env[61839]: DEBUG oslo_vmware.api [None req-94f6b642-8cf2-4949-b1bb-e096f0676014 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Task: {'id': task-1314585, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.099862} completed successfully. 
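
The instance_info_cache blob above is plain JSON; the driver pulls the VIF id, MAC, fixed IP, and MTU out of it when plugging the port. A trimmed copy of that structure with the corresponding lookups (fields abridged to the ones read here):

import json

network_info = json.loads('''
[{"id": "7dee6a06-890e-4663-a919-d96beac69d5d",
  "address": "fa:16:3e:ff:68:87",
  "devname": "tap7dee6a06-89",
  "network": {"bridge": "br-int",
              "meta": {"mtu": 8950},
              "subnets": [{"cidr": "192.168.128.0/28",
                           "ips": [{"address": "192.168.128.3",
                                    "type": "fixed"}]}]}}]
''')

vif = network_info[0]
print(vif['address'])                                     # fa:16:3e:ff:68:87
print(vif['network']['subnets'][0]['ips'][0]['address'])  # 192.168.128.3
print(vif['network']['meta']['mtu'])                      # 8950
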
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 877.183475] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-94f6b642-8cf2-4949-b1bb-e096f0676014 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: fa8a2265-291e-4424-bea1-72574e495a72] Extended root virtual disk {{(pid=61839) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 877.184218] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2432b0f-bbb3-48d8-a78b-d98326a95b97 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.212776] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-94f6b642-8cf2-4949-b1bb-e096f0676014 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: fa8a2265-291e-4424-bea1-72574e495a72] Reconfiguring VM instance instance-0000004f to attach disk [datastore2] fa8a2265-291e-4424-bea1-72574e495a72/fa8a2265-291e-4424-bea1-72574e495a72.vmdk or device None with type sparse {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 877.212776] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-91b66ffe-2a39-4e33-9275-f19ffa3f2888 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.234715] env[61839]: DEBUG oslo_vmware.api [None req-94f6b642-8cf2-4949-b1bb-e096f0676014 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Waiting for the task: (returnval){ [ 877.234715] env[61839]: value = "task-1314586" [ 877.234715] env[61839]: _type = "Task" [ 877.234715] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 877.244277] env[61839]: DEBUG oslo_vmware.api [None req-94f6b642-8cf2-4949-b1bb-e096f0676014 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Task: {'id': task-1314586, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.258522] env[61839]: DEBUG oslo_concurrency.lockutils [None req-049d7e44-9b50-45d0-afa0-68a572684801 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.901s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 877.258522] env[61839]: DEBUG oslo_concurrency.lockutils [None req-1496383a-3c58-427a-8916-58f524015f47 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 11.906s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 877.258522] env[61839]: DEBUG nova.objects.instance [None req-1496383a-3c58-427a-8916-58f524015f47 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] Lazy-loading 'resources' on Instance uuid 2cb53e37-8b0b-48b7-a973-061b91df46df {{(pid=61839) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 877.276026] env[61839]: DEBUG nova.compute.manager [None req-b23234c0-1dd9-455d-88df-914017787547 tempest-ServerActionsV293TestJSON-151230769 tempest-ServerActionsV293TestJSON-151230769-project-member] [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af] Start spawning the instance on the hypervisor. {{(pid=61839) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 877.276026] env[61839]: DEBUG nova.virt.hardware [None req-b23234c0-1dd9-455d-88df-914017787547 tempest-ServerActionsV293TestJSON-151230769 tempest-ServerActionsV293TestJSON-151230769-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-18T16:51:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 877.276026] env[61839]: DEBUG nova.virt.hardware [None req-b23234c0-1dd9-455d-88df-914017787547 tempest-ServerActionsV293TestJSON-151230769 tempest-ServerActionsV293TestJSON-151230769-project-member] Flavor limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 877.276026] env[61839]: DEBUG nova.virt.hardware [None req-b23234c0-1dd9-455d-88df-914017787547 tempest-ServerActionsV293TestJSON-151230769 tempest-ServerActionsV293TestJSON-151230769-project-member] Image limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 877.276026] env[61839]: DEBUG nova.virt.hardware [None req-b23234c0-1dd9-455d-88df-914017787547 tempest-ServerActionsV293TestJSON-151230769 tempest-ServerActionsV293TestJSON-151230769-project-member] Flavor pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 877.276026] env[61839]: DEBUG nova.virt.hardware [None 
req-b23234c0-1dd9-455d-88df-914017787547 tempest-ServerActionsV293TestJSON-151230769 tempest-ServerActionsV293TestJSON-151230769-project-member] Image pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 877.276646] env[61839]: DEBUG nova.virt.hardware [None req-b23234c0-1dd9-455d-88df-914017787547 tempest-ServerActionsV293TestJSON-151230769 tempest-ServerActionsV293TestJSON-151230769-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 877.277032] env[61839]: DEBUG nova.virt.hardware [None req-b23234c0-1dd9-455d-88df-914017787547 tempest-ServerActionsV293TestJSON-151230769 tempest-ServerActionsV293TestJSON-151230769-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 877.277411] env[61839]: DEBUG nova.virt.hardware [None req-b23234c0-1dd9-455d-88df-914017787547 tempest-ServerActionsV293TestJSON-151230769 tempest-ServerActionsV293TestJSON-151230769-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 877.277736] env[61839]: DEBUG nova.virt.hardware [None req-b23234c0-1dd9-455d-88df-914017787547 tempest-ServerActionsV293TestJSON-151230769 tempest-ServerActionsV293TestJSON-151230769-project-member] Got 1 possible topologies {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 877.278070] env[61839]: DEBUG nova.virt.hardware [None req-b23234c0-1dd9-455d-88df-914017787547 tempest-ServerActionsV293TestJSON-151230769 tempest-ServerActionsV293TestJSON-151230769-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 877.278400] env[61839]: DEBUG nova.virt.hardware [None req-b23234c0-1dd9-455d-88df-914017787547 tempest-ServerActionsV293TestJSON-151230769 tempest-ServerActionsV293TestJSON-151230769-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 877.279809] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7dbd7823-0aa8-46f9-a07b-387097507067 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.295358] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c52d12c7-dfb9-4244-a202-3aa00beeaadc {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.323027] env[61839]: INFO nova.scheduler.client.report [None req-049d7e44-9b50-45d0-afa0-68a572684801 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Deleted allocations for instance c996d7db-4b73-4445-9989-4efb2cd852e8 [ 877.579967] env[61839]: DEBUG oslo_concurrency.lockutils [None req-b23234c0-1dd9-455d-88df-914017787547 tempest-ServerActionsV293TestJSON-151230769 tempest-ServerActionsV293TestJSON-151230769-project-member] Releasing lock "refresh_cache-d187e75f-39a9-467b-b5ef-e2772d9b71af" {{(pid=61839) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 877.580514] env[61839]: DEBUG nova.compute.manager [None req-b23234c0-1dd9-455d-88df-914017787547 tempest-ServerActionsV293TestJSON-151230769 tempest-ServerActionsV293TestJSON-151230769-project-member] [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af] Instance network_info: |[{"id": "7dee6a06-890e-4663-a919-d96beac69d5d", "address": "fa:16:3e:ff:68:87", "network": {"id": "daab8b4e-d8f9-408d-874d-b68ea25fd50c", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-1447004582-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "766a6b1d62ed40bfa5fe23e553b1d0f6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f3a80436-f7a9-431a-acec-aca3d76e3f9b", "external-id": "cl2-zone-339", "segmentation_id": 339, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7dee6a06-89", "ovs_interfaceid": "7dee6a06-890e-4663-a919-d96beac69d5d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 877.581109] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-b23234c0-1dd9-455d-88df-914017787547 tempest-ServerActionsV293TestJSON-151230769 tempest-ServerActionsV293TestJSON-151230769-project-member] [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ff:68:87', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f3a80436-f7a9-431a-acec-aca3d76e3f9b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7dee6a06-890e-4663-a919-d96beac69d5d', 'vif_model': 'vmxnet3'}] {{(pid=61839) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 877.591277] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-b23234c0-1dd9-455d-88df-914017787547 tempest-ServerActionsV293TestJSON-151230769 tempest-ServerActionsV293TestJSON-151230769-project-member] Creating folder: Project (766a6b1d62ed40bfa5fe23e553b1d0f6). Parent ref: group-v281288. {{(pid=61839) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 877.591642] env[61839]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-14e1edea-e45d-4959-842a-a898832fba66 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.608025] env[61839]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. [ 877.608025] env[61839]: DEBUG oslo_vmware.api [-] Fault list: [DuplicateName] {{(pid=61839) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 877.608025] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-b23234c0-1dd9-455d-88df-914017787547 tempest-ServerActionsV293TestJSON-151230769 tempest-ServerActionsV293TestJSON-151230769-project-member] Folder already exists: Project (766a6b1d62ed40bfa5fe23e553b1d0f6). 
Parent ref: group-v281288. {{(pid=61839) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1609}} [ 877.608234] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-b23234c0-1dd9-455d-88df-914017787547 tempest-ServerActionsV293TestJSON-151230769 tempest-ServerActionsV293TestJSON-151230769-project-member] Creating folder: Instances. Parent ref: group-v281352. {{(pid=61839) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 877.608351] env[61839]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d0bb9c1c-4628-48ad-8dde-c6fcfb4adcef {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.617950] env[61839]: INFO nova.virt.vmwareapi.vm_util [None req-b23234c0-1dd9-455d-88df-914017787547 tempest-ServerActionsV293TestJSON-151230769 tempest-ServerActionsV293TestJSON-151230769-project-member] Created folder: Instances in parent group-v281352. [ 877.619187] env[61839]: DEBUG oslo.service.loopingcall [None req-b23234c0-1dd9-455d-88df-914017787547 tempest-ServerActionsV293TestJSON-151230769 tempest-ServerActionsV293TestJSON-151230769-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 877.619187] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af] Creating VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 877.619187] env[61839]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1f095c02-f457-48e0-880c-36cbafaae0e9 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.638160] env[61839]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 877.638160] env[61839]: value = "task-1314589" [ 877.638160] env[61839]: _type = "Task" [ 877.638160] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 877.646617] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314589, 'name': CreateVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.745178] env[61839]: DEBUG oslo_vmware.api [None req-94f6b642-8cf2-4949-b1bb-e096f0676014 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Task: {'id': task-1314586, 'name': ReconfigVM_Task, 'duration_secs': 0.314835} completed successfully. 
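
A few entries back, nova.virt.hardware enumerated candidate CPU topologies for the m1.nano flavor: every (sockets, cores, threads) factorization whose product equals the flavor's vcpus, bounded by the 65536 limits, which for 1 vCPU leaves only 1:1:1 ("Got 1 possible topologies"). A compact stand-alone version of that enumeration (Nova's real code additionally applies preferences and sort order):

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                        max_threads=65536):
    topologies = []
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        if vcpus % sockets:
            continue
        for cores in range(1, min(vcpus // sockets, max_cores) + 1):
            if (vcpus // sockets) % cores:
                continue
            threads = vcpus // (sockets * cores)
            if threads <= max_threads:
                topologies.append((sockets, cores, threads))
    return topologies

print(possible_topologies(1))  # [(1, 1, 1)] -- "Got 1 possible topologies"
print(possible_topologies(4))  # six factorizations, (1,1,4) through (4,1,1)
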
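The Folder.CreateFolder exchange just above also shows how the driver keeps folder creation idempotent: vCenter answers a second create of the Project folder with a DuplicateName fault (delivered, as the suds warning notes, over HTTP 200), and the driver treats it as success and proceeds to the Instances folder. A sketch of that tolerate-duplicates pattern (DuplicateName here is a hypothetical exception class standing in for the SOAP fault):

class DuplicateName(Exception):
    """Stand-in for vCenter's DuplicateName fault."""

def create_folder_if_missing(create_folder, name):
    # create_folder is a caller-supplied callable that raises DuplicateName
    # when the folder exists; either way the end state is "folder exists".
    try:
        create_folder(name)
        print(f'Created folder: {name}')
    except DuplicateName:
        print(f'Folder already exists: {name}')
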
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 877.745483] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-94f6b642-8cf2-4949-b1bb-e096f0676014 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: fa8a2265-291e-4424-bea1-72574e495a72] Reconfigured VM instance instance-0000004f to attach disk [datastore2] fa8a2265-291e-4424-bea1-72574e495a72/fa8a2265-291e-4424-bea1-72574e495a72.vmdk or device None with type sparse {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 877.746181] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c0feece9-b74e-4027-93c7-19cf53c138ae {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.753339] env[61839]: DEBUG oslo_vmware.api [None req-94f6b642-8cf2-4949-b1bb-e096f0676014 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Waiting for the task: (returnval){ [ 877.753339] env[61839]: value = "task-1314590" [ 877.753339] env[61839]: _type = "Task" [ 877.753339] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 877.766724] env[61839]: DEBUG oslo_vmware.api [None req-94f6b642-8cf2-4949-b1bb-e096f0676014 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Task: {'id': task-1314590, 'name': Rename_Task} progress is 5%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.831418] env[61839]: DEBUG oslo_concurrency.lockutils [None req-049d7e44-9b50-45d0-afa0-68a572684801 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Lock "c996d7db-4b73-4445-9989-4efb2cd852e8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 16.469s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 878.011023] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de4f9c35-5c92-436b-9608-e14792a84522 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.017818] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69f1166a-1c64-4671-b15f-b777c1448260 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.049277] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4e87085-518c-4d0c-8cf4-cfd01f2c76de {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.058015] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc4d6317-1fdc-432d-aa76-bc3ef5a84a9d {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.073139] env[61839]: DEBUG nova.compute.provider_tree [None req-1496383a-3c58-427a-8916-58f524015f47 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] Inventory has not changed in ProviderTree for provider: 
cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 878.148653] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314589, 'name': CreateVM_Task, 'duration_secs': 0.374069} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 878.148887] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af] Created VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 878.149574] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-b23234c0-1dd9-455d-88df-914017787547 tempest-ServerActionsV293TestJSON-151230769 tempest-ServerActionsV293TestJSON-151230769-project-member] [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'disk_bus': None, 'device_type': None, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-281364', 'volume_id': 'b82148bd-7b88-45c7-b95b-5f60f19c65e5', 'name': 'volume-b82148bd-7b88-45c7-b95b-5f60f19c65e5', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'd187e75f-39a9-467b-b5ef-e2772d9b71af', 'attached_at': '', 'detached_at': '', 'volume_id': 'b82148bd-7b88-45c7-b95b-5f60f19c65e5', 'serial': 'b82148bd-7b88-45c7-b95b-5f60f19c65e5'}, 'guest_format': None, 'attachment_id': '5a3a14f5-1ade-4b57-ba19-f2a08e1c8738', 'mount_device': '/dev/sda', 'delete_on_termination': True, 'boot_index': 0, 'volume_type': None}], 'swap': None} {{(pid=61839) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 878.149859] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-b23234c0-1dd9-455d-88df-914017787547 tempest-ServerActionsV293TestJSON-151230769 tempest-ServerActionsV293TestJSON-151230769-project-member] [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af] Root volume attach. 
Driver type: vmdk {{(pid=61839) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 878.151029] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bd38e38-0035-4128-9d93-f7557c21ac92 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.158973] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa8bfd9b-7fb0-41a9-b4da-cb393e9292cc {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.165587] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52d22aac-edac-471b-bfc0-a5fc80ba1cea {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.173098] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-a676c842-2d27-45cb-94cd-cbc99ccbccb2 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.179078] env[61839]: DEBUG oslo_vmware.api [None req-b23234c0-1dd9-455d-88df-914017787547 tempest-ServerActionsV293TestJSON-151230769 tempest-ServerActionsV293TestJSON-151230769-project-member] Waiting for the task: (returnval){ [ 878.179078] env[61839]: value = "task-1314591" [ 878.179078] env[61839]: _type = "Task" [ 878.179078] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 878.187037] env[61839]: DEBUG oslo_vmware.api [None req-b23234c0-1dd9-455d-88df-914017787547 tempest-ServerActionsV293TestJSON-151230769 tempest-ServerActionsV293TestJSON-151230769-project-member] Task: {'id': task-1314591, 'name': RelocateVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.265221] env[61839]: DEBUG oslo_vmware.api [None req-94f6b642-8cf2-4949-b1bb-e096f0676014 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Task: {'id': task-1314590, 'name': Rename_Task, 'duration_secs': 0.221023} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 878.265867] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-94f6b642-8cf2-4949-b1bb-e096f0676014 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: fa8a2265-291e-4424-bea1-72574e495a72] Powering on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 878.265867] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-410860dc-fb74-4fbb-8ad0-a541b28fc8c5 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.276375] env[61839]: DEBUG oslo_vmware.api [None req-94f6b642-8cf2-4949-b1bb-e096f0676014 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Waiting for the task: (returnval){ [ 878.276375] env[61839]: value = "task-1314592" [ 878.276375] env[61839]: _type = "Task" [ 878.276375] env[61839]: } to complete. 
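
The block-device information above is what makes this a boot-from-volume spawn: 'image' is empty, and the mapping with boot_index 0 (mounted at /dev/sda, delete_on_termination=True) is the Cinder volume the VM is relocated onto. A minimal selector over that structure (dict trimmed from the log):

block_device_info = {
    'root_device_name': '/dev/sda',
    'image': [],
    'block_device_mapping': [
        {'mount_device': '/dev/sda',
         'boot_index': 0,
         'delete_on_termination': True,
         'connection_info': {
             'driver_volume_type': 'vmdk',
             'data': {'volume_id': 'b82148bd-7b88-45c7-b95b-5f60f19c65e5'}}},
    ],
}

def root_bdm(info):
    return next(bdm for bdm in info['block_device_mapping']
                if bdm.get('boot_index') == 0)

print(root_bdm(block_device_info)['connection_info']['data']['volume_id'])
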
{{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 878.286837] env[61839]: DEBUG oslo_vmware.api [None req-94f6b642-8cf2-4949-b1bb-e096f0676014 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Task: {'id': task-1314592, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.577159] env[61839]: DEBUG nova.scheduler.client.report [None req-1496383a-3c58-427a-8916-58f524015f47 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 878.690383] env[61839]: DEBUG oslo_vmware.api [None req-b23234c0-1dd9-455d-88df-914017787547 tempest-ServerActionsV293TestJSON-151230769 tempest-ServerActionsV293TestJSON-151230769-project-member] Task: {'id': task-1314591, 'name': RelocateVM_Task} progress is 40%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.787131] env[61839]: DEBUG oslo_vmware.api [None req-94f6b642-8cf2-4949-b1bb-e096f0676014 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Task: {'id': task-1314592, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.082716] env[61839]: DEBUG oslo_concurrency.lockutils [None req-1496383a-3c58-427a-8916-58f524015f47 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.825s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 879.086034] env[61839]: DEBUG oslo_concurrency.lockutils [None req-ddc0f150-48b1-405a-990f-af9b8e487cda tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 13.715s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 879.086034] env[61839]: DEBUG nova.objects.instance [None req-ddc0f150-48b1-405a-990f-af9b8e487cda tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Lazy-loading 'resources' on Instance uuid c180cc04-79da-4529-a905-1985a85b7cf5 {{(pid=61839) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 879.107124] env[61839]: INFO nova.scheduler.client.report [None req-1496383a-3c58-427a-8916-58f524015f47 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] Deleted allocations for instance 2cb53e37-8b0b-48b7-a973-061b91df46df [ 879.192058] env[61839]: DEBUG oslo_vmware.api [None req-b23234c0-1dd9-455d-88df-914017787547 tempest-ServerActionsV293TestJSON-151230769 tempest-ServerActionsV293TestJSON-151230769-project-member] Task: {'id': task-1314591, 'name': RelocateVM_Task} progress is 53%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.289237] env[61839]: DEBUG oslo_vmware.api [None req-94f6b642-8cf2-4949-b1bb-e096f0676014 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Task: {'id': task-1314592, 'name': PowerOnVM_Task, 'duration_secs': 0.841724} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 879.289850] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-94f6b642-8cf2-4949-b1bb-e096f0676014 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: fa8a2265-291e-4424-bea1-72574e495a72] Powered on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 879.290320] env[61839]: INFO nova.compute.manager [None req-94f6b642-8cf2-4949-b1bb-e096f0676014 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: fa8a2265-291e-4424-bea1-72574e495a72] Took 8.27 seconds to spawn the instance on the hypervisor. 
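
The two "Took N seconds" lines above bracket the spawn and the overall build; the same reporting can be reproduced with a small timing context manager (illustrative only, not the Nova helper):

import time
from contextlib import contextmanager

@contextmanager
def report_duration(what):
    start = time.monotonic()
    try:
        yield
    finally:
        print(f'Took {time.monotonic() - start:.2f} seconds to {what}')

with report_duration('spawn the instance on the hypervisor'):
    time.sleep(0.1)  # stand-in for the actual spawn work
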
[ 879.290684] env[61839]: DEBUG nova.compute.manager [None req-94f6b642-8cf2-4949-b1bb-e096f0676014 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: fa8a2265-291e-4424-bea1-72574e495a72] Checking state {{(pid=61839) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 879.292038] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba5b6ffa-9ad6-4cfb-a1b3-947c6b721b98 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.585513] env[61839]: DEBUG nova.compute.manager [req-f573c9c5-d755-4952-bf5f-bb16ac322e1c req-f57484d1-3e1e-4031-b9c4-31e3cdf58120 service nova] [instance: fa8a2265-291e-4424-bea1-72574e495a72] Received event network-changed-471fcd5e-1ea3-4791-9a4d-b68197f8068e {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 879.585513] env[61839]: DEBUG nova.compute.manager [req-f573c9c5-d755-4952-bf5f-bb16ac322e1c req-f57484d1-3e1e-4031-b9c4-31e3cdf58120 service nova] [instance: fa8a2265-291e-4424-bea1-72574e495a72] Refreshing instance network info cache due to event network-changed-471fcd5e-1ea3-4791-9a4d-b68197f8068e. {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 879.585912] env[61839]: DEBUG oslo_concurrency.lockutils [req-f573c9c5-d755-4952-bf5f-bb16ac322e1c req-f57484d1-3e1e-4031-b9c4-31e3cdf58120 service nova] Acquiring lock "refresh_cache-fa8a2265-291e-4424-bea1-72574e495a72" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 879.586297] env[61839]: DEBUG oslo_concurrency.lockutils [req-f573c9c5-d755-4952-bf5f-bb16ac322e1c req-f57484d1-3e1e-4031-b9c4-31e3cdf58120 service nova] Acquired lock "refresh_cache-fa8a2265-291e-4424-bea1-72574e495a72" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 879.586565] env[61839]: DEBUG nova.network.neutron [req-f573c9c5-d755-4952-bf5f-bb16ac322e1c req-f57484d1-3e1e-4031-b9c4-31e3cdf58120 service nova] [instance: fa8a2265-291e-4424-bea1-72574e495a72] Refreshing network info cache for port 471fcd5e-1ea3-4791-9a4d-b68197f8068e {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 879.618687] env[61839]: DEBUG oslo_concurrency.lockutils [None req-1496383a-3c58-427a-8916-58f524015f47 tempest-ServersTestMultiNic-852651455 tempest-ServersTestMultiNic-852651455-project-member] Lock "2cb53e37-8b0b-48b7-a973-061b91df46df" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 17.517s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 879.631329] env[61839]: DEBUG oslo_vmware.rw_handles [None req-fd02d93b-35a5-4563-a938-4a1df4c5d91d tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/528083bf-421c-87e9-3472-206f7a9a3b69/disk-0.vmdk. 
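
The req-f573c9c5 entries above show the other half of port wiring: Neutron delivers an external network-changed-<port> event, and the compute manager refreshes that instance's cached network info under its refresh_cache-<uuid> lock. A toy version of the invalidate-and-refresh step (fetch_nw_info is a hypothetical callable that re-queries Neutron):

def handle_network_changed(nw_cache, instance_uuid, port_id, fetch_nw_info):
    # Mirrors the log: the event names the port; the cache entry is
    # refreshed per instance rather than patched in place.
    print(f'Refreshing network info cache due to event '
          f'network-changed-{port_id}')
    nw_cache[instance_uuid] = fetch_nw_info(instance_uuid)
    return nw_cache[instance_uuid]
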
{{(pid=61839) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 879.631329] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4864224b-f0a0-467c-adcf-42941debe320 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.647372] env[61839]: DEBUG oslo_vmware.rw_handles [None req-fd02d93b-35a5-4563-a938-4a1df4c5d91d tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/528083bf-421c-87e9-3472-206f7a9a3b69/disk-0.vmdk is in state: ready. {{(pid=61839) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 879.647680] env[61839]: ERROR oslo_vmware.rw_handles [None req-fd02d93b-35a5-4563-a938-4a1df4c5d91d tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/528083bf-421c-87e9-3472-206f7a9a3b69/disk-0.vmdk due to incomplete transfer. [ 879.648891] env[61839]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-e496ba6f-c740-40b3-a4fc-3542d9385b91 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.657046] env[61839]: DEBUG oslo_vmware.rw_handles [None req-fd02d93b-35a5-4563-a938-4a1df4c5d91d tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/528083bf-421c-87e9-3472-206f7a9a3b69/disk-0.vmdk. {{(pid=61839) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 879.657046] env[61839]: DEBUG nova.virt.vmwareapi.images [None req-fd02d93b-35a5-4563-a938-4a1df4c5d91d tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: ef49a6f5-27c3-4595-af65-d6a5aa47d4e4] Uploaded image 47c13bd8-eb3a-4bae-9aff-f4a48e5fa7b6 to the Glance image server {{(pid=61839) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 879.660079] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd02d93b-35a5-4563-a938-4a1df4c5d91d tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: ef49a6f5-27c3-4595-af65-d6a5aa47d4e4] Destroying the VM {{(pid=61839) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 879.660515] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-2b91c900-73a8-495a-9cea-d6aecd3dacb2 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.668415] env[61839]: DEBUG oslo_vmware.api [None req-fd02d93b-35a5-4563-a938-4a1df4c5d91d tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Waiting for the task: (returnval){ [ 879.668415] env[61839]: value = "task-1314593" [ 879.668415] env[61839]: _type = "Task" [ 879.668415] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 879.685126] env[61839]: DEBUG oslo_vmware.api [None req-fd02d93b-35a5-4563-a938-4a1df4c5d91d tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Task: {'id': task-1314593, 'name': Destroy_Task} progress is 0%. 
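
The rw_handles sequence above ends an image upload to Glance: the HTTP NFC lease is still "ready", but the handle knows the transfer did not finish, so it aborts the lease (HttpNfcLeaseAbort) rather than completing it before closing. The close-time decision, sketched with a hypothetical lease object exposing complete()/abort():

def close_transfer(lease, transferred_bytes, expected_bytes):
    if transferred_bytes == expected_bytes:
        lease.complete()  # transfer finished cleanly
    else:
        # "Aborting lease ... due to incomplete transfer."
        lease.abort()
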
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.697417] env[61839]: DEBUG oslo_vmware.api [None req-b23234c0-1dd9-455d-88df-914017787547 tempest-ServerActionsV293TestJSON-151230769 tempest-ServerActionsV293TestJSON-151230769-project-member] Task: {'id': task-1314591, 'name': RelocateVM_Task} progress is 67%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.821828] env[61839]: INFO nova.compute.manager [None req-94f6b642-8cf2-4949-b1bb-e096f0676014 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: fa8a2265-291e-4424-bea1-72574e495a72] Took 31.68 seconds to build instance. [ 879.888192] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce93e463-79a5-4429-879c-f480c4155772 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.901015] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddc85926-1f77-446f-bf4a-5e43ad5d1a57 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.933880] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46d13864-ebf8-4248-87b8-64ee7fd7a537 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.941891] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e94559d-8d8b-4442-a050-72e1885baa9a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.955999] env[61839]: DEBUG nova.compute.provider_tree [None req-ddc0f150-48b1-405a-990f-af9b8e487cda tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 880.144934] env[61839]: DEBUG oslo_concurrency.lockutils [None req-d2f1d7dc-78e2-44fa-94f8-ee4c41a57b55 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Acquiring lock "694a5d4b-3673-406b-a24a-d37fad33e549" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 880.145733] env[61839]: DEBUG oslo_concurrency.lockutils [None req-d2f1d7dc-78e2-44fa-94f8-ee4c41a57b55 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Lock "694a5d4b-3673-406b-a24a-d37fad33e549" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.001s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 880.145733] env[61839]: INFO nova.compute.manager [None req-d2f1d7dc-78e2-44fa-94f8-ee4c41a57b55 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: 694a5d4b-3673-406b-a24a-d37fad33e549] Shelving [ 880.180016] env[61839]: DEBUG oslo_vmware.api [None req-fd02d93b-35a5-4563-a938-4a1df4c5d91d tempest-ImagesTestJSON-742018210 
tempest-ImagesTestJSON-742018210-project-member] Task: {'id': task-1314593, 'name': Destroy_Task, 'duration_secs': 0.429617} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 880.180016] env[61839]: INFO nova.virt.vmwareapi.vm_util [None req-fd02d93b-35a5-4563-a938-4a1df4c5d91d tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: ef49a6f5-27c3-4595-af65-d6a5aa47d4e4] Destroyed the VM [ 880.180340] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-fd02d93b-35a5-4563-a938-4a1df4c5d91d tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: ef49a6f5-27c3-4595-af65-d6a5aa47d4e4] Deleting Snapshot of the VM instance {{(pid=61839) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 880.180532] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-50b64aa1-ef2d-4ec8-8c87-e8fd84d1cc0a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.189612] env[61839]: DEBUG oslo_vmware.api [None req-fd02d93b-35a5-4563-a938-4a1df4c5d91d tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Waiting for the task: (returnval){ [ 880.189612] env[61839]: value = "task-1314594" [ 880.189612] env[61839]: _type = "Task" [ 880.189612] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 880.195519] env[61839]: DEBUG oslo_vmware.api [None req-b23234c0-1dd9-455d-88df-914017787547 tempest-ServerActionsV293TestJSON-151230769 tempest-ServerActionsV293TestJSON-151230769-project-member] Task: {'id': task-1314591, 'name': RelocateVM_Task} progress is 78%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.204430] env[61839]: DEBUG oslo_vmware.api [None req-fd02d93b-35a5-4563-a938-4a1df4c5d91d tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Task: {'id': task-1314594, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.323729] env[61839]: DEBUG oslo_concurrency.lockutils [None req-94f6b642-8cf2-4949-b1bb-e096f0676014 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Lock "fa8a2265-291e-4424-bea1-72574e495a72" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 36.537s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 880.459382] env[61839]: DEBUG nova.scheduler.client.report [None req-ddc0f150-48b1-405a-990f-af9b8e487cda tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 880.654757] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-d2f1d7dc-78e2-44fa-94f8-ee4c41a57b55 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: 694a5d4b-3673-406b-a24a-d37fad33e549] Powering off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 880.654757] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c3e1be93-e81a-4cb4-bc9b-bc1acbb861b5 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.664321] env[61839]: DEBUG oslo_vmware.api [None req-d2f1d7dc-78e2-44fa-94f8-ee4c41a57b55 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Waiting for the task: (returnval){ [ 880.664321] env[61839]: value = "task-1314595" [ 880.664321] env[61839]: _type = "Task" [ 880.664321] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 880.673616] env[61839]: DEBUG oslo_vmware.api [None req-d2f1d7dc-78e2-44fa-94f8-ee4c41a57b55 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Task: {'id': task-1314595, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.693784] env[61839]: DEBUG oslo_vmware.api [None req-b23234c0-1dd9-455d-88df-914017787547 tempest-ServerActionsV293TestJSON-151230769 tempest-ServerActionsV293TestJSON-151230769-project-member] Task: {'id': task-1314591, 'name': RelocateVM_Task} progress is 95%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.704674] env[61839]: DEBUG oslo_vmware.api [None req-fd02d93b-35a5-4563-a938-4a1df4c5d91d tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Task: {'id': task-1314594, 'name': RemoveSnapshot_Task} progress is 80%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.731726] env[61839]: DEBUG nova.network.neutron [req-f573c9c5-d755-4952-bf5f-bb16ac322e1c req-f57484d1-3e1e-4031-b9c4-31e3cdf58120 service nova] [instance: fa8a2265-291e-4424-bea1-72574e495a72] Updated VIF entry in instance network info cache for port 471fcd5e-1ea3-4791-9a4d-b68197f8068e. {{(pid=61839) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 880.733040] env[61839]: DEBUG nova.network.neutron [req-f573c9c5-d755-4952-bf5f-bb16ac322e1c req-f57484d1-3e1e-4031-b9c4-31e3cdf58120 service nova] [instance: fa8a2265-291e-4424-bea1-72574e495a72] Updating instance_info_cache with network_info: [{"id": "471fcd5e-1ea3-4791-9a4d-b68197f8068e", "address": "fa:16:3e:48:06:f8", "network": {"id": "2334f355-8fd7-4df2-ba1c-a28b5fde97f2", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-202913707-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8935bcc7ee644cb7a2a33557a708189c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "37434b93-dfdc-4a3f-bf5a-9f2cbe25a754", "external-id": "nsx-vlan-transportzone-676", "segmentation_id": 676, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap471fcd5e-1e", "ovs_interfaceid": "471fcd5e-1ea3-4791-9a4d-b68197f8068e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 880.970877] env[61839]: DEBUG oslo_concurrency.lockutils [None req-ddc0f150-48b1-405a-990f-af9b8e487cda tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.883s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 880.972432] env[61839]: DEBUG oslo_concurrency.lockutils [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 14.345s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 880.972667] env[61839]: DEBUG oslo_concurrency.lockutils [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 880.973143] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61839) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 880.973721] env[61839]: DEBUG oslo_concurrency.lockutils [None 
req-30d9488d-cb37-4568-96d9-8af15aa81167 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.530s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 880.974763] env[61839]: DEBUG nova.objects.instance [None req-30d9488d-cb37-4568-96d9-8af15aa81167 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Lazy-loading 'resources' on Instance uuid e47c08c6-5de3-48b0-8327-57ddb273555f {{(pid=61839) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 880.975784] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-703b109e-ed25-4e14-b287-576af26a928b {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.987079] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8283df3-81e5-4ec1-967c-98e174944865 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.003635] env[61839]: INFO nova.scheduler.client.report [None req-ddc0f150-48b1-405a-990f-af9b8e487cda tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Deleted allocations for instance c180cc04-79da-4529-a905-1985a85b7cf5 [ 881.005858] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d90ec71-0c4c-4d93-9337-fd46c0a9c5c4 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.018168] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02f169f0-bac3-4319-930c-10642252de46 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.050318] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180304MB free_disk=134GB free_vcpus=48 pci_devices=None {{(pid=61839) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 881.050515] env[61839]: DEBUG oslo_concurrency.lockutils [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 881.172808] env[61839]: DEBUG oslo_vmware.api [None req-d2f1d7dc-78e2-44fa-94f8-ee4c41a57b55 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Task: {'id': task-1314595, 'name': PowerOffVM_Task, 'duration_secs': 0.307651} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 881.173165] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-d2f1d7dc-78e2-44fa-94f8-ee4c41a57b55 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: 694a5d4b-3673-406b-a24a-d37fad33e549] Powered off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 881.173998] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c573074f-cc4d-4c03-b7ac-b1027d2b16eb {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.197758] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-182fe680-087e-4068-ae53-b1afe63811e5 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.209464] env[61839]: DEBUG oslo_vmware.api [None req-fd02d93b-35a5-4563-a938-4a1df4c5d91d tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Task: {'id': task-1314594, 'name': RemoveSnapshot_Task, 'duration_secs': 0.896769} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 881.214689] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-fd02d93b-35a5-4563-a938-4a1df4c5d91d tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: ef49a6f5-27c3-4595-af65-d6a5aa47d4e4] Deleted Snapshot of the VM instance {{(pid=61839) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 881.215029] env[61839]: INFO nova.compute.manager [None req-fd02d93b-35a5-4563-a938-4a1df4c5d91d tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: ef49a6f5-27c3-4595-af65-d6a5aa47d4e4] Took 15.14 seconds to snapshot the instance on the hypervisor. [ 881.222077] env[61839]: DEBUG oslo_vmware.api [None req-b23234c0-1dd9-455d-88df-914017787547 tempest-ServerActionsV293TestJSON-151230769 tempest-ServerActionsV293TestJSON-151230769-project-member] Task: {'id': task-1314591, 'name': RelocateVM_Task} progress is 97%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.222695] env[61839]: DEBUG oslo_concurrency.lockutils [None req-1fc33993-f180-4741-84a9-74e3882fe537 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Acquiring lock "a4463efc-ffca-4552-a072-cbf5fe062533" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 881.222807] env[61839]: DEBUG oslo_concurrency.lockutils [None req-1fc33993-f180-4741-84a9-74e3882fe537 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Lock "a4463efc-ffca-4552-a072-cbf5fe062533" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 881.236800] env[61839]: DEBUG oslo_concurrency.lockutils [req-f573c9c5-d755-4952-bf5f-bb16ac322e1c req-f57484d1-3e1e-4031-b9c4-31e3cdf58120 service nova] Releasing lock "refresh_cache-fa8a2265-291e-4424-bea1-72574e495a72" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 881.516440] env[61839]: DEBUG oslo_concurrency.lockutils [None req-ddc0f150-48b1-405a-990f-af9b8e487cda tempest-MultipleCreateTestJSON-1549308977 tempest-MultipleCreateTestJSON-1549308977-project-member] Lock "c180cc04-79da-4529-a905-1985a85b7cf5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 19.726s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 881.699476] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b5104e3-36a0-4241-bf4b-2eecb4c5fd92 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.710353] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74854e43-842c-4025-aba7-6acf06ecfb9e {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.713820] env[61839]: DEBUG oslo_vmware.api [None req-b23234c0-1dd9-455d-88df-914017787547 tempest-ServerActionsV293TestJSON-151230769 tempest-ServerActionsV293TestJSON-151230769-project-member] Task: {'id': task-1314591, 'name': RelocateVM_Task, 'duration_secs': 3.445094} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 881.714187] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-b23234c0-1dd9-455d-88df-914017787547 tempest-ServerActionsV293TestJSON-151230769 tempest-ServerActionsV293TestJSON-151230769-project-member] [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af] Volume attach. 
Driver type: vmdk {{(pid=61839) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 881.715031] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-b23234c0-1dd9-455d-88df-914017787547 tempest-ServerActionsV293TestJSON-151230769 tempest-ServerActionsV293TestJSON-151230769-project-member] [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-281364', 'volume_id': 'b82148bd-7b88-45c7-b95b-5f60f19c65e5', 'name': 'volume-b82148bd-7b88-45c7-b95b-5f60f19c65e5', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'd187e75f-39a9-467b-b5ef-e2772d9b71af', 'attached_at': '', 'detached_at': '', 'volume_id': 'b82148bd-7b88-45c7-b95b-5f60f19c65e5', 'serial': 'b82148bd-7b88-45c7-b95b-5f60f19c65e5'} {{(pid=61839) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 881.715679] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64db10c9-6812-45dc-9e20-4f6f486e8fd7 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.748176] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-d2f1d7dc-78e2-44fa-94f8-ee4c41a57b55 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: 694a5d4b-3673-406b-a24a-d37fad33e549] Creating Snapshot of the VM instance {{(pid=61839) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 881.749128] env[61839]: DEBUG nova.compute.utils [None req-1fc33993-f180-4741-84a9-74e3882fe537 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Using /dev/sd instead of None {{(pid=61839) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 881.752458] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-0a9aba96-c658-457d-b813-45476ffb5ea6 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.757022] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5af5107d-5a43-44eb-815b-2bea2098686a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.772685] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f5520e1-acc6-42c1-a5be-2af1f471b20e {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.778816] env[61839]: DEBUG oslo_vmware.api [None req-d2f1d7dc-78e2-44fa-94f8-ee4c41a57b55 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Waiting for the task: (returnval){ [ 881.778816] env[61839]: value = "task-1314596" [ 881.778816] env[61839]: _type = "Task" [ 881.778816] env[61839]: } to complete. 
{{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 881.783358] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9d7c5b1-c3ab-4a0d-8342-5c185ce6cba5 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.806826] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-b23234c0-1dd9-455d-88df-914017787547 tempest-ServerActionsV293TestJSON-151230769 tempest-ServerActionsV293TestJSON-151230769-project-member] [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af] Reconfiguring VM instance instance-00000050 to attach disk [datastore1] volume-b82148bd-7b88-45c7-b95b-5f60f19c65e5/volume-b82148bd-7b88-45c7-b95b-5f60f19c65e5.vmdk or device None with type thin {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 881.810154] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c3f63922-9553-4d9e-a81f-7ba4c05d636b {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.840560] env[61839]: DEBUG nova.compute.provider_tree [None req-30d9488d-cb37-4568-96d9-8af15aa81167 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 881.849656] env[61839]: DEBUG oslo_concurrency.lockutils [None req-4887ddbd-b554-48d0-94f5-d697450e63c9 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Acquiring lock "042183e2-d203-4d07-a668-df24ba50e242" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 881.849921] env[61839]: DEBUG oslo_concurrency.lockutils [None req-4887ddbd-b554-48d0-94f5-d697450e63c9 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Lock "042183e2-d203-4d07-a668-df24ba50e242" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 881.851026] env[61839]: DEBUG oslo_vmware.api [None req-d2f1d7dc-78e2-44fa-94f8-ee4c41a57b55 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Task: {'id': task-1314596, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.853311] env[61839]: DEBUG oslo_vmware.api [None req-b23234c0-1dd9-455d-88df-914017787547 tempest-ServerActionsV293TestJSON-151230769 tempest-ServerActionsV293TestJSON-151230769-project-member] Waiting for the task: (returnval){ [ 881.853311] env[61839]: value = "task-1314597" [ 881.853311] env[61839]: _type = "Task" [ 881.853311] env[61839]: } to complete. 
{{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 881.862545] env[61839]: DEBUG oslo_vmware.api [None req-b23234c0-1dd9-455d-88df-914017787547 tempest-ServerActionsV293TestJSON-151230769 tempest-ServerActionsV293TestJSON-151230769-project-member] Task: {'id': task-1314597, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.004229] env[61839]: DEBUG oslo_vmware.rw_handles [None req-460ca6f5-b390-4cca-bac6-7db949ba1471 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52e27b49-ecc1-7327-1a64-b8749fbcb3c2/disk-0.vmdk. {{(pid=61839) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 882.004229] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48202ea0-679e-4c0e-bd75-ece48ff001dd {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.010790] env[61839]: DEBUG oslo_vmware.rw_handles [None req-460ca6f5-b390-4cca-bac6-7db949ba1471 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52e27b49-ecc1-7327-1a64-b8749fbcb3c2/disk-0.vmdk is in state: ready. {{(pid=61839) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 882.011192] env[61839]: ERROR oslo_vmware.rw_handles [None req-460ca6f5-b390-4cca-bac6-7db949ba1471 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52e27b49-ecc1-7327-1a64-b8749fbcb3c2/disk-0.vmdk due to incomplete transfer. [ 882.011543] env[61839]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-2a6543a7-9bc4-4baf-9797-08cf95d4a218 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.021525] env[61839]: DEBUG oslo_vmware.rw_handles [None req-460ca6f5-b390-4cca-bac6-7db949ba1471 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52e27b49-ecc1-7327-1a64-b8749fbcb3c2/disk-0.vmdk. 
{{(pid=61839) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 882.021980] env[61839]: DEBUG nova.virt.vmwareapi.images [None req-460ca6f5-b390-4cca-bac6-7db949ba1471 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4] Uploaded image f5330b3b-767a-4697-b50e-19123c586f85 to the Glance image server {{(pid=61839) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 882.023701] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-460ca6f5-b390-4cca-bac6-7db949ba1471 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4] Destroying the VM {{(pid=61839) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 882.024088] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-29afb4b8-3ac9-4647-9aba-fa8d53a97cf5 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.030936] env[61839]: DEBUG oslo_vmware.api [None req-460ca6f5-b390-4cca-bac6-7db949ba1471 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Waiting for the task: (returnval){ [ 882.030936] env[61839]: value = "task-1314598" [ 882.030936] env[61839]: _type = "Task" [ 882.030936] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 882.039226] env[61839]: DEBUG oslo_vmware.api [None req-460ca6f5-b390-4cca-bac6-7db949ba1471 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1314598, 'name': Destroy_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.254062] env[61839]: DEBUG oslo_concurrency.lockutils [None req-1fc33993-f180-4741-84a9-74e3882fe537 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Lock "a4463efc-ffca-4552-a072-cbf5fe062533" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.030s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 883.093383] env[61839]: DEBUG oslo_concurrency.lockutils [None req-047f717f-cb4c-4358-b6f0-e71d9ea669bd tempest-ServersTestManualDisk-590910019 tempest-ServersTestManualDisk-590910019-project-member] Acquiring lock "39728872-2d30-48eb-90da-412f1e45971c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 883.093677] env[61839]: DEBUG oslo_concurrency.lockutils [None req-047f717f-cb4c-4358-b6f0-e71d9ea669bd tempest-ServersTestManualDisk-590910019 tempest-ServersTestManualDisk-590910019-project-member] Lock "39728872-2d30-48eb-90da-412f1e45971c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 883.093872] env[61839]: DEBUG oslo_concurrency.lockutils [None req-047f717f-cb4c-4358-b6f0-e71d9ea669bd tempest-ServersTestManualDisk-590910019 tempest-ServersTestManualDisk-590910019-project-member] Acquiring lock "39728872-2d30-48eb-90da-412f1e45971c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 883.094074] env[61839]: DEBUG oslo_concurrency.lockutils [None req-047f717f-cb4c-4358-b6f0-e71d9ea669bd tempest-ServersTestManualDisk-590910019 tempest-ServersTestManualDisk-590910019-project-member] Lock "39728872-2d30-48eb-90da-412f1e45971c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 883.094301] env[61839]: DEBUG oslo_concurrency.lockutils [None req-047f717f-cb4c-4358-b6f0-e71d9ea669bd tempest-ServersTestManualDisk-590910019 tempest-ServersTestManualDisk-590910019-project-member] Lock "39728872-2d30-48eb-90da-412f1e45971c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 883.096898] env[61839]: DEBUG nova.scheduler.client.report [None req-30d9488d-cb37-4568-96d9-8af15aa81167 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider 
/opt/stack/nova/nova/scheduler/client/report.py:954}} [ 883.099605] env[61839]: DEBUG nova.compute.manager [None req-4887ddbd-b554-48d0-94f5-d697450e63c9 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: 042183e2-d203-4d07-a668-df24ba50e242] Starting instance... {{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 883.104435] env[61839]: DEBUG oslo_vmware.api [None req-d2f1d7dc-78e2-44fa-94f8-ee4c41a57b55 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Task: {'id': task-1314596, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.104435] env[61839]: WARNING oslo_vmware.common.loopingcall [None req-d2f1d7dc-78e2-44fa-94f8-ee4c41a57b55 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] task run outlasted interval by 0.323882 sec [ 883.104851] env[61839]: INFO nova.compute.manager [None req-047f717f-cb4c-4358-b6f0-e71d9ea669bd tempest-ServersTestManualDisk-590910019 tempest-ServersTestManualDisk-590910019-project-member] [instance: 39728872-2d30-48eb-90da-412f1e45971c] Terminating instance [ 883.110805] env[61839]: DEBUG nova.compute.manager [req-946613e9-e38f-496c-9d99-a5981c50f0b7 req-e723cbb7-456b-4a51-be0c-971b1e3a5454 service nova] [instance: e47c08c6-5de3-48b0-8327-57ddb273555f] Received event network-vif-deleted-c47afc9a-8a60-4c82-9548-b644e8e26492 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 883.111034] env[61839]: DEBUG nova.compute.manager [req-946613e9-e38f-496c-9d99-a5981c50f0b7 req-e723cbb7-456b-4a51-be0c-971b1e3a5454 service nova] [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af] Received event network-vif-plugged-7dee6a06-890e-4663-a919-d96beac69d5d {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 883.111228] env[61839]: DEBUG oslo_concurrency.lockutils [req-946613e9-e38f-496c-9d99-a5981c50f0b7 req-e723cbb7-456b-4a51-be0c-971b1e3a5454 service nova] Acquiring lock "d187e75f-39a9-467b-b5ef-e2772d9b71af-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 883.111429] env[61839]: DEBUG oslo_concurrency.lockutils [req-946613e9-e38f-496c-9d99-a5981c50f0b7 req-e723cbb7-456b-4a51-be0c-971b1e3a5454 service nova] Lock "d187e75f-39a9-467b-b5ef-e2772d9b71af-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 883.111589] env[61839]: DEBUG oslo_concurrency.lockutils [req-946613e9-e38f-496c-9d99-a5981c50f0b7 req-e723cbb7-456b-4a51-be0c-971b1e3a5454 service nova] Lock "d187e75f-39a9-467b-b5ef-e2772d9b71af-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 883.111749] env[61839]: DEBUG nova.compute.manager [req-946613e9-e38f-496c-9d99-a5981c50f0b7 req-e723cbb7-456b-4a51-be0c-971b1e3a5454 service nova] [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af] No waiting events found dispatching network-vif-plugged-7dee6a06-890e-4663-a919-d96beac69d5d {{(pid=61839) 
pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 883.111907] env[61839]: WARNING nova.compute.manager [req-946613e9-e38f-496c-9d99-a5981c50f0b7 req-e723cbb7-456b-4a51-be0c-971b1e3a5454 service nova] [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af] Received unexpected event network-vif-plugged-7dee6a06-890e-4663-a919-d96beac69d5d for instance with vm_state building and task_state spawning. [ 883.112206] env[61839]: DEBUG nova.compute.manager [req-946613e9-e38f-496c-9d99-a5981c50f0b7 req-e723cbb7-456b-4a51-be0c-971b1e3a5454 service nova] [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af] Received event network-changed-7dee6a06-890e-4663-a919-d96beac69d5d {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 883.113806] env[61839]: DEBUG nova.compute.manager [req-946613e9-e38f-496c-9d99-a5981c50f0b7 req-e723cbb7-456b-4a51-be0c-971b1e3a5454 service nova] [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af] Refreshing instance network info cache due to event network-changed-7dee6a06-890e-4663-a919-d96beac69d5d. {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 883.113806] env[61839]: DEBUG oslo_concurrency.lockutils [req-946613e9-e38f-496c-9d99-a5981c50f0b7 req-e723cbb7-456b-4a51-be0c-971b1e3a5454 service nova] Acquiring lock "refresh_cache-d187e75f-39a9-467b-b5ef-e2772d9b71af" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 883.113806] env[61839]: DEBUG oslo_concurrency.lockutils [req-946613e9-e38f-496c-9d99-a5981c50f0b7 req-e723cbb7-456b-4a51-be0c-971b1e3a5454 service nova] Acquired lock "refresh_cache-d187e75f-39a9-467b-b5ef-e2772d9b71af" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 883.113806] env[61839]: DEBUG nova.network.neutron [req-946613e9-e38f-496c-9d99-a5981c50f0b7 req-e723cbb7-456b-4a51-be0c-971b1e3a5454 service nova] [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af] Refreshing network info cache for port 7dee6a06-890e-4663-a919-d96beac69d5d {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 883.121079] env[61839]: DEBUG nova.compute.manager [None req-047f717f-cb4c-4358-b6f0-e71d9ea669bd tempest-ServersTestManualDisk-590910019 tempest-ServersTestManualDisk-590910019-project-member] [instance: 39728872-2d30-48eb-90da-412f1e45971c] Start destroying the instance on the hypervisor. {{(pid=61839) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 883.121079] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-047f717f-cb4c-4358-b6f0-e71d9ea669bd tempest-ServersTestManualDisk-590910019 tempest-ServersTestManualDisk-590910019-project-member] [instance: 39728872-2d30-48eb-90da-412f1e45971c] Destroying instance {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 883.123963] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00f14ebf-5b0f-4fa4-9334-6eb54c4d865d {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.136017] env[61839]: DEBUG oslo_vmware.api [None req-460ca6f5-b390-4cca-bac6-7db949ba1471 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1314598, 'name': Destroy_Task} progress is 100%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.136242] env[61839]: DEBUG oslo_vmware.api [None req-b23234c0-1dd9-455d-88df-914017787547 tempest-ServerActionsV293TestJSON-151230769 tempest-ServerActionsV293TestJSON-151230769-project-member] Task: {'id': task-1314597, 'name': ReconfigVM_Task, 'duration_secs': 0.692295} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 883.141105] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-b23234c0-1dd9-455d-88df-914017787547 tempest-ServerActionsV293TestJSON-151230769 tempest-ServerActionsV293TestJSON-151230769-project-member] [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af] Reconfigured VM instance instance-00000050 to attach disk [datastore1] volume-b82148bd-7b88-45c7-b95b-5f60f19c65e5/volume-b82148bd-7b88-45c7-b95b-5f60f19c65e5.vmdk or device None with type thin {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 883.146273] env[61839]: DEBUG oslo_vmware.api [None req-d2f1d7dc-78e2-44fa-94f8-ee4c41a57b55 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Task: {'id': task-1314596, 'name': CreateSnapshot_Task, 'duration_secs': 1.1995} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 883.146509] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-047f717f-cb4c-4358-b6f0-e71d9ea669bd tempest-ServersTestManualDisk-590910019 tempest-ServersTestManualDisk-590910019-project-member] [instance: 39728872-2d30-48eb-90da-412f1e45971c] Powering off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 883.146997] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7fd5f55d-3b73-4d2c-9af6-b387594fecd4 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.156856] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-d2f1d7dc-78e2-44fa-94f8-ee4c41a57b55 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: 694a5d4b-3673-406b-a24a-d37fad33e549] Created Snapshot of the VM instance {{(pid=61839) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 883.157143] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a744715c-c576-4531-b9a4-92b93a2889de {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.159957] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c57f66f-bc5c-4ce5-8777-8dc738edc012 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.172584] env[61839]: DEBUG oslo_vmware.api [None req-047f717f-cb4c-4358-b6f0-e71d9ea669bd tempest-ServersTestManualDisk-590910019 tempest-ServersTestManualDisk-590910019-project-member] Waiting for the task: (returnval){ [ 883.172584] env[61839]: value = "task-1314599" [ 883.172584] env[61839]: _type = "Task" [ 883.172584] env[61839]: } to complete. 
{{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 883.172779] env[61839]: DEBUG oslo_vmware.api [None req-b23234c0-1dd9-455d-88df-914017787547 tempest-ServerActionsV293TestJSON-151230769 tempest-ServerActionsV293TestJSON-151230769-project-member] Waiting for the task: (returnval){ [ 883.172779] env[61839]: value = "task-1314600" [ 883.172779] env[61839]: _type = "Task" [ 883.172779] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 883.190355] env[61839]: DEBUG oslo_vmware.api [None req-047f717f-cb4c-4358-b6f0-e71d9ea669bd tempest-ServersTestManualDisk-590910019 tempest-ServersTestManualDisk-590910019-project-member] Task: {'id': task-1314599, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.194087] env[61839]: DEBUG oslo_vmware.api [None req-b23234c0-1dd9-455d-88df-914017787547 tempest-ServerActionsV293TestJSON-151230769 tempest-ServerActionsV293TestJSON-151230769-project-member] Task: {'id': task-1314600, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.611812] env[61839]: DEBUG oslo_concurrency.lockutils [None req-30d9488d-cb37-4568-96d9-8af15aa81167 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.638s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 883.615592] env[61839]: DEBUG oslo_concurrency.lockutils [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 2.565s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 883.633086] env[61839]: DEBUG oslo_concurrency.lockutils [None req-1fc33993-f180-4741-84a9-74e3882fe537 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Acquiring lock "a4463efc-ffca-4552-a072-cbf5fe062533" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 883.634458] env[61839]: DEBUG oslo_concurrency.lockutils [None req-1fc33993-f180-4741-84a9-74e3882fe537 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Lock "a4463efc-ffca-4552-a072-cbf5fe062533" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 883.634458] env[61839]: INFO nova.compute.manager [None req-1fc33993-f180-4741-84a9-74e3882fe537 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: a4463efc-ffca-4552-a072-cbf5fe062533] Attaching volume 0b8e7770-f390-47f6-b69f-12c3b2a6f497 to /dev/sdb [ 883.635310] env[61839]: DEBUG oslo_vmware.api [None req-460ca6f5-b390-4cca-bac6-7db949ba1471 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1314598, 'name': Destroy_Task} progress is 100%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.636711] env[61839]: DEBUG oslo_concurrency.lockutils [None req-4887ddbd-b554-48d0-94f5-d697450e63c9 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 883.650126] env[61839]: INFO nova.scheduler.client.report [None req-30d9488d-cb37-4568-96d9-8af15aa81167 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Deleted allocations for instance e47c08c6-5de3-48b0-8327-57ddb273555f [ 883.681151] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-d2f1d7dc-78e2-44fa-94f8-ee4c41a57b55 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: 694a5d4b-3673-406b-a24a-d37fad33e549] Creating linked-clone VM from snapshot {{(pid=61839) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 883.684081] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-f4a718bf-b714-4d04-abe9-5dc65323dd3e {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.696159] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7182c57-0fdb-434c-a463-6a37b753a67c {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.707254] env[61839]: DEBUG oslo_vmware.api [None req-047f717f-cb4c-4358-b6f0-e71d9ea669bd tempest-ServersTestManualDisk-590910019 tempest-ServersTestManualDisk-590910019-project-member] Task: {'id': task-1314599, 'name': PowerOffVM_Task, 'duration_secs': 0.246895} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 883.712568] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-047f717f-cb4c-4358-b6f0-e71d9ea669bd tempest-ServersTestManualDisk-590910019 tempest-ServersTestManualDisk-590910019-project-member] [instance: 39728872-2d30-48eb-90da-412f1e45971c] Powered off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 883.712568] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-047f717f-cb4c-4358-b6f0-e71d9ea669bd tempest-ServersTestManualDisk-590910019 tempest-ServersTestManualDisk-590910019-project-member] [instance: 39728872-2d30-48eb-90da-412f1e45971c] Unregistering the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 883.712689] env[61839]: DEBUG oslo_vmware.api [None req-d2f1d7dc-78e2-44fa-94f8-ee4c41a57b55 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Waiting for the task: (returnval){ [ 883.712689] env[61839]: value = "task-1314601" [ 883.712689] env[61839]: _type = "Task" [ 883.712689] env[61839]: } to complete. 
{{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 883.712893] env[61839]: DEBUG oslo_vmware.api [None req-b23234c0-1dd9-455d-88df-914017787547 tempest-ServerActionsV293TestJSON-151230769 tempest-ServerActionsV293TestJSON-151230769-project-member] Task: {'id': task-1314600, 'name': ReconfigVM_Task, 'duration_secs': 0.181383} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 883.713908] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-51bc3cc1-05d3-4dfa-b1d4-858c827013c9 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.715533] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-b23234c0-1dd9-455d-88df-914017787547 tempest-ServerActionsV293TestJSON-151230769 tempest-ServerActionsV293TestJSON-151230769-project-member] [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-281364', 'volume_id': 'b82148bd-7b88-45c7-b95b-5f60f19c65e5', 'name': 'volume-b82148bd-7b88-45c7-b95b-5f60f19c65e5', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'd187e75f-39a9-467b-b5ef-e2772d9b71af', 'attached_at': '', 'detached_at': '', 'volume_id': 'b82148bd-7b88-45c7-b95b-5f60f19c65e5', 'serial': 'b82148bd-7b88-45c7-b95b-5f60f19c65e5'} {{(pid=61839) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 883.716487] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bc953f7-be2f-4c7e-96fe-dee78a3e22fc {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.722757] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f915a408-b2f5-425b-bc4e-d23b5fa90aeb {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.731231] env[61839]: DEBUG oslo_vmware.api [None req-d2f1d7dc-78e2-44fa-94f8-ee4c41a57b55 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Task: {'id': task-1314601, 'name': CloneVM_Task} progress is 10%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.732411] env[61839]: DEBUG oslo_vmware.api [None req-b23234c0-1dd9-455d-88df-914017787547 tempest-ServerActionsV293TestJSON-151230769 tempest-ServerActionsV293TestJSON-151230769-project-member] Waiting for the task: (returnval){ [ 883.732411] env[61839]: value = "task-1314603" [ 883.732411] env[61839]: _type = "Task" [ 883.732411] env[61839]: } to complete. 
{{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 883.738408] env[61839]: DEBUG nova.virt.block_device [None req-1fc33993-f180-4741-84a9-74e3882fe537 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: a4463efc-ffca-4552-a072-cbf5fe062533] Updating existing volume attachment record: a54d9b16-042a-4106-8b6d-46055ba7b43b {{(pid=61839) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 883.745085] env[61839]: DEBUG oslo_vmware.api [None req-b23234c0-1dd9-455d-88df-914017787547 tempest-ServerActionsV293TestJSON-151230769 tempest-ServerActionsV293TestJSON-151230769-project-member] Task: {'id': task-1314603, 'name': Rename_Task} progress is 5%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.799177] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-047f717f-cb4c-4358-b6f0-e71d9ea669bd tempest-ServersTestManualDisk-590910019 tempest-ServersTestManualDisk-590910019-project-member] [instance: 39728872-2d30-48eb-90da-412f1e45971c] Unregistered the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 883.799416] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-047f717f-cb4c-4358-b6f0-e71d9ea669bd tempest-ServersTestManualDisk-590910019 tempest-ServersTestManualDisk-590910019-project-member] [instance: 39728872-2d30-48eb-90da-412f1e45971c] Deleting contents of the VM from datastore datastore1 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 883.799611] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-047f717f-cb4c-4358-b6f0-e71d9ea669bd tempest-ServersTestManualDisk-590910019 tempest-ServersTestManualDisk-590910019-project-member] Deleting the datastore file [datastore1] 39728872-2d30-48eb-90da-412f1e45971c {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 883.800555] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-deb9b120-d424-4942-a28e-aefe7e08d726 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.806780] env[61839]: DEBUG oslo_vmware.api [None req-047f717f-cb4c-4358-b6f0-e71d9ea669bd tempest-ServersTestManualDisk-590910019 tempest-ServersTestManualDisk-590910019-project-member] Waiting for the task: (returnval){ [ 883.806780] env[61839]: value = "task-1314604" [ 883.806780] env[61839]: _type = "Task" [ 883.806780] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 883.815029] env[61839]: DEBUG oslo_vmware.api [None req-047f717f-cb4c-4358-b6f0-e71d9ea669bd tempest-ServersTestManualDisk-590910019 tempest-ServersTestManualDisk-590910019-project-member] Task: {'id': task-1314604, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.114639] env[61839]: DEBUG oslo_vmware.api [None req-460ca6f5-b390-4cca-bac6-7db949ba1471 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1314598, 'name': Destroy_Task, 'duration_secs': 1.721248} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 884.115051] env[61839]: INFO nova.virt.vmwareapi.vm_util [None req-460ca6f5-b390-4cca-bac6-7db949ba1471 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4] Destroyed the VM [ 884.115322] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-460ca6f5-b390-4cca-bac6-7db949ba1471 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4] Deleting Snapshot of the VM instance {{(pid=61839) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 884.115622] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-92725add-4d8d-4afc-a396-13126aedd64c {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.127915] env[61839]: DEBUG oslo_vmware.api [None req-460ca6f5-b390-4cca-bac6-7db949ba1471 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Waiting for the task: (returnval){ [ 884.127915] env[61839]: value = "task-1314608" [ 884.127915] env[61839]: _type = "Task" [ 884.127915] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 884.137951] env[61839]: DEBUG oslo_vmware.api [None req-460ca6f5-b390-4cca-bac6-7db949ba1471 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1314608, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.160813] env[61839]: DEBUG oslo_concurrency.lockutils [None req-30d9488d-cb37-4568-96d9-8af15aa81167 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Lock "e47c08c6-5de3-48b0-8327-57ddb273555f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 11.618s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 884.166082] env[61839]: DEBUG nova.network.neutron [req-946613e9-e38f-496c-9d99-a5981c50f0b7 req-e723cbb7-456b-4a51-be0c-971b1e3a5454 service nova] [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af] Updated VIF entry in instance network info cache for port 7dee6a06-890e-4663-a919-d96beac69d5d. 
{{(pid=61839) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 884.166486] env[61839]: DEBUG nova.network.neutron [req-946613e9-e38f-496c-9d99-a5981c50f0b7 req-e723cbb7-456b-4a51-be0c-971b1e3a5454 service nova] [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af] Updating instance_info_cache with network_info: [{"id": "7dee6a06-890e-4663-a919-d96beac69d5d", "address": "fa:16:3e:ff:68:87", "network": {"id": "daab8b4e-d8f9-408d-874d-b68ea25fd50c", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-1447004582-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "766a6b1d62ed40bfa5fe23e553b1d0f6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f3a80436-f7a9-431a-acec-aca3d76e3f9b", "external-id": "cl2-zone-339", "segmentation_id": 339, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7dee6a06-89", "ovs_interfaceid": "7dee6a06-890e-4663-a919-d96beac69d5d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 884.226129] env[61839]: DEBUG oslo_vmware.api [None req-d2f1d7dc-78e2-44fa-94f8-ee4c41a57b55 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Task: {'id': task-1314601, 'name': CloneVM_Task} progress is 94%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.243412] env[61839]: DEBUG oslo_vmware.api [None req-b23234c0-1dd9-455d-88df-914017787547 tempest-ServerActionsV293TestJSON-151230769 tempest-ServerActionsV293TestJSON-151230769-project-member] Task: {'id': task-1314603, 'name': Rename_Task, 'duration_secs': 0.336651} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 884.243728] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-b23234c0-1dd9-455d-88df-914017787547 tempest-ServerActionsV293TestJSON-151230769 tempest-ServerActionsV293TestJSON-151230769-project-member] [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af] Powering on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 884.244129] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-89337247-50ac-4102-bcee-a3129e6ff065 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.251743] env[61839]: DEBUG oslo_vmware.api [None req-b23234c0-1dd9-455d-88df-914017787547 tempest-ServerActionsV293TestJSON-151230769 tempest-ServerActionsV293TestJSON-151230769-project-member] Waiting for the task: (returnval){ [ 884.251743] env[61839]: value = "task-1314609" [ 884.251743] env[61839]: _type = "Task" [ 884.251743] env[61839]: } to complete. 
{{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 884.263462] env[61839]: DEBUG oslo_vmware.api [None req-b23234c0-1dd9-455d-88df-914017787547 tempest-ServerActionsV293TestJSON-151230769 tempest-ServerActionsV293TestJSON-151230769-project-member] Task: {'id': task-1314609, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.321363] env[61839]: DEBUG oslo_vmware.api [None req-047f717f-cb4c-4358-b6f0-e71d9ea669bd tempest-ServersTestManualDisk-590910019 tempest-ServersTestManualDisk-590910019-project-member] Task: {'id': task-1314604, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.227764} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 884.322059] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-047f717f-cb4c-4358-b6f0-e71d9ea669bd tempest-ServersTestManualDisk-590910019 tempest-ServersTestManualDisk-590910019-project-member] Deleted the datastore file {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 884.322059] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-047f717f-cb4c-4358-b6f0-e71d9ea669bd tempest-ServersTestManualDisk-590910019 tempest-ServersTestManualDisk-590910019-project-member] [instance: 39728872-2d30-48eb-90da-412f1e45971c] Deleted contents of the VM from datastore datastore1 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 884.322667] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-047f717f-cb4c-4358-b6f0-e71d9ea669bd tempest-ServersTestManualDisk-590910019 tempest-ServersTestManualDisk-590910019-project-member] [instance: 39728872-2d30-48eb-90da-412f1e45971c] Instance destroyed {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 884.322667] env[61839]: INFO nova.compute.manager [None req-047f717f-cb4c-4358-b6f0-e71d9ea669bd tempest-ServersTestManualDisk-590910019 tempest-ServersTestManualDisk-590910019-project-member] [instance: 39728872-2d30-48eb-90da-412f1e45971c] Took 1.20 seconds to destroy the instance on the hypervisor. [ 884.322907] env[61839]: DEBUG oslo.service.loopingcall [None req-047f717f-cb4c-4358-b6f0-e71d9ea669bd tempest-ServersTestManualDisk-590910019 tempest-ServersTestManualDisk-590910019-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 884.323137] env[61839]: DEBUG nova.compute.manager [-] [instance: 39728872-2d30-48eb-90da-412f1e45971c] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 884.323278] env[61839]: DEBUG nova.network.neutron [-] [instance: 39728872-2d30-48eb-90da-412f1e45971c] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 884.485716] env[61839]: DEBUG nova.compute.manager [None req-ffc025d6-10c2-4e09-b5f6-377b5ab1692a tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: fa8a2265-291e-4424-bea1-72574e495a72] Stashing vm_state: active {{(pid=61839) _prep_resize /opt/stack/nova/nova/compute/manager.py:5624}} [ 884.639937] env[61839]: INFO nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] [instance: fa8a2265-291e-4424-bea1-72574e495a72] Updating resource usage from migration 833b7c1c-6dd7-46e6-b8d3-07af4e28c4d6 [ 884.648032] env[61839]: DEBUG oslo_vmware.api [None req-460ca6f5-b390-4cca-bac6-7db949ba1471 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1314608, 'name': RemoveSnapshot_Task} progress is 36%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.666667] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance 5c29c188-a34b-4751-9f8b-166af7b15088 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 884.666853] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance 603191b6-a4b0-451b-b98b-f3dbfb684300 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 884.667138] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance 86525ea7-af75-4b10-85a1-c0fbab73ea5f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 884.667259] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 884.667407] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance 6f43abec-51e2-40e4-8a0f-5a8617a9a9f8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 884.667527] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance 694a5d4b-3673-406b-a24a-d37fad33e549 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 884.667643] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance 39728872-2d30-48eb-90da-412f1e45971c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 884.667780] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance 40c54d84-8e50-483a-b4e0-5f1cc72b0880 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 884.667908] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance 3f86a0d5-30fd-42cc-bd40-14bce9d0e56f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 884.668042] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance ef49a6f5-27c3-4595-af65-d6a5aa47d4e4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 884.668168] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance 56369316-a445-4a2a-a0a6-967074104e19 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 884.668269] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance a4463efc-ffca-4552-a072-cbf5fe062533 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 884.668383] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance d187e75f-39a9-467b-b5ef-e2772d9b71af actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 884.675407] env[61839]: DEBUG oslo_concurrency.lockutils [req-946613e9-e38f-496c-9d99-a5981c50f0b7 req-e723cbb7-456b-4a51-be0c-971b1e3a5454 service nova] Releasing lock "refresh_cache-d187e75f-39a9-467b-b5ef-e2772d9b71af" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 884.726327] env[61839]: DEBUG oslo_vmware.api [None req-d2f1d7dc-78e2-44fa-94f8-ee4c41a57b55 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Task: {'id': task-1314601, 'name': CloneVM_Task} progress is 95%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.765136] env[61839]: DEBUG oslo_vmware.api [None req-b23234c0-1dd9-455d-88df-914017787547 tempest-ServerActionsV293TestJSON-151230769 tempest-ServerActionsV293TestJSON-151230769-project-member] Task: {'id': task-1314609, 'name': PowerOnVM_Task} progress is 100%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.017653] env[61839]: DEBUG oslo_concurrency.lockutils [None req-ffc025d6-10c2-4e09-b5f6-377b5ab1692a tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 885.125302] env[61839]: DEBUG oslo_concurrency.lockutils [None req-095248fd-e632-43a1-88be-4cc8540a5623 tempest-ServerGroupTestJSON-1627784352 tempest-ServerGroupTestJSON-1627784352-project-member] Acquiring lock "65f34f9e-353a-4f94-8f79-9bda89451885" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 885.125627] env[61839]: DEBUG oslo_concurrency.lockutils [None req-095248fd-e632-43a1-88be-4cc8540a5623 tempest-ServerGroupTestJSON-1627784352 tempest-ServerGroupTestJSON-1627784352-project-member] Lock "65f34f9e-353a-4f94-8f79-9bda89451885" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 885.140183] env[61839]: DEBUG oslo_vmware.api [None req-460ca6f5-b390-4cca-bac6-7db949ba1471 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1314608, 'name': RemoveSnapshot_Task, 'duration_secs': 0.567289} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.140322] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-460ca6f5-b390-4cca-bac6-7db949ba1471 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4] Deleted Snapshot of the VM instance {{(pid=61839) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 885.140610] env[61839]: INFO nova.compute.manager [None req-460ca6f5-b390-4cca-bac6-7db949ba1471 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4] Took 18.74 seconds to snapshot the instance on the hypervisor. [ 885.175975] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance 042183e2-d203-4d07-a668-df24ba50e242 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 885.176173] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Migration 833b7c1c-6dd7-46e6-b8d3-07af4e28c4d6 is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 885.178019] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance fa8a2265-291e-4424-bea1-72574e495a72 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 885.226665] env[61839]: DEBUG oslo_vmware.api [None req-d2f1d7dc-78e2-44fa-94f8-ee4c41a57b55 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Task: {'id': task-1314601, 'name': CloneVM_Task, 'duration_secs': 1.160006} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.226967] env[61839]: INFO nova.virt.vmwareapi.vmops [None req-d2f1d7dc-78e2-44fa-94f8-ee4c41a57b55 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: 694a5d4b-3673-406b-a24a-d37fad33e549] Created linked-clone VM from snapshot [ 885.227727] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b26407f1-2aeb-4034-9a49-6c0cdf8e5708 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.235599] env[61839]: DEBUG nova.virt.vmwareapi.images [None req-d2f1d7dc-78e2-44fa-94f8-ee4c41a57b55 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: 694a5d4b-3673-406b-a24a-d37fad33e549] Uploading image d630a97f-7e84-429e-8613-52457a48a10a {{(pid=61839) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 885.266958] env[61839]: DEBUG oslo_vmware.rw_handles [None req-d2f1d7dc-78e2-44fa-94f8-ee4c41a57b55 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 885.266958] env[61839]: value = "vm-281385" [ 885.266958] env[61839]: _type = "VirtualMachine" [ 885.266958] env[61839]: }. {{(pid=61839) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 885.267627] env[61839]: DEBUG oslo_vmware.api [None req-b23234c0-1dd9-455d-88df-914017787547 tempest-ServerActionsV293TestJSON-151230769 tempest-ServerActionsV293TestJSON-151230769-project-member] Task: {'id': task-1314609, 'name': PowerOnVM_Task, 'duration_secs': 0.527024} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.267705] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-89eb5576-9b77-455f-8c30-61872a11f525 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.269693] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-b23234c0-1dd9-455d-88df-914017787547 tempest-ServerActionsV293TestJSON-151230769 tempest-ServerActionsV293TestJSON-151230769-project-member] [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af] Powered on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 885.269956] env[61839]: INFO nova.compute.manager [None req-b23234c0-1dd9-455d-88df-914017787547 tempest-ServerActionsV293TestJSON-151230769 tempest-ServerActionsV293TestJSON-151230769-project-member] [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af] Took 8.00 seconds to spawn the instance on the hypervisor. 
[ 885.270246] env[61839]: DEBUG nova.compute.manager [None req-b23234c0-1dd9-455d-88df-914017787547 tempest-ServerActionsV293TestJSON-151230769 tempest-ServerActionsV293TestJSON-151230769-project-member] [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af] Checking state {{(pid=61839) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 885.270979] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e19e10e8-7527-4e0d-82c9-e8d522ad361d {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.285840] env[61839]: DEBUG oslo_vmware.rw_handles [None req-d2f1d7dc-78e2-44fa-94f8-ee4c41a57b55 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Lease: (returnval){ [ 885.285840] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52ee716a-6b7b-6479-22b5-96df53081eed" [ 885.285840] env[61839]: _type = "HttpNfcLease" [ 885.285840] env[61839]: } obtained for exporting VM: (result){ [ 885.285840] env[61839]: value = "vm-281385" [ 885.285840] env[61839]: _type = "VirtualMachine" [ 885.285840] env[61839]: }. {{(pid=61839) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 885.286188] env[61839]: DEBUG oslo_vmware.api [None req-d2f1d7dc-78e2-44fa-94f8-ee4c41a57b55 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Waiting for the lease: (returnval){ [ 885.286188] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52ee716a-6b7b-6479-22b5-96df53081eed" [ 885.286188] env[61839]: _type = "HttpNfcLease" [ 885.286188] env[61839]: } to be ready. {{(pid=61839) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 885.293352] env[61839]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 885.293352] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52ee716a-6b7b-6479-22b5-96df53081eed" [ 885.293352] env[61839]: _type = "HttpNfcLease" [ 885.293352] env[61839]: } is initializing. {{(pid=61839) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 885.628712] env[61839]: DEBUG nova.compute.manager [None req-095248fd-e632-43a1-88be-4cc8540a5623 tempest-ServerGroupTestJSON-1627784352 tempest-ServerGroupTestJSON-1627784352-project-member] [instance: 65f34f9e-353a-4f94-8f79-9bda89451885] Starting instance... {{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 885.682317] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance 65f34f9e-353a-4f94-8f79-9bda89451885 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 885.682705] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Total usable vcpus: 48, total allocated vcpus: 14 {{(pid=61839) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 885.682825] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3200MB phys_disk=200GB used_disk=13GB total_vcpus=48 used_vcpus=14 pci_stats=[] {{(pid=61839) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 885.730904] env[61839]: DEBUG nova.compute.manager [None req-460ca6f5-b390-4cca-bac6-7db949ba1471 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4] Found 1 images (rotation: 2) {{(pid=61839) _rotate_backups /opt/stack/nova/nova/compute/manager.py:4554}} [ 885.800367] env[61839]: INFO nova.compute.manager [None req-b23234c0-1dd9-455d-88df-914017787547 tempest-ServerActionsV293TestJSON-151230769 tempest-ServerActionsV293TestJSON-151230769-project-member] [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af] Took 30.11 seconds to build instance. [ 885.804713] env[61839]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 885.804713] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52ee716a-6b7b-6479-22b5-96df53081eed" [ 885.804713] env[61839]: _type = "HttpNfcLease" [ 885.804713] env[61839]: } is ready. {{(pid=61839) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 885.805015] env[61839]: DEBUG oslo_vmware.rw_handles [None req-d2f1d7dc-78e2-44fa-94f8-ee4c41a57b55 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 885.805015] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52ee716a-6b7b-6479-22b5-96df53081eed" [ 885.805015] env[61839]: _type = "HttpNfcLease" [ 885.805015] env[61839]: }. {{(pid=61839) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 885.805754] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d2a45c1-6637-4b2f-a962-c767dbc644b2 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.813830] env[61839]: DEBUG oslo_vmware.rw_handles [None req-d2f1d7dc-78e2-44fa-94f8-ee4c41a57b55 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/522e36b8-66b5-12bd-2db3-9c3ef207a348/disk-0.vmdk from lease info. {{(pid=61839) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 885.814013] env[61839]: DEBUG oslo_vmware.rw_handles [None req-d2f1d7dc-78e2-44fa-94f8-ee4c41a57b55 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/522e36b8-66b5-12bd-2db3-9c3ef207a348/disk-0.vmdk for reading. 
{{(pid=61839) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 885.929557] env[61839]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-dcf252b6-0e19-4500-b66e-9047d216b94f {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.105199] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8362fec8-aef3-47cc-9765-fd526f6ef351 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.117215] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d50d1606-8131-4d8e-bfeb-0eea691fd90a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.126377] env[61839]: DEBUG nova.compute.manager [req-11015589-e3d3-4d7a-821c-65684712a0fc req-4cd7467b-ae1f-4bed-a885-4cee4848f853 service nova] [instance: 39728872-2d30-48eb-90da-412f1e45971c] Received event network-vif-deleted-2615f303-83dd-4c9f-b09e-7cfb5da3276f {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 886.126693] env[61839]: INFO nova.compute.manager [req-11015589-e3d3-4d7a-821c-65684712a0fc req-4cd7467b-ae1f-4bed-a885-4cee4848f853 service nova] [instance: 39728872-2d30-48eb-90da-412f1e45971c] Neutron deleted interface 2615f303-83dd-4c9f-b09e-7cfb5da3276f; detaching it from the instance and deleting it from the info cache [ 886.126951] env[61839]: DEBUG nova.network.neutron [req-11015589-e3d3-4d7a-821c-65684712a0fc req-4cd7467b-ae1f-4bed-a885-4cee4848f853 service nova] [instance: 39728872-2d30-48eb-90da-412f1e45971c] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 886.161522] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3837b530-84b5-44f8-be1b-73139f65aa22 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.164431] env[61839]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-25f32e7d-8129-483d-b31c-6292cd90c53a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.177691] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a124bf46-c1ab-4890-82fc-1008028dd1a3 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.185301] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14efa027-fb5f-47f4-8e65-b93a28f0d3b3 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.197844] env[61839]: DEBUG oslo_concurrency.lockutils [None req-095248fd-e632-43a1-88be-4cc8540a5623 tempest-ServerGroupTestJSON-1627784352 tempest-ServerGroupTestJSON-1627784352-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 886.215464] env[61839]: DEBUG nova.compute.provider_tree [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None 
None] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 886.232834] env[61839]: DEBUG nova.network.neutron [-] [instance: 39728872-2d30-48eb-90da-412f1e45971c] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 886.234284] env[61839]: DEBUG nova.compute.manager [req-11015589-e3d3-4d7a-821c-65684712a0fc req-4cd7467b-ae1f-4bed-a885-4cee4848f853 service nova] [instance: 39728872-2d30-48eb-90da-412f1e45971c] Detach interface failed, port_id=2615f303-83dd-4c9f-b09e-7cfb5da3276f, reason: Instance 39728872-2d30-48eb-90da-412f1e45971c could not be found. {{(pid=61839) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 886.303034] env[61839]: DEBUG oslo_concurrency.lockutils [None req-b23234c0-1dd9-455d-88df-914017787547 tempest-ServerActionsV293TestJSON-151230769 tempest-ServerActionsV293TestJSON-151230769-project-member] Lock "d187e75f-39a9-467b-b5ef-e2772d9b71af" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 31.634s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 886.547672] env[61839]: DEBUG oslo_concurrency.lockutils [None req-09d1e269-5606-4c3d-8f93-fdb73e5aa28e tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Acquiring lock "interface-40c54d84-8e50-483a-b4e0-5f1cc72b0880-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 886.548087] env[61839]: DEBUG oslo_concurrency.lockutils [None req-09d1e269-5606-4c3d-8f93-fdb73e5aa28e tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Lock "interface-40c54d84-8e50-483a-b4e0-5f1cc72b0880-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.001s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 886.548614] env[61839]: DEBUG nova.objects.instance [None req-09d1e269-5606-4c3d-8f93-fdb73e5aa28e tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Lazy-loading 'flavor' on Instance uuid 40c54d84-8e50-483a-b4e0-5f1cc72b0880 {{(pid=61839) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 886.722725] env[61839]: DEBUG nova.scheduler.client.report [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 886.731852] env[61839]: DEBUG oslo_concurrency.lockutils [None req-56232971-235f-4baa-8514-44b4e05db378 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Acquiring lock 
"0d42326a-9958-463a-90ae-34fb55e99c5b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 886.732937] env[61839]: DEBUG oslo_concurrency.lockutils [None req-56232971-235f-4baa-8514-44b4e05db378 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Lock "0d42326a-9958-463a-90ae-34fb55e99c5b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 886.736363] env[61839]: INFO nova.compute.manager [-] [instance: 39728872-2d30-48eb-90da-412f1e45971c] Took 2.41 seconds to deallocate network for instance. [ 887.233813] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61839) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 887.233813] env[61839]: DEBUG oslo_concurrency.lockutils [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 3.618s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 887.239018] env[61839]: DEBUG oslo_concurrency.lockutils [None req-4887ddbd-b554-48d0-94f5-d697450e63c9 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.599s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 887.239018] env[61839]: INFO nova.compute.claims [None req-4887ddbd-b554-48d0-94f5-d697450e63c9 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: 042183e2-d203-4d07-a668-df24ba50e242] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 887.241849] env[61839]: DEBUG nova.compute.manager [None req-56232971-235f-4baa-8514-44b4e05db378 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 0d42326a-9958-463a-90ae-34fb55e99c5b] Starting instance... 
{{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 887.246264] env[61839]: DEBUG oslo_concurrency.lockutils [None req-047f717f-cb4c-4358-b6f0-e71d9ea669bd tempest-ServersTestManualDisk-590910019 tempest-ServersTestManualDisk-590910019-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 887.306799] env[61839]: DEBUG nova.objects.instance [None req-09d1e269-5606-4c3d-8f93-fdb73e5aa28e tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Lazy-loading 'pci_requests' on Instance uuid 40c54d84-8e50-483a-b4e0-5f1cc72b0880 {{(pid=61839) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 887.460208] env[61839]: DEBUG nova.compute.manager [None req-ddf21196-dc08-4e00-b99c-d950eff89ba4 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4] Checking state {{(pid=61839) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 887.461381] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e3a0bb9-efcd-49c0-b574-ebfbb54c58d4 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.774824] env[61839]: DEBUG oslo_concurrency.lockutils [None req-56232971-235f-4baa-8514-44b4e05db378 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 887.809354] env[61839]: DEBUG nova.objects.base [None req-09d1e269-5606-4c3d-8f93-fdb73e5aa28e tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Object Instance<40c54d84-8e50-483a-b4e0-5f1cc72b0880> lazy-loaded attributes: flavor,pci_requests {{(pid=61839) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 887.810164] env[61839]: DEBUG nova.network.neutron [None req-09d1e269-5606-4c3d-8f93-fdb73e5aa28e tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: 40c54d84-8e50-483a-b4e0-5f1cc72b0880] allocate_for_instance() {{(pid=61839) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 887.905927] env[61839]: DEBUG nova.policy [None req-09d1e269-5606-4c3d-8f93-fdb73e5aa28e tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '76a29e808031416ab8895e89c337be6e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7e03467b7fba46a9aac1562a1cb8368e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61839) authorize /opt/stack/nova/nova/policy.py:201}} [ 887.973806] env[61839]: INFO nova.compute.manager [None req-ddf21196-dc08-4e00-b99c-d950eff89ba4 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 
fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4] instance snapshotting [ 887.975462] env[61839]: DEBUG nova.objects.instance [None req-ddf21196-dc08-4e00-b99c-d950eff89ba4 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Lazy-loading 'flavor' on Instance uuid fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4 {{(pid=61839) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 888.294702] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-1fc33993-f180-4741-84a9-74e3882fe537 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: a4463efc-ffca-4552-a072-cbf5fe062533] Volume attach. Driver type: vmdk {{(pid=61839) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 888.298028] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-1fc33993-f180-4741-84a9-74e3882fe537 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: a4463efc-ffca-4552-a072-cbf5fe062533] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-281387', 'volume_id': '0b8e7770-f390-47f6-b69f-12c3b2a6f497', 'name': 'volume-0b8e7770-f390-47f6-b69f-12c3b2a6f497', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'a4463efc-ffca-4552-a072-cbf5fe062533', 'attached_at': '', 'detached_at': '', 'volume_id': '0b8e7770-f390-47f6-b69f-12c3b2a6f497', 'serial': '0b8e7770-f390-47f6-b69f-12c3b2a6f497'} {{(pid=61839) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 888.298028] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf97ecec-43cc-411c-9f3b-840603439959 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.329590] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6a4c9b4-570a-42c5-8c2f-7a73b2f14efa {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.370921] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-1fc33993-f180-4741-84a9-74e3882fe537 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: a4463efc-ffca-4552-a072-cbf5fe062533] Reconfiguring VM instance instance-0000004e to attach disk [datastore2] volume-0b8e7770-f390-47f6-b69f-12c3b2a6f497/volume-0b8e7770-f390-47f6-b69f-12c3b2a6f497.vmdk or device None with type thin {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 888.379594] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-75860095-bf9c-4d9b-a130-c6b0a3ba8561 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.404486] env[61839]: DEBUG oslo_vmware.api [None req-1fc33993-f180-4741-84a9-74e3882fe537 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Waiting for the task: (returnval){ [ 888.404486] env[61839]: value = "task-1314612" [ 888.404486] env[61839]: _type = "Task" [ 888.404486] env[61839]: } to complete. 
{{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 888.422737] env[61839]: DEBUG oslo_vmware.api [None req-1fc33993-f180-4741-84a9-74e3882fe537 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': task-1314612, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.482940] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a34dfb7-62b2-40a1-b2c4-75ce1f5729c1 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.518026] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3751a86-9f87-485e-8353-16c75e10a26b {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.554223] env[61839]: DEBUG nova.network.neutron [None req-09d1e269-5606-4c3d-8f93-fdb73e5aa28e tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: 40c54d84-8e50-483a-b4e0-5f1cc72b0880] Successfully created port: c97479ed-b409-4c5f-950d-e8663ef7da4e {{(pid=61839) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 888.662519] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9086fb1e-0fe2-488b-a21d-22ff3eb14b85 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.674111] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-340d4f5f-d978-41f8-934a-f0463226dc4c {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.708473] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f6b0674-df2b-4d62-b065-5deb63a89f10 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.722060] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa342239-e0fa-4876-911d-0da9aface216 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.730071] env[61839]: DEBUG nova.compute.manager [req-b3ecac69-f09e-4c8f-ada9-429328ffcb8a req-0fe9cc6b-9a82-4b95-bb7d-7254cf789a3d service nova] [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af] Received event network-changed-7dee6a06-890e-4663-a919-d96beac69d5d {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 888.730369] env[61839]: DEBUG nova.compute.manager [req-b3ecac69-f09e-4c8f-ada9-429328ffcb8a req-0fe9cc6b-9a82-4b95-bb7d-7254cf789a3d service nova] [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af] Refreshing instance network info cache due to event network-changed-7dee6a06-890e-4663-a919-d96beac69d5d. 
{{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 888.731021] env[61839]: DEBUG oslo_concurrency.lockutils [req-b3ecac69-f09e-4c8f-ada9-429328ffcb8a req-0fe9cc6b-9a82-4b95-bb7d-7254cf789a3d service nova] Acquiring lock "refresh_cache-d187e75f-39a9-467b-b5ef-e2772d9b71af" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 888.731285] env[61839]: DEBUG oslo_concurrency.lockutils [req-b3ecac69-f09e-4c8f-ada9-429328ffcb8a req-0fe9cc6b-9a82-4b95-bb7d-7254cf789a3d service nova] Acquired lock "refresh_cache-d187e75f-39a9-467b-b5ef-e2772d9b71af" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 888.731469] env[61839]: DEBUG nova.network.neutron [req-b3ecac69-f09e-4c8f-ada9-429328ffcb8a req-0fe9cc6b-9a82-4b95-bb7d-7254cf789a3d service nova] [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af] Refreshing network info cache for port 7dee6a06-890e-4663-a919-d96beac69d5d {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 888.756021] env[61839]: DEBUG nova.compute.provider_tree [None req-4887ddbd-b554-48d0-94f5-d697450e63c9 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 888.916683] env[61839]: DEBUG oslo_vmware.api [None req-1fc33993-f180-4741-84a9-74e3882fe537 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': task-1314612, 'name': ReconfigVM_Task, 'duration_secs': 0.507571} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 888.917097] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-1fc33993-f180-4741-84a9-74e3882fe537 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: a4463efc-ffca-4552-a072-cbf5fe062533] Reconfigured VM instance instance-0000004e to attach disk [datastore2] volume-0b8e7770-f390-47f6-b69f-12c3b2a6f497/volume-0b8e7770-f390-47f6-b69f-12c3b2a6f497.vmdk or device None with type thin {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 888.923298] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b5721d3a-bb4c-408d-8a3e-384e456d8b9a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.941893] env[61839]: DEBUG oslo_vmware.api [None req-1fc33993-f180-4741-84a9-74e3882fe537 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Waiting for the task: (returnval){ [ 888.941893] env[61839]: value = "task-1314613" [ 888.941893] env[61839]: _type = "Task" [ 888.941893] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 888.956361] env[61839]: DEBUG oslo_vmware.api [None req-1fc33993-f180-4741-84a9-74e3882fe537 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': task-1314613, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.034031] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-ddf21196-dc08-4e00-b99c-d950eff89ba4 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4] Creating Snapshot of the VM instance {{(pid=61839) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 889.034421] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-801d6e19-e394-4aff-8460-b33eb28b5712 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.046382] env[61839]: DEBUG oslo_vmware.api [None req-ddf21196-dc08-4e00-b99c-d950eff89ba4 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Waiting for the task: (returnval){ [ 889.046382] env[61839]: value = "task-1314614" [ 889.046382] env[61839]: _type = "Task" [ 889.046382] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 889.058449] env[61839]: DEBUG oslo_vmware.api [None req-ddf21196-dc08-4e00-b99c-d950eff89ba4 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1314614, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.259181] env[61839]: DEBUG nova.scheduler.client.report [None req-4887ddbd-b554-48d0-94f5-d697450e63c9 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 889.457388] env[61839]: DEBUG oslo_vmware.api [None req-1fc33993-f180-4741-84a9-74e3882fe537 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': task-1314613, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.558632] env[61839]: DEBUG oslo_vmware.api [None req-ddf21196-dc08-4e00-b99c-d950eff89ba4 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1314614, 'name': CreateSnapshot_Task, 'duration_secs': 0.514528} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 889.559184] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-ddf21196-dc08-4e00-b99c-d950eff89ba4 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4] Created Snapshot of the VM instance {{(pid=61839) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 889.560248] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d9dc5d9-0f83-4f28-a083-21080dbb171d {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.580373] env[61839]: DEBUG nova.network.neutron [req-b3ecac69-f09e-4c8f-ada9-429328ffcb8a req-0fe9cc6b-9a82-4b95-bb7d-7254cf789a3d service nova] [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af] Updated VIF entry in instance network info cache for port 7dee6a06-890e-4663-a919-d96beac69d5d. {{(pid=61839) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 889.580875] env[61839]: DEBUG nova.network.neutron [req-b3ecac69-f09e-4c8f-ada9-429328ffcb8a req-0fe9cc6b-9a82-4b95-bb7d-7254cf789a3d service nova] [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af] Updating instance_info_cache with network_info: [{"id": "7dee6a06-890e-4663-a919-d96beac69d5d", "address": "fa:16:3e:ff:68:87", "network": {"id": "daab8b4e-d8f9-408d-874d-b68ea25fd50c", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-1447004582-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.246", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "766a6b1d62ed40bfa5fe23e553b1d0f6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f3a80436-f7a9-431a-acec-aca3d76e3f9b", "external-id": "cl2-zone-339", "segmentation_id": 339, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7dee6a06-89", "ovs_interfaceid": "7dee6a06-890e-4663-a919-d96beac69d5d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 889.764075] env[61839]: DEBUG oslo_concurrency.lockutils [None req-4887ddbd-b554-48d0-94f5-d697450e63c9 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.528s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 889.764659] env[61839]: DEBUG nova.compute.manager [None req-4887ddbd-b554-48d0-94f5-d697450e63c9 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: 042183e2-d203-4d07-a668-df24ba50e242] Start building networks asynchronously for instance. 
{{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 889.768158] env[61839]: DEBUG oslo_concurrency.lockutils [None req-ffc025d6-10c2-4e09-b5f6-377b5ab1692a tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 4.751s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 889.956388] env[61839]: DEBUG oslo_vmware.api [None req-1fc33993-f180-4741-84a9-74e3882fe537 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': task-1314613, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.084902] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-ddf21196-dc08-4e00-b99c-d950eff89ba4 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4] Creating linked-clone VM from snapshot {{(pid=61839) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 890.086915] env[61839]: DEBUG oslo_concurrency.lockutils [req-b3ecac69-f09e-4c8f-ada9-429328ffcb8a req-0fe9cc6b-9a82-4b95-bb7d-7254cf789a3d service nova] Releasing lock "refresh_cache-d187e75f-39a9-467b-b5ef-e2772d9b71af" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 890.087284] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-e9b71625-f359-48cc-862d-9962c5d735bd {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.093122] env[61839]: DEBUG oslo_concurrency.lockutils [None req-4125a688-0d5e-4b32-904c-d54ba064855a tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Acquiring lock "3f86a0d5-30fd-42cc-bd40-14bce9d0e56f" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 890.093122] env[61839]: DEBUG oslo_concurrency.lockutils [None req-4125a688-0d5e-4b32-904c-d54ba064855a tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Lock "3f86a0d5-30fd-42cc-bd40-14bce9d0e56f" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 890.098007] env[61839]: DEBUG oslo_vmware.api [None req-ddf21196-dc08-4e00-b99c-d950eff89ba4 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Waiting for the task: (returnval){ [ 890.098007] env[61839]: value = "task-1314615" [ 890.098007] env[61839]: _type = "Task" [ 890.098007] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 890.108893] env[61839]: DEBUG oslo_vmware.api [None req-ddf21196-dc08-4e00-b99c-d950eff89ba4 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1314615, 'name': CloneVM_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.273026] env[61839]: DEBUG nova.compute.utils [None req-4887ddbd-b554-48d0-94f5-d697450e63c9 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Using /dev/sd instead of None {{(pid=61839) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 890.277208] env[61839]: INFO nova.compute.claims [None req-ffc025d6-10c2-4e09-b5f6-377b5ab1692a tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: fa8a2265-291e-4424-bea1-72574e495a72] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 890.289328] env[61839]: DEBUG nova.compute.manager [None req-4887ddbd-b554-48d0-94f5-d697450e63c9 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: 042183e2-d203-4d07-a668-df24ba50e242] Allocating IP information in the background. {{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 890.289328] env[61839]: DEBUG nova.network.neutron [None req-4887ddbd-b554-48d0-94f5-d697450e63c9 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: 042183e2-d203-4d07-a668-df24ba50e242] allocate_for_instance() {{(pid=61839) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 890.351070] env[61839]: DEBUG nova.policy [None req-4887ddbd-b554-48d0-94f5-d697450e63c9 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'de344d8cc13340d7affed971d75f486d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '251b0d2531ba4f14a2eb6ea75382c418', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61839) authorize /opt/stack/nova/nova/policy.py:201}} [ 890.424196] env[61839]: DEBUG oslo_concurrency.lockutils [None req-40823cea-b50f-4b8d-809e-3a11293f7329 tempest-ServerMetadataTestJSON-214230339 tempest-ServerMetadataTestJSON-214230339-project-member] Acquiring lock "bac4c882-a23d-412f-ae98-f4f21d86681a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 890.426019] env[61839]: DEBUG oslo_concurrency.lockutils [None req-40823cea-b50f-4b8d-809e-3a11293f7329 tempest-ServerMetadataTestJSON-214230339 tempest-ServerMetadataTestJSON-214230339-project-member] Lock "bac4c882-a23d-412f-ae98-f4f21d86681a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 890.454891] env[61839]: DEBUG oslo_vmware.api [None req-1fc33993-f180-4741-84a9-74e3882fe537 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': task-1314613, 'name': ReconfigVM_Task, 'duration_secs': 1.072625} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 890.455651] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-1fc33993-f180-4741-84a9-74e3882fe537 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: a4463efc-ffca-4552-a072-cbf5fe062533] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-281387', 'volume_id': '0b8e7770-f390-47f6-b69f-12c3b2a6f497', 'name': 'volume-0b8e7770-f390-47f6-b69f-12c3b2a6f497', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'a4463efc-ffca-4552-a072-cbf5fe062533', 'attached_at': '', 'detached_at': '', 'volume_id': '0b8e7770-f390-47f6-b69f-12c3b2a6f497', 'serial': '0b8e7770-f390-47f6-b69f-12c3b2a6f497'} {{(pid=61839) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 890.502385] env[61839]: DEBUG nova.network.neutron [None req-09d1e269-5606-4c3d-8f93-fdb73e5aa28e tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: 40c54d84-8e50-483a-b4e0-5f1cc72b0880] Successfully updated port: c97479ed-b409-4c5f-950d-e8663ef7da4e {{(pid=61839) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 890.596131] env[61839]: DEBUG nova.compute.utils [None req-4125a688-0d5e-4b32-904c-d54ba064855a tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Using /dev/sd instead of None {{(pid=61839) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 890.611861] env[61839]: DEBUG oslo_vmware.api [None req-ddf21196-dc08-4e00-b99c-d950eff89ba4 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1314615, 'name': CloneVM_Task} progress is 94%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.691585] env[61839]: DEBUG nova.network.neutron [None req-4887ddbd-b554-48d0-94f5-d697450e63c9 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: 042183e2-d203-4d07-a668-df24ba50e242] Successfully created port: 046a1230-64fc-4271-9dbb-bd9cab4951bd {{(pid=61839) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 890.789690] env[61839]: INFO nova.compute.resource_tracker [None req-ffc025d6-10c2-4e09-b5f6-377b5ab1692a tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: fa8a2265-291e-4424-bea1-72574e495a72] Updating resource usage from migration 833b7c1c-6dd7-46e6-b8d3-07af4e28c4d6 [ 890.793134] env[61839]: DEBUG nova.compute.manager [None req-4887ddbd-b554-48d0-94f5-d697450e63c9 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: 042183e2-d203-4d07-a668-df24ba50e242] Start building block device mappings for instance. {{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 890.929723] env[61839]: DEBUG nova.compute.manager [None req-40823cea-b50f-4b8d-809e-3a11293f7329 tempest-ServerMetadataTestJSON-214230339 tempest-ServerMetadataTestJSON-214230339-project-member] [instance: bac4c882-a23d-412f-ae98-f4f21d86681a] Starting instance... 
{{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 891.011569] env[61839]: DEBUG oslo_concurrency.lockutils [None req-09d1e269-5606-4c3d-8f93-fdb73e5aa28e tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Acquiring lock "refresh_cache-40c54d84-8e50-483a-b4e0-5f1cc72b0880" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 891.011881] env[61839]: DEBUG oslo_concurrency.lockutils [None req-09d1e269-5606-4c3d-8f93-fdb73e5aa28e tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Acquired lock "refresh_cache-40c54d84-8e50-483a-b4e0-5f1cc72b0880" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 891.012152] env[61839]: DEBUG nova.network.neutron [None req-09d1e269-5606-4c3d-8f93-fdb73e5aa28e tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: 40c54d84-8e50-483a-b4e0-5f1cc72b0880] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 891.104542] env[61839]: DEBUG oslo_concurrency.lockutils [None req-4125a688-0d5e-4b32-904c-d54ba064855a tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Lock "3f86a0d5-30fd-42cc-bd40-14bce9d0e56f" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.011s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 891.118532] env[61839]: DEBUG oslo_vmware.api [None req-ddf21196-dc08-4e00-b99c-d950eff89ba4 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1314615, 'name': CloneVM_Task} progress is 94%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.137084] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd8cfef4-fe02-4e77-a828-b142e95182c5 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.145838] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4663ba63-e029-43e5-85a8-c2f58d9bed52 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.178592] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-690f9d34-ba71-4995-beb0-28b706fe9517 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.188064] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3057f85a-2102-4282-9275-4f95a379a1d3 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.203930] env[61839]: DEBUG nova.compute.provider_tree [None req-ffc025d6-10c2-4e09-b5f6-377b5ab1692a tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 891.456514] env[61839]: DEBUG oslo_concurrency.lockutils [None req-40823cea-b50f-4b8d-809e-3a11293f7329 tempest-ServerMetadataTestJSON-214230339 tempest-ServerMetadataTestJSON-214230339-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 891.515103] env[61839]: DEBUG nova.objects.instance [None req-1fc33993-f180-4741-84a9-74e3882fe537 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Lazy-loading 'flavor' on Instance uuid a4463efc-ffca-4552-a072-cbf5fe062533 {{(pid=61839) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 891.551399] env[61839]: WARNING nova.network.neutron [None req-09d1e269-5606-4c3d-8f93-fdb73e5aa28e tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: 40c54d84-8e50-483a-b4e0-5f1cc72b0880] 41c98894-de91-45eb-a390-6217e0f9dca5 already exists in list: networks containing: ['41c98894-de91-45eb-a390-6217e0f9dca5']. ignoring it [ 891.616508] env[61839]: DEBUG oslo_vmware.api [None req-ddf21196-dc08-4e00-b99c-d950eff89ba4 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1314615, 'name': CloneVM_Task, 'duration_secs': 1.450421} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 891.616914] env[61839]: INFO nova.virt.vmwareapi.vmops [None req-ddf21196-dc08-4e00-b99c-d950eff89ba4 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4] Created linked-clone VM from snapshot [ 891.617666] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d88a47e7-050c-4ae5-b973-ee10688ac4b2 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.628465] env[61839]: DEBUG nova.virt.vmwareapi.images [None req-ddf21196-dc08-4e00-b99c-d950eff89ba4 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4] Uploading image d8ae1da1-4d7c-4f47-ae1e-54319145ff4b {{(pid=61839) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 891.652412] env[61839]: DEBUG oslo_vmware.rw_handles [None req-ddf21196-dc08-4e00-b99c-d950eff89ba4 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 891.652412] env[61839]: value = "vm-281389" [ 891.652412] env[61839]: _type = "VirtualMachine" [ 891.652412] env[61839]: }. {{(pid=61839) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 891.652412] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-2d5ec18d-90a5-4246-8ef3-2e9ce908034b {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.664089] env[61839]: DEBUG oslo_vmware.rw_handles [None req-ddf21196-dc08-4e00-b99c-d950eff89ba4 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Lease: (returnval){ [ 891.664089] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52087d1d-1770-45e6-55e2-e5398c69425b" [ 891.664089] env[61839]: _type = "HttpNfcLease" [ 891.664089] env[61839]: } obtained for exporting VM: (result){ [ 891.664089] env[61839]: value = "vm-281389" [ 891.664089] env[61839]: _type = "VirtualMachine" [ 891.664089] env[61839]: }. {{(pid=61839) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 891.664350] env[61839]: DEBUG oslo_vmware.api [None req-ddf21196-dc08-4e00-b99c-d950eff89ba4 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Waiting for the lease: (returnval){ [ 891.664350] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52087d1d-1770-45e6-55e2-e5398c69425b" [ 891.664350] env[61839]: _type = "HttpNfcLease" [ 891.664350] env[61839]: } to be ready. {{(pid=61839) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 891.675410] env[61839]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 891.675410] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52087d1d-1770-45e6-55e2-e5398c69425b" [ 891.675410] env[61839]: _type = "HttpNfcLease" [ 891.675410] env[61839]: } is initializing. 
{{(pid=61839) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 891.707586] env[61839]: DEBUG nova.scheduler.client.report [None req-ffc025d6-10c2-4e09-b5f6-377b5ab1692a tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 891.805302] env[61839]: DEBUG nova.compute.manager [None req-4887ddbd-b554-48d0-94f5-d697450e63c9 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: 042183e2-d203-4d07-a668-df24ba50e242] Start spawning the instance on the hypervisor. {{(pid=61839) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 891.832321] env[61839]: DEBUG nova.virt.hardware [None req-4887ddbd-b554-48d0-94f5-d697450e63c9 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-18T16:51:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='a1ec9d0c85ce7356db01faa8968485b6',container_format='bare',created_at=2024-10-18T16:58:44Z,direct_url=,disk_format='vmdk',id=47c13bd8-eb3a-4bae-9aff-f4a48e5fa7b6,min_disk=1,min_ram=0,name='tempest-test-snap-2119625065',owner='251b0d2531ba4f14a2eb6ea75382c418',properties=ImageMetaProps,protected=,size=21334016,status='active',tags=,updated_at=2024-10-18T16:58:59Z,virtual_size=,visibility=), allow threads: False {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 891.834017] env[61839]: DEBUG nova.virt.hardware [None req-4887ddbd-b554-48d0-94f5-d697450e63c9 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Flavor limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 891.834017] env[61839]: DEBUG nova.virt.hardware [None req-4887ddbd-b554-48d0-94f5-d697450e63c9 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Image limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 891.834017] env[61839]: DEBUG nova.virt.hardware [None req-4887ddbd-b554-48d0-94f5-d697450e63c9 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Flavor pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 891.834017] env[61839]: DEBUG nova.virt.hardware [None req-4887ddbd-b554-48d0-94f5-d697450e63c9 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Image pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 891.834017] env[61839]: DEBUG nova.virt.hardware [None req-4887ddbd-b554-48d0-94f5-d697450e63c9 tempest-ImagesTestJSON-742018210 
tempest-ImagesTestJSON-742018210-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 891.834017] env[61839]: DEBUG nova.virt.hardware [None req-4887ddbd-b554-48d0-94f5-d697450e63c9 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 891.834017] env[61839]: DEBUG nova.virt.hardware [None req-4887ddbd-b554-48d0-94f5-d697450e63c9 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 891.834017] env[61839]: DEBUG nova.virt.hardware [None req-4887ddbd-b554-48d0-94f5-d697450e63c9 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Got 1 possible topologies {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 891.834408] env[61839]: DEBUG nova.virt.hardware [None req-4887ddbd-b554-48d0-94f5-d697450e63c9 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 891.835016] env[61839]: DEBUG nova.virt.hardware [None req-4887ddbd-b554-48d0-94f5-d697450e63c9 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 891.837564] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1843bcde-a20d-4e91-b778-d41521fb3dff {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.844583] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91262921-f0fb-4131-93c2-c65545f975d8 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.924546] env[61839]: DEBUG nova.network.neutron [None req-09d1e269-5606-4c3d-8f93-fdb73e5aa28e tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: 40c54d84-8e50-483a-b4e0-5f1cc72b0880] Updating instance_info_cache with network_info: [{"id": "4373753c-2ab4-4f61-8117-89f623225621", "address": "fa:16:3e:7a:8e:c0", "network": {"id": "41c98894-de91-45eb-a390-6217e0f9dca5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1040806357-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.199", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7e03467b7fba46a9aac1562a1cb8368e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": 
{"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "572b7281-aad3-45fa-9cb2-fc1c70569948", "external-id": "nsx-vlan-transportzone-722", "segmentation_id": 722, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4373753c-2a", "ovs_interfaceid": "4373753c-2ab4-4f61-8117-89f623225621", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "c97479ed-b409-4c5f-950d-e8663ef7da4e", "address": "fa:16:3e:f3:90:d4", "network": {"id": "41c98894-de91-45eb-a390-6217e0f9dca5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1040806357-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7e03467b7fba46a9aac1562a1cb8368e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "572b7281-aad3-45fa-9cb2-fc1c70569948", "external-id": "nsx-vlan-transportzone-722", "segmentation_id": 722, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc97479ed-b4", "ovs_interfaceid": "c97479ed-b409-4c5f-950d-e8663ef7da4e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 892.022715] env[61839]: DEBUG oslo_concurrency.lockutils [None req-1fc33993-f180-4741-84a9-74e3882fe537 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Lock "a4463efc-ffca-4552-a072-cbf5fe062533" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 8.389s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 892.159371] env[61839]: DEBUG oslo_concurrency.lockutils [None req-4125a688-0d5e-4b32-904c-d54ba064855a tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Acquiring lock "3f86a0d5-30fd-42cc-bd40-14bce9d0e56f" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 892.159645] env[61839]: DEBUG oslo_concurrency.lockutils [None req-4125a688-0d5e-4b32-904c-d54ba064855a tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Lock "3f86a0d5-30fd-42cc-bd40-14bce9d0e56f" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 892.159890] env[61839]: INFO nova.compute.manager [None req-4125a688-0d5e-4b32-904c-d54ba064855a tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] [instance: 3f86a0d5-30fd-42cc-bd40-14bce9d0e56f] Attaching volume d2c3d3cb-6455-41aa-afdd-96adbd2dda18 to /dev/sdb [ 892.174786] env[61839]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 892.174786] env[61839]: value = 
"session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52087d1d-1770-45e6-55e2-e5398c69425b" [ 892.174786] env[61839]: _type = "HttpNfcLease" [ 892.174786] env[61839]: } is ready. {{(pid=61839) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 892.175061] env[61839]: DEBUG oslo_vmware.rw_handles [None req-ddf21196-dc08-4e00-b99c-d950eff89ba4 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 892.175061] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52087d1d-1770-45e6-55e2-e5398c69425b" [ 892.175061] env[61839]: _type = "HttpNfcLease" [ 892.175061] env[61839]: }. {{(pid=61839) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 892.175844] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4029ff18-3d14-42e7-8762-6475161df1c5 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.184520] env[61839]: DEBUG oslo_vmware.rw_handles [None req-ddf21196-dc08-4e00-b99c-d950eff89ba4 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/528bc97f-edf2-b817-a5e1-e0e0fb5e6883/disk-0.vmdk from lease info. {{(pid=61839) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 892.184764] env[61839]: DEBUG oslo_vmware.rw_handles [None req-ddf21196-dc08-4e00-b99c-d950eff89ba4 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/528bc97f-edf2-b817-a5e1-e0e0fb5e6883/disk-0.vmdk for reading. 
{{(pid=61839) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 892.243146] env[61839]: DEBUG oslo_concurrency.lockutils [None req-ffc025d6-10c2-4e09-b5f6-377b5ab1692a tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.475s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 892.243446] env[61839]: INFO nova.compute.manager [None req-ffc025d6-10c2-4e09-b5f6-377b5ab1692a tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: fa8a2265-291e-4424-bea1-72574e495a72] Migrating [ 892.243699] env[61839]: DEBUG oslo_concurrency.lockutils [None req-ffc025d6-10c2-4e09-b5f6-377b5ab1692a tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Acquiring lock "compute-rpcapi-router" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 892.243851] env[61839]: DEBUG oslo_concurrency.lockutils [None req-ffc025d6-10c2-4e09-b5f6-377b5ab1692a tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Acquired lock "compute-rpcapi-router" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 892.246229] env[61839]: DEBUG oslo_concurrency.lockutils [None req-095248fd-e632-43a1-88be-4cc8540a5623 tempest-ServerGroupTestJSON-1627784352 tempest-ServerGroupTestJSON-1627784352-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.049s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 892.247724] env[61839]: INFO nova.compute.claims [None req-095248fd-e632-43a1-88be-4cc8540a5623 tempest-ServerGroupTestJSON-1627784352 tempest-ServerGroupTestJSON-1627784352-project-member] [instance: 65f34f9e-353a-4f94-8f79-9bda89451885] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 892.250790] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19543ece-fd8e-42d7-a999-612b1db794ec {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.265711] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db9eefce-7dc8-43a8-8761-c5bbbb01e8d5 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.283681] env[61839]: DEBUG nova.virt.block_device [None req-4125a688-0d5e-4b32-904c-d54ba064855a tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] [instance: 3f86a0d5-30fd-42cc-bd40-14bce9d0e56f] Updating existing volume attachment record: 4252170d-553e-4bb6-a743-8e420e9e7cbf {{(pid=61839) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 892.293163] env[61839]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-8494329d-e9ca-4681-bf4c-cc6ec412908c {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.415087] env[61839]: DEBUG nova.network.neutron [None 
req-4887ddbd-b554-48d0-94f5-d697450e63c9 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: 042183e2-d203-4d07-a668-df24ba50e242] Successfully updated port: 046a1230-64fc-4271-9dbb-bd9cab4951bd {{(pid=61839) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 892.427319] env[61839]: DEBUG oslo_concurrency.lockutils [None req-09d1e269-5606-4c3d-8f93-fdb73e5aa28e tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Releasing lock "refresh_cache-40c54d84-8e50-483a-b4e0-5f1cc72b0880" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 892.427957] env[61839]: DEBUG oslo_concurrency.lockutils [None req-09d1e269-5606-4c3d-8f93-fdb73e5aa28e tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Acquiring lock "40c54d84-8e50-483a-b4e0-5f1cc72b0880" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 892.428144] env[61839]: DEBUG oslo_concurrency.lockutils [None req-09d1e269-5606-4c3d-8f93-fdb73e5aa28e tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Acquired lock "40c54d84-8e50-483a-b4e0-5f1cc72b0880" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 892.429568] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91b09c4b-149d-4896-b8e4-ed7a0196e67c {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.448789] env[61839]: DEBUG nova.virt.hardware [None req-09d1e269-5606-4c3d-8f93-fdb73e5aa28e tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-18T16:51:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 892.449060] env[61839]: DEBUG nova.virt.hardware [None req-09d1e269-5606-4c3d-8f93-fdb73e5aa28e tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Flavor limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 892.449211] env[61839]: DEBUG nova.virt.hardware [None req-09d1e269-5606-4c3d-8f93-fdb73e5aa28e tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Image limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 892.449403] env[61839]: DEBUG nova.virt.hardware [None req-09d1e269-5606-4c3d-8f93-fdb73e5aa28e tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Flavor pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 892.449555] env[61839]: DEBUG nova.virt.hardware [None 
req-09d1e269-5606-4c3d-8f93-fdb73e5aa28e tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Image pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 892.449705] env[61839]: DEBUG nova.virt.hardware [None req-09d1e269-5606-4c3d-8f93-fdb73e5aa28e tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 892.449910] env[61839]: DEBUG nova.virt.hardware [None req-09d1e269-5606-4c3d-8f93-fdb73e5aa28e tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 892.450084] env[61839]: DEBUG nova.virt.hardware [None req-09d1e269-5606-4c3d-8f93-fdb73e5aa28e tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 892.450259] env[61839]: DEBUG nova.virt.hardware [None req-09d1e269-5606-4c3d-8f93-fdb73e5aa28e tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Got 1 possible topologies {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 892.450428] env[61839]: DEBUG nova.virt.hardware [None req-09d1e269-5606-4c3d-8f93-fdb73e5aa28e tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 892.450605] env[61839]: DEBUG nova.virt.hardware [None req-09d1e269-5606-4c3d-8f93-fdb73e5aa28e tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 892.457065] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-09d1e269-5606-4c3d-8f93-fdb73e5aa28e tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: 40c54d84-8e50-483a-b4e0-5f1cc72b0880] Reconfiguring VM to attach interface {{(pid=61839) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 892.459822] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cdd366a9-ab03-4de7-99b0-5ef0985b922c {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.479386] env[61839]: DEBUG oslo_vmware.api [None req-09d1e269-5606-4c3d-8f93-fdb73e5aa28e tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Waiting for the task: (returnval){ [ 892.479386] env[61839]: value = "task-1314618" [ 892.479386] env[61839]: _type = "Task" [ 892.479386] env[61839]: } to complete. 
{{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 892.492221] env[61839]: DEBUG oslo_vmware.api [None req-09d1e269-5606-4c3d-8f93-fdb73e5aa28e tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': task-1314618, 'name': ReconfigVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.622224] env[61839]: DEBUG nova.compute.manager [req-fbdab0ec-87cf-4a8e-a358-ceeeaf06c26c req-85324515-9f80-4e0f-8c5f-69a03f5590c0 service nova] [instance: 40c54d84-8e50-483a-b4e0-5f1cc72b0880] Received event network-vif-plugged-c97479ed-b409-4c5f-950d-e8663ef7da4e {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 892.622533] env[61839]: DEBUG oslo_concurrency.lockutils [req-fbdab0ec-87cf-4a8e-a358-ceeeaf06c26c req-85324515-9f80-4e0f-8c5f-69a03f5590c0 service nova] Acquiring lock "40c54d84-8e50-483a-b4e0-5f1cc72b0880-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 892.622768] env[61839]: DEBUG oslo_concurrency.lockutils [req-fbdab0ec-87cf-4a8e-a358-ceeeaf06c26c req-85324515-9f80-4e0f-8c5f-69a03f5590c0 service nova] Lock "40c54d84-8e50-483a-b4e0-5f1cc72b0880-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 892.622917] env[61839]: DEBUG oslo_concurrency.lockutils [req-fbdab0ec-87cf-4a8e-a358-ceeeaf06c26c req-85324515-9f80-4e0f-8c5f-69a03f5590c0 service nova] Lock "40c54d84-8e50-483a-b4e0-5f1cc72b0880-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 892.623121] env[61839]: DEBUG nova.compute.manager [req-fbdab0ec-87cf-4a8e-a358-ceeeaf06c26c req-85324515-9f80-4e0f-8c5f-69a03f5590c0 service nova] [instance: 40c54d84-8e50-483a-b4e0-5f1cc72b0880] No waiting events found dispatching network-vif-plugged-c97479ed-b409-4c5f-950d-e8663ef7da4e {{(pid=61839) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 892.623293] env[61839]: WARNING nova.compute.manager [req-fbdab0ec-87cf-4a8e-a358-ceeeaf06c26c req-85324515-9f80-4e0f-8c5f-69a03f5590c0 service nova] [instance: 40c54d84-8e50-483a-b4e0-5f1cc72b0880] Received unexpected event network-vif-plugged-c97479ed-b409-4c5f-950d-e8663ef7da4e for instance with vm_state active and task_state None. [ 892.623462] env[61839]: DEBUG nova.compute.manager [req-fbdab0ec-87cf-4a8e-a358-ceeeaf06c26c req-85324515-9f80-4e0f-8c5f-69a03f5590c0 service nova] [instance: 40c54d84-8e50-483a-b4e0-5f1cc72b0880] Received event network-changed-c97479ed-b409-4c5f-950d-e8663ef7da4e {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 892.623679] env[61839]: DEBUG nova.compute.manager [req-fbdab0ec-87cf-4a8e-a358-ceeeaf06c26c req-85324515-9f80-4e0f-8c5f-69a03f5590c0 service nova] [instance: 40c54d84-8e50-483a-b4e0-5f1cc72b0880] Refreshing instance network info cache due to event network-changed-c97479ed-b409-4c5f-950d-e8663ef7da4e. 
{{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 892.623950] env[61839]: DEBUG oslo_concurrency.lockutils [req-fbdab0ec-87cf-4a8e-a358-ceeeaf06c26c req-85324515-9f80-4e0f-8c5f-69a03f5590c0 service nova] Acquiring lock "refresh_cache-40c54d84-8e50-483a-b4e0-5f1cc72b0880" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 892.624180] env[61839]: DEBUG oslo_concurrency.lockutils [req-fbdab0ec-87cf-4a8e-a358-ceeeaf06c26c req-85324515-9f80-4e0f-8c5f-69a03f5590c0 service nova] Acquired lock "refresh_cache-40c54d84-8e50-483a-b4e0-5f1cc72b0880" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 892.624488] env[61839]: DEBUG nova.network.neutron [req-fbdab0ec-87cf-4a8e-a358-ceeeaf06c26c req-85324515-9f80-4e0f-8c5f-69a03f5590c0 service nova] [instance: 40c54d84-8e50-483a-b4e0-5f1cc72b0880] Refreshing network info cache for port c97479ed-b409-4c5f-950d-e8663ef7da4e {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 892.758943] env[61839]: INFO nova.compute.rpcapi [None req-ffc025d6-10c2-4e09-b5f6-377b5ab1692a tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Automatically selected compute RPC version 6.3 from minimum service version 67 [ 892.759729] env[61839]: DEBUG oslo_concurrency.lockutils [None req-ffc025d6-10c2-4e09-b5f6-377b5ab1692a tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Releasing lock "compute-rpcapi-router" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 892.922100] env[61839]: DEBUG oslo_concurrency.lockutils [None req-4887ddbd-b554-48d0-94f5-d697450e63c9 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Acquiring lock "refresh_cache-042183e2-d203-4d07-a668-df24ba50e242" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 892.922428] env[61839]: DEBUG oslo_concurrency.lockutils [None req-4887ddbd-b554-48d0-94f5-d697450e63c9 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Acquired lock "refresh_cache-042183e2-d203-4d07-a668-df24ba50e242" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 892.922922] env[61839]: DEBUG nova.network.neutron [None req-4887ddbd-b554-48d0-94f5-d697450e63c9 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: 042183e2-d203-4d07-a668-df24ba50e242] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 892.991350] env[61839]: DEBUG oslo_vmware.api [None req-09d1e269-5606-4c3d-8f93-fdb73e5aa28e tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': task-1314618, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.252770] env[61839]: DEBUG oslo_concurrency.lockutils [None req-c8b7ef29-926a-40c9-9023-eb5013fb90e1 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Acquiring lock "7f7b3f51-3e96-49f1-a84a-81ae649e6938" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 893.253708] env[61839]: DEBUG oslo_concurrency.lockutils [None req-c8b7ef29-926a-40c9-9023-eb5013fb90e1 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Lock "7f7b3f51-3e96-49f1-a84a-81ae649e6938" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 893.285639] env[61839]: DEBUG oslo_concurrency.lockutils [None req-ffc025d6-10c2-4e09-b5f6-377b5ab1692a tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Acquiring lock "refresh_cache-fa8a2265-291e-4424-bea1-72574e495a72" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 893.285790] env[61839]: DEBUG oslo_concurrency.lockutils [None req-ffc025d6-10c2-4e09-b5f6-377b5ab1692a tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Acquired lock "refresh_cache-fa8a2265-291e-4424-bea1-72574e495a72" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 893.286264] env[61839]: DEBUG nova.network.neutron [None req-ffc025d6-10c2-4e09-b5f6-377b5ab1692a tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: fa8a2265-291e-4424-bea1-72574e495a72] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 893.363169] env[61839]: DEBUG oslo_concurrency.lockutils [None req-5efce340-2499-408f-bda7-cfc4729c36d5 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Acquiring lock "a4463efc-ffca-4552-a072-cbf5fe062533" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 893.364625] env[61839]: DEBUG oslo_concurrency.lockutils [None req-5efce340-2499-408f-bda7-cfc4729c36d5 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Lock "a4463efc-ffca-4552-a072-cbf5fe062533" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.002s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 893.365956] env[61839]: DEBUG oslo_concurrency.lockutils [None req-5efce340-2499-408f-bda7-cfc4729c36d5 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Acquiring lock "a4463efc-ffca-4552-a072-cbf5fe062533-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 
893.366227] env[61839]: DEBUG oslo_concurrency.lockutils [None req-5efce340-2499-408f-bda7-cfc4729c36d5 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Lock "a4463efc-ffca-4552-a072-cbf5fe062533-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 893.366417] env[61839]: DEBUG oslo_concurrency.lockutils [None req-5efce340-2499-408f-bda7-cfc4729c36d5 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Lock "a4463efc-ffca-4552-a072-cbf5fe062533-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 893.376593] env[61839]: INFO nova.compute.manager [None req-5efce340-2499-408f-bda7-cfc4729c36d5 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: a4463efc-ffca-4552-a072-cbf5fe062533] Terminating instance [ 893.377893] env[61839]: DEBUG nova.compute.manager [None req-5efce340-2499-408f-bda7-cfc4729c36d5 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: a4463efc-ffca-4552-a072-cbf5fe062533] Start destroying the instance on the hypervisor. {{(pid=61839) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 893.379119] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-5efce340-2499-408f-bda7-cfc4729c36d5 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: a4463efc-ffca-4552-a072-cbf5fe062533] Powering off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 893.383068] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-02009566-e87b-4786-a469-2b45fa837b8b {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.390427] env[61839]: DEBUG oslo_vmware.api [None req-5efce340-2499-408f-bda7-cfc4729c36d5 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Waiting for the task: (returnval){ [ 893.390427] env[61839]: value = "task-1314621" [ 893.390427] env[61839]: _type = "Task" [ 893.390427] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 893.407020] env[61839]: DEBUG oslo_vmware.api [None req-5efce340-2499-408f-bda7-cfc4729c36d5 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': task-1314621, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.492978] env[61839]: DEBUG oslo_vmware.api [None req-09d1e269-5606-4c3d-8f93-fdb73e5aa28e tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': task-1314618, 'name': ReconfigVM_Task, 'duration_secs': 0.838544} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 893.494050] env[61839]: DEBUG oslo_concurrency.lockutils [None req-09d1e269-5606-4c3d-8f93-fdb73e5aa28e tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Releasing lock "40c54d84-8e50-483a-b4e0-5f1cc72b0880" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 893.494050] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-09d1e269-5606-4c3d-8f93-fdb73e5aa28e tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: 40c54d84-8e50-483a-b4e0-5f1cc72b0880] Reconfigured VM to attach interface {{(pid=61839) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 893.503366] env[61839]: DEBUG nova.network.neutron [None req-4887ddbd-b554-48d0-94f5-d697450e63c9 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: 042183e2-d203-4d07-a668-df24ba50e242] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 893.533679] env[61839]: DEBUG nova.network.neutron [req-fbdab0ec-87cf-4a8e-a358-ceeeaf06c26c req-85324515-9f80-4e0f-8c5f-69a03f5590c0 service nova] [instance: 40c54d84-8e50-483a-b4e0-5f1cc72b0880] Updated VIF entry in instance network info cache for port c97479ed-b409-4c5f-950d-e8663ef7da4e. {{(pid=61839) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 893.535776] env[61839]: DEBUG nova.network.neutron [req-fbdab0ec-87cf-4a8e-a358-ceeeaf06c26c req-85324515-9f80-4e0f-8c5f-69a03f5590c0 service nova] [instance: 40c54d84-8e50-483a-b4e0-5f1cc72b0880] Updating instance_info_cache with network_info: [{"id": "4373753c-2ab4-4f61-8117-89f623225621", "address": "fa:16:3e:7a:8e:c0", "network": {"id": "41c98894-de91-45eb-a390-6217e0f9dca5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1040806357-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.199", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7e03467b7fba46a9aac1562a1cb8368e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "572b7281-aad3-45fa-9cb2-fc1c70569948", "external-id": "nsx-vlan-transportzone-722", "segmentation_id": 722, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4373753c-2a", "ovs_interfaceid": "4373753c-2ab4-4f61-8117-89f623225621", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "c97479ed-b409-4c5f-950d-e8663ef7da4e", "address": "fa:16:3e:f3:90:d4", "network": {"id": "41c98894-de91-45eb-a390-6217e0f9dca5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1040806357-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], 
"routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7e03467b7fba46a9aac1562a1cb8368e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "572b7281-aad3-45fa-9cb2-fc1c70569948", "external-id": "nsx-vlan-transportzone-722", "segmentation_id": 722, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc97479ed-b4", "ovs_interfaceid": "c97479ed-b409-4c5f-950d-e8663ef7da4e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 893.670283] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ebf31a9-8b14-4149-9512-e43999b26bc8 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.679950] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a2e4b8f-051c-4dc6-b918-a5144a641996 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.716404] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-407230ca-a092-41ab-89f6-489062534914 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.722253] env[61839]: DEBUG nova.network.neutron [None req-4887ddbd-b554-48d0-94f5-d697450e63c9 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: 042183e2-d203-4d07-a668-df24ba50e242] Updating instance_info_cache with network_info: [{"id": "046a1230-64fc-4271-9dbb-bd9cab4951bd", "address": "fa:16:3e:b1:8f:8f", "network": {"id": "8c9e6595-896e-44e0-996a-ff86bae5ad8e", "bridge": "br-int", "label": "tempest-ImagesTestJSON-788251660-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "251b0d2531ba4f14a2eb6ea75382c418", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ce17e10e-2fb0-4191-afee-e2b89fa15074", "external-id": "nsx-vlan-transportzone-352", "segmentation_id": 352, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap046a1230-64", "ovs_interfaceid": "046a1230-64fc-4271-9dbb-bd9cab4951bd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 893.727610] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3339f7b-8687-4e2d-8f33-ef4997d2dadd {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.746082] env[61839]: DEBUG nova.compute.provider_tree [None 
req-095248fd-e632-43a1-88be-4cc8540a5623 tempest-ServerGroupTestJSON-1627784352 tempest-ServerGroupTestJSON-1627784352-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 893.757655] env[61839]: DEBUG nova.compute.manager [None req-c8b7ef29-926a-40c9-9023-eb5013fb90e1 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 7f7b3f51-3e96-49f1-a84a-81ae649e6938] Starting instance... {{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 893.905370] env[61839]: DEBUG oslo_vmware.api [None req-5efce340-2499-408f-bda7-cfc4729c36d5 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': task-1314621, 'name': PowerOffVM_Task, 'duration_secs': 0.243479} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 893.905766] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-5efce340-2499-408f-bda7-cfc4729c36d5 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: a4463efc-ffca-4552-a072-cbf5fe062533] Powered off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 893.906121] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-5efce340-2499-408f-bda7-cfc4729c36d5 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: a4463efc-ffca-4552-a072-cbf5fe062533] Volume detach. Driver type: vmdk {{(pid=61839) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 893.906417] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-5efce340-2499-408f-bda7-cfc4729c36d5 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: a4463efc-ffca-4552-a072-cbf5fe062533] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-281387', 'volume_id': '0b8e7770-f390-47f6-b69f-12c3b2a6f497', 'name': 'volume-0b8e7770-f390-47f6-b69f-12c3b2a6f497', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'a4463efc-ffca-4552-a072-cbf5fe062533', 'attached_at': '', 'detached_at': '', 'volume_id': '0b8e7770-f390-47f6-b69f-12c3b2a6f497', 'serial': '0b8e7770-f390-47f6-b69f-12c3b2a6f497'} {{(pid=61839) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 893.907304] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c605e464-51db-4c99-a05e-4cd5f0bd497a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.934037] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eeaa567a-15de-410e-bc8c-665b7be8deff {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.943293] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c142a1a-5d1d-4633-8104-cd67009ea8c0 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.971017] env[61839]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0aa2da1-d66b-4b32-9619-ebb9ef05d3e1 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.991530] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-5efce340-2499-408f-bda7-cfc4729c36d5 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] The volume has not been displaced from its original location: [datastore2] volume-0b8e7770-f390-47f6-b69f-12c3b2a6f497/volume-0b8e7770-f390-47f6-b69f-12c3b2a6f497.vmdk. No consolidation needed. {{(pid=61839) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 893.997923] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-5efce340-2499-408f-bda7-cfc4729c36d5 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: a4463efc-ffca-4552-a072-cbf5fe062533] Reconfiguring VM instance instance-0000004e to detach disk 2001 {{(pid=61839) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 893.998507] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-53afefff-1b1d-4a22-b72e-93d52c2775d3 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.016788] env[61839]: DEBUG oslo_concurrency.lockutils [None req-09d1e269-5606-4c3d-8f93-fdb73e5aa28e tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Lock "interface-40c54d84-8e50-483a-b4e0-5f1cc72b0880-None" "released" by "nova.compute.manager.ComputeManager.attach_interface.<locals>.do_attach_interface" :: held 7.469s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 894.025084] env[61839]: DEBUG oslo_vmware.api [None req-5efce340-2499-408f-bda7-cfc4729c36d5 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Waiting for the task: (returnval){ [ 894.025084] env[61839]: value = "task-1314622" [ 894.025084] env[61839]: _type = "Task" [ 894.025084] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 894.036201] env[61839]: DEBUG oslo_vmware.api [None req-5efce340-2499-408f-bda7-cfc4729c36d5 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': task-1314622, 'name': ReconfigVM_Task} progress is 5%.
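
The "Reconfiguring VM instance ... to detach disk 2001" step is a ReconfigVM_Task built from a VirtualMachineConfigSpec whose deviceChange removes the volume's virtual disk. A sketch of that spec construction under the same assumptions as the earlier snippet (session, vm_ref and device are placeholders; the 'ns0:' factory names are the vSphere SDK types nova's vm_util builds):

    # The suds factory behind the session creates SOAP spec objects.
    client_factory = session.vim.client.factory

    config_spec = client_factory.create('ns0:VirtualMachineConfigSpec')
    device_change = client_factory.create('ns0:VirtualDeviceConfigSpec')
    device_change.operation = 'remove'
    device_change.device = device      # the VirtualDisk to drop (assumed)
    config_spec.deviceChange = [device_change]

    # Same submit-then-poll pattern as the power-off above.
    task = session.invoke_api(session.vim, 'ReconfigVM_Task',
                              vm_ref, spec=config_spec)
    session.wait_for_task(task)
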
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.039146] env[61839]: DEBUG oslo_concurrency.lockutils [req-fbdab0ec-87cf-4a8e-a358-ceeeaf06c26c req-85324515-9f80-4e0f-8c5f-69a03f5590c0 service nova] Releasing lock "refresh_cache-40c54d84-8e50-483a-b4e0-5f1cc72b0880" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 894.162699] env[61839]: DEBUG nova.network.neutron [None req-ffc025d6-10c2-4e09-b5f6-377b5ab1692a tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: fa8a2265-291e-4424-bea1-72574e495a72] Updating instance_info_cache with network_info: [{"id": "471fcd5e-1ea3-4791-9a4d-b68197f8068e", "address": "fa:16:3e:48:06:f8", "network": {"id": "2334f355-8fd7-4df2-ba1c-a28b5fde97f2", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-202913707-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8935bcc7ee644cb7a2a33557a708189c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "37434b93-dfdc-4a3f-bf5a-9f2cbe25a754", "external-id": "nsx-vlan-transportzone-676", "segmentation_id": 676, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap471fcd5e-1e", "ovs_interfaceid": "471fcd5e-1ea3-4791-9a4d-b68197f8068e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 894.234155] env[61839]: DEBUG oslo_concurrency.lockutils [None req-4887ddbd-b554-48d0-94f5-d697450e63c9 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Releasing lock "refresh_cache-042183e2-d203-4d07-a668-df24ba50e242" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 894.234629] env[61839]: DEBUG nova.compute.manager [None req-4887ddbd-b554-48d0-94f5-d697450e63c9 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: 042183e2-d203-4d07-a668-df24ba50e242] Instance network_info: |[{"id": "046a1230-64fc-4271-9dbb-bd9cab4951bd", "address": "fa:16:3e:b1:8f:8f", "network": {"id": "8c9e6595-896e-44e0-996a-ff86bae5ad8e", "bridge": "br-int", "label": "tempest-ImagesTestJSON-788251660-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "251b0d2531ba4f14a2eb6ea75382c418", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ce17e10e-2fb0-4191-afee-e2b89fa15074", "external-id": "nsx-vlan-transportzone-352", "segmentation_id": 352, "bound_drivers": {"0": "nsxv3"}}, 
"devname": "tap046a1230-64", "ovs_interfaceid": "046a1230-64fc-4271-9dbb-bd9cab4951bd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 894.235269] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-4887ddbd-b554-48d0-94f5-d697450e63c9 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: 042183e2-d203-4d07-a668-df24ba50e242] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b1:8f:8f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ce17e10e-2fb0-4191-afee-e2b89fa15074', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '046a1230-64fc-4271-9dbb-bd9cab4951bd', 'vif_model': 'vmxnet3'}] {{(pid=61839) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 894.244943] env[61839]: DEBUG oslo.service.loopingcall [None req-4887ddbd-b554-48d0-94f5-d697450e63c9 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 894.245501] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 042183e2-d203-4d07-a668-df24ba50e242] Creating VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 894.245869] env[61839]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4ab8828f-9c55-4f34-aac9-dc27e6c47246 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.264508] env[61839]: DEBUG nova.scheduler.client.report [None req-095248fd-e632-43a1-88be-4cc8540a5623 tempest-ServerGroupTestJSON-1627784352 tempest-ServerGroupTestJSON-1627784352-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 894.280079] env[61839]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 894.280079] env[61839]: value = "task-1314623" [ 894.280079] env[61839]: _type = "Task" [ 894.280079] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 894.293963] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314623, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.294637] env[61839]: DEBUG oslo_concurrency.lockutils [None req-c8b7ef29-926a-40c9-9023-eb5013fb90e1 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 894.538035] env[61839]: DEBUG oslo_vmware.api [None req-5efce340-2499-408f-bda7-cfc4729c36d5 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': task-1314622, 'name': ReconfigVM_Task, 'duration_secs': 0.329255} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 894.538035] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-5efce340-2499-408f-bda7-cfc4729c36d5 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: a4463efc-ffca-4552-a072-cbf5fe062533] Reconfigured VM instance instance-0000004e to detach disk 2001 {{(pid=61839) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 894.543900] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7af73a4f-0ff0-473d-baa9-4c4f1e0614cd {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.563557] env[61839]: DEBUG oslo_vmware.api [None req-5efce340-2499-408f-bda7-cfc4729c36d5 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Waiting for the task: (returnval){ [ 894.563557] env[61839]: value = "task-1314624" [ 894.563557] env[61839]: _type = "Task" [ 894.563557] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 894.574897] env[61839]: DEBUG oslo_vmware.api [None req-5efce340-2499-408f-bda7-cfc4729c36d5 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': task-1314624, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.666212] env[61839]: DEBUG oslo_concurrency.lockutils [None req-ffc025d6-10c2-4e09-b5f6-377b5ab1692a tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Releasing lock "refresh_cache-fa8a2265-291e-4424-bea1-72574e495a72" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 894.772681] env[61839]: DEBUG oslo_concurrency.lockutils [None req-095248fd-e632-43a1-88be-4cc8540a5623 tempest-ServerGroupTestJSON-1627784352 tempest-ServerGroupTestJSON-1627784352-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.526s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 894.774082] env[61839]: DEBUG oslo_concurrency.lockutils [None req-047f717f-cb4c-4358-b6f0-e71d9ea669bd tempest-ServersTestManualDisk-590910019 tempest-ServersTestManualDisk-590910019-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 7.528s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 894.774405] env[61839]: DEBUG nova.objects.instance [None req-047f717f-cb4c-4358-b6f0-e71d9ea669bd tempest-ServersTestManualDisk-590910019 tempest-ServersTestManualDisk-590910019-project-member] Lazy-loading 'resources' on Instance uuid 39728872-2d30-48eb-90da-412f1e45971c {{(pid=61839) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 894.797319] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314623, 'name': CreateVM_Task, 'duration_secs': 0.412341} completed successfully. 
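
The compute_resources acquire/release pairs above, with their waited/held timings, come from oslo.concurrency's lockutils wrapper; both forms below produce exactly that DEBUG logging when debug output is enabled. A minimal sketch:

    from oslo_concurrency import lockutils

    # Decorator form: the generated wrapper logs
    # 'Lock "compute_resources" acquired by ... :: waited' and
    # '"released" by ... :: held', as in the entries above.
    @lockutils.synchronized('compute_resources')
    def instance_claim():
        pass  # resource-tracker accounting happens under the lock

    # Context-manager form, equivalent for ad-hoc critical sections:
    with lockutils.lock('compute_resources'):
        pass
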
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 894.797586] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 042183e2-d203-4d07-a668-df24ba50e242] Created VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 894.799246] env[61839]: DEBUG oslo_concurrency.lockutils [None req-4887ddbd-b554-48d0-94f5-d697450e63c9 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/47c13bd8-eb3a-4bae-9aff-f4a48e5fa7b6" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 894.799536] env[61839]: DEBUG oslo_concurrency.lockutils [None req-4887ddbd-b554-48d0-94f5-d697450e63c9 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Acquired lock "[datastore1] devstack-image-cache_base/47c13bd8-eb3a-4bae-9aff-f4a48e5fa7b6" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 894.804409] env[61839]: DEBUG oslo_concurrency.lockutils [None req-4887ddbd-b554-48d0-94f5-d697450e63c9 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/47c13bd8-eb3a-4bae-9aff-f4a48e5fa7b6" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 894.805096] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-35246894-1a60-4176-af28-e11b8678617f {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.812478] env[61839]: DEBUG oslo_vmware.api [None req-4887ddbd-b554-48d0-94f5-d697450e63c9 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Waiting for the task: (returnval){ [ 894.812478] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]5229a719-c603-44a5-5576-eea6c3110a40" [ 894.812478] env[61839]: _type = "Task" [ 894.812478] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 894.827977] env[61839]: DEBUG oslo_vmware.api [None req-4887ddbd-b554-48d0-94f5-d697450e63c9 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]5229a719-c603-44a5-5576-eea6c3110a40, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.055548] env[61839]: DEBUG nova.compute.manager [req-9668f9f8-e17f-4c32-a006-a4025c6dde1f req-fada506c-ac10-406c-95ae-519c53f28285 service nova] [instance: 042183e2-d203-4d07-a668-df24ba50e242] Received event network-vif-plugged-046a1230-64fc-4271-9dbb-bd9cab4951bd {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 895.055724] env[61839]: DEBUG oslo_concurrency.lockutils [req-9668f9f8-e17f-4c32-a006-a4025c6dde1f req-fada506c-ac10-406c-95ae-519c53f28285 service nova] Acquiring lock "042183e2-d203-4d07-a668-df24ba50e242-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 895.055953] env[61839]: DEBUG oslo_concurrency.lockutils [req-9668f9f8-e17f-4c32-a006-a4025c6dde1f req-fada506c-ac10-406c-95ae-519c53f28285 service nova] Lock "042183e2-d203-4d07-a668-df24ba50e242-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 895.056113] env[61839]: DEBUG oslo_concurrency.lockutils [req-9668f9f8-e17f-4c32-a006-a4025c6dde1f req-fada506c-ac10-406c-95ae-519c53f28285 service nova] Lock "042183e2-d203-4d07-a668-df24ba50e242-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 895.056288] env[61839]: DEBUG nova.compute.manager [req-9668f9f8-e17f-4c32-a006-a4025c6dde1f req-fada506c-ac10-406c-95ae-519c53f28285 service nova] [instance: 042183e2-d203-4d07-a668-df24ba50e242] No waiting events found dispatching network-vif-plugged-046a1230-64fc-4271-9dbb-bd9cab4951bd {{(pid=61839) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 895.056463] env[61839]: WARNING nova.compute.manager [req-9668f9f8-e17f-4c32-a006-a4025c6dde1f req-fada506c-ac10-406c-95ae-519c53f28285 service nova] [instance: 042183e2-d203-4d07-a668-df24ba50e242] Received unexpected event network-vif-plugged-046a1230-64fc-4271-9dbb-bd9cab4951bd for instance with vm_state building and task_state spawning. [ 895.056651] env[61839]: DEBUG nova.compute.manager [req-9668f9f8-e17f-4c32-a006-a4025c6dde1f req-fada506c-ac10-406c-95ae-519c53f28285 service nova] [instance: 042183e2-d203-4d07-a668-df24ba50e242] Received event network-changed-046a1230-64fc-4271-9dbb-bd9cab4951bd {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 895.056834] env[61839]: DEBUG nova.compute.manager [req-9668f9f8-e17f-4c32-a006-a4025c6dde1f req-fada506c-ac10-406c-95ae-519c53f28285 service nova] [instance: 042183e2-d203-4d07-a668-df24ba50e242] Refreshing instance network info cache due to event network-changed-046a1230-64fc-4271-9dbb-bd9cab4951bd.
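
The network-vif-plugged handling above is an event handshake: the neutron-driven external event arrives, pop_instance_event looks for a registered waiter, and with none found the manager logs the "Received unexpected event" warning and moves on. An illustration of that mechanism with plain threading primitives; this is a simplification for exposition, not nova's InstanceEvents code:

    import threading

    _events = {}                 # event name -> threading.Event
    _lock = threading.Lock()

    def wait_for(name, timeout=300):
        # A spawning thread registers interest before the event can fire.
        with _lock:
            waiter = _events.setdefault(name, threading.Event())
        return waiter.wait(timeout)

    def dispatch(name):
        # The event-notified path pops the waiter and wakes it; when no
        # one registered, this is the "No waiting events found" case.
        with _lock:
            waiter = _events.pop(name, None)
        if waiter is not None:
            waiter.set()
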
{{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 895.057064] env[61839]: DEBUG oslo_concurrency.lockutils [req-9668f9f8-e17f-4c32-a006-a4025c6dde1f req-fada506c-ac10-406c-95ae-519c53f28285 service nova] Acquiring lock "refresh_cache-042183e2-d203-4d07-a668-df24ba50e242" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 895.057212] env[61839]: DEBUG oslo_concurrency.lockutils [req-9668f9f8-e17f-4c32-a006-a4025c6dde1f req-fada506c-ac10-406c-95ae-519c53f28285 service nova] Acquired lock "refresh_cache-042183e2-d203-4d07-a668-df24ba50e242" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 895.057374] env[61839]: DEBUG nova.network.neutron [req-9668f9f8-e17f-4c32-a006-a4025c6dde1f req-fada506c-ac10-406c-95ae-519c53f28285 service nova] [instance: 042183e2-d203-4d07-a668-df24ba50e242] Refreshing network info cache for port 046a1230-64fc-4271-9dbb-bd9cab4951bd {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 895.076459] env[61839]: DEBUG oslo_vmware.api [None req-5efce340-2499-408f-bda7-cfc4729c36d5 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': task-1314624, 'name': ReconfigVM_Task, 'duration_secs': 0.208054} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 895.077019] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-5efce340-2499-408f-bda7-cfc4729c36d5 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: a4463efc-ffca-4552-a072-cbf5fe062533] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-281387', 'volume_id': '0b8e7770-f390-47f6-b69f-12c3b2a6f497', 'name': 'volume-0b8e7770-f390-47f6-b69f-12c3b2a6f497', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'a4463efc-ffca-4552-a072-cbf5fe062533', 'attached_at': '', 'detached_at': '', 'volume_id': '0b8e7770-f390-47f6-b69f-12c3b2a6f497', 'serial': '0b8e7770-f390-47f6-b69f-12c3b2a6f497'} {{(pid=61839) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 895.077447] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-5efce340-2499-408f-bda7-cfc4729c36d5 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: a4463efc-ffca-4552-a072-cbf5fe062533] Destroying instance {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 895.078833] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ec197f3-e759-462c-8c7b-29acc94cf285 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.088182] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-5efce340-2499-408f-bda7-cfc4729c36d5 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: a4463efc-ffca-4552-a072-cbf5fe062533] Unregistering the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 895.088536] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e35a200a-97e4-4137-ae84-6609d420bb65 {{(pid=61839) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.176952] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-5efce340-2499-408f-bda7-cfc4729c36d5 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: a4463efc-ffca-4552-a072-cbf5fe062533] Unregistered the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 895.176952] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-5efce340-2499-408f-bda7-cfc4729c36d5 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: a4463efc-ffca-4552-a072-cbf5fe062533] Deleting contents of the VM from datastore datastore1 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 895.177169] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-5efce340-2499-408f-bda7-cfc4729c36d5 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Deleting the datastore file [datastore1] a4463efc-ffca-4552-a072-cbf5fe062533 {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 895.177513] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f8687e8f-c683-4d2b-8393-45f8472b1604 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.185242] env[61839]: DEBUG oslo_vmware.api [None req-5efce340-2499-408f-bda7-cfc4729c36d5 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Waiting for the task: (returnval){ [ 895.185242] env[61839]: value = "task-1314627" [ 895.185242] env[61839]: _type = "Task" [ 895.185242] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 895.192652] env[61839]: DEBUG oslo_vmware.api [None req-5efce340-2499-408f-bda7-cfc4729c36d5 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': task-1314627, 'name': DeleteDatastoreFile_Task} progress is 0%. 
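
The teardown above runs in a fixed order: unregister the VM from vCenter inventory, then delete its datastore directory through FileManager.DeleteDatastoreFile_Task. A sketch of those two calls under the same hypothetical session as before; dc_ref (the Datacenter managed-object reference) is assumed to be looked up elsewhere:

    # Unregister removes the VM from inventory without touching its files.
    session.invoke_api(session.vim, 'UnregisterVM', vm_ref)

    # Then delete the instance directory: again submit-then-poll. The
    # datastore path matches the "Deleting the datastore file" entry above.
    file_manager = session.vim.service_content.fileManager
    task = session.invoke_api(
        session.vim, 'DeleteDatastoreFile_Task', file_manager,
        name='[datastore1] a4463efc-ffca-4552-a072-cbf5fe062533',
        datacenter=dc_ref)
    session.wait_for_task(task)
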
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.277037] env[61839]: DEBUG oslo_concurrency.lockutils [None req-095248fd-e632-43a1-88be-4cc8540a5623 tempest-ServerGroupTestJSON-1627784352 tempest-ServerGroupTestJSON-1627784352-project-member] Acquiring lock "2bbb75d7-8485-40b0-81e9-531e0d13f9ed" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.<locals>._do_validation" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 895.277341] env[61839]: DEBUG oslo_concurrency.lockutils [None req-095248fd-e632-43a1-88be-4cc8540a5623 tempest-ServerGroupTestJSON-1627784352 tempest-ServerGroupTestJSON-1627784352-project-member] Lock "2bbb75d7-8485-40b0-81e9-531e0d13f9ed" acquired by "nova.compute.manager.ComputeManager._validate_instance_group_policy.<locals>._do_validation" :: waited 0.001s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 895.328938] env[61839]: DEBUG oslo_concurrency.lockutils [None req-4887ddbd-b554-48d0-94f5-d697450e63c9 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Releasing lock "[datastore1] devstack-image-cache_base/47c13bd8-eb3a-4bae-9aff-f4a48e5fa7b6" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 895.329322] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-4887ddbd-b554-48d0-94f5-d697450e63c9 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: 042183e2-d203-4d07-a668-df24ba50e242] Processing image 47c13bd8-eb3a-4bae-9aff-f4a48e5fa7b6 {{(pid=61839) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 895.329668] env[61839]: DEBUG oslo_concurrency.lockutils [None req-4887ddbd-b554-48d0-94f5-d697450e63c9 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/47c13bd8-eb3a-4bae-9aff-f4a48e5fa7b6/47c13bd8-eb3a-4bae-9aff-f4a48e5fa7b6.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 895.329891] env[61839]: DEBUG oslo_concurrency.lockutils [None req-4887ddbd-b554-48d0-94f5-d697450e63c9 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Acquired lock "[datastore1] devstack-image-cache_base/47c13bd8-eb3a-4bae-9aff-f4a48e5fa7b6/47c13bd8-eb3a-4bae-9aff-f4a48e5fa7b6.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 895.330153] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-4887ddbd-b554-48d0-94f5-d697450e63c9 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 895.330484] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e3b3f300-7db4-4480-9314-d6c658f8376b {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.344475] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-4887ddbd-b554-48d0-94f5-d697450e63c9 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 
895.344690] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-4887ddbd-b554-48d0-94f5-d697450e63c9 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61839) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 895.345492] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ac40b851-7609-4c36-866e-afc4c041a686 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.355432] env[61839]: DEBUG oslo_vmware.api [None req-4887ddbd-b554-48d0-94f5-d697450e63c9 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Waiting for the task: (returnval){ [ 895.355432] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52eb9670-902b-bd25-a521-e258041f0d5f" [ 895.355432] env[61839]: _type = "Task" [ 895.355432] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 895.368016] env[61839]: DEBUG oslo_vmware.api [None req-4887ddbd-b554-48d0-94f5-d697450e63c9 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52eb9670-902b-bd25-a521-e258041f0d5f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.599670] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d9163cd-7e27-49b6-bdc0-548ac18c1f74 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.613028] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3686ab2-a601-4dc5-9c40-237068ed05ea {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.655682] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2194a25d-2b16-44ec-97d5-3acbf5a1b945 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.665315] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93045363-655d-4c89-bc4b-8e1352534b4a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.684048] env[61839]: DEBUG nova.compute.provider_tree [None req-047f717f-cb4c-4358-b6f0-e71d9ea669bd tempest-ServersTestManualDisk-590910019 tempest-ServersTestManualDisk-590910019-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 895.695849] env[61839]: DEBUG oslo_vmware.api [None req-5efce340-2499-408f-bda7-cfc4729c36d5 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': task-1314627, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.242991} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 895.696161] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-5efce340-2499-408f-bda7-cfc4729c36d5 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Deleted the datastore file {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 895.696358] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-5efce340-2499-408f-bda7-cfc4729c36d5 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: a4463efc-ffca-4552-a072-cbf5fe062533] Deleted contents of the VM from datastore datastore1 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 895.696564] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-5efce340-2499-408f-bda7-cfc4729c36d5 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: a4463efc-ffca-4552-a072-cbf5fe062533] Instance destroyed {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 895.696776] env[61839]: INFO nova.compute.manager [None req-5efce340-2499-408f-bda7-cfc4729c36d5 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: a4463efc-ffca-4552-a072-cbf5fe062533] Took 2.32 seconds to destroy the instance on the hypervisor. [ 895.697045] env[61839]: DEBUG oslo.service.loopingcall [None req-5efce340-2499-408f-bda7-cfc4729c36d5 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 895.697492] env[61839]: DEBUG nova.compute.manager [-] [instance: a4463efc-ffca-4552-a072-cbf5fe062533] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 895.697593] env[61839]: DEBUG nova.network.neutron [-] [instance: a4463efc-ffca-4552-a072-cbf5fe062533] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 895.783023] env[61839]: DEBUG oslo_concurrency.lockutils [None req-095248fd-e632-43a1-88be-4cc8540a5623 tempest-ServerGroupTestJSON-1627784352 tempest-ServerGroupTestJSON-1627784352-project-member] Lock "2bbb75d7-8485-40b0-81e9-531e0d13f9ed" "released" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.<locals>._do_validation" :: held 0.505s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 895.783687] env[61839]: DEBUG nova.compute.manager [None req-095248fd-e632-43a1-88be-4cc8540a5623 tempest-ServerGroupTestJSON-1627784352 tempest-ServerGroupTestJSON-1627784352-project-member] [instance: 65f34f9e-353a-4f94-8f79-9bda89451885] Start building networks asynchronously for instance.
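
"Waiting for function ... _deallocate_network_with_retries to return" is oslo.service's looping-call pattern: the callable is re-run on an interval until it raises LoopingCallDone, whose value becomes the result handed back to the waiter. A minimal sketch of that pattern; the deallocation body itself is elided:

    from oslo_service import loopingcall

    def _deallocate_with_retries():
        # One attempt per tick; on success stop the loop and return a value.
        # (On a retryable failure, simply return to be called again.)
        raise loopingcall.LoopingCallDone(retvalue=True)

    timer = loopingcall.FixedIntervalLoopingCall(_deallocate_with_retries)
    result = timer.start(interval=2).wait()
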
{{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 895.868393] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-4887ddbd-b554-48d0-94f5-d697450e63c9 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: 042183e2-d203-4d07-a668-df24ba50e242] Preparing fetch location {{(pid=61839) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 895.868710] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-4887ddbd-b554-48d0-94f5-d697450e63c9 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: 042183e2-d203-4d07-a668-df24ba50e242] Fetch image to [datastore1] OSTACK_IMG_3bd8a668-775b-49dc-b130-d7927d193b00/OSTACK_IMG_3bd8a668-775b-49dc-b130-d7927d193b00.vmdk {{(pid=61839) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 895.868962] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-4887ddbd-b554-48d0-94f5-d697450e63c9 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: 042183e2-d203-4d07-a668-df24ba50e242] Downloading stream optimized image 47c13bd8-eb3a-4bae-9aff-f4a48e5fa7b6 to [datastore1] OSTACK_IMG_3bd8a668-775b-49dc-b130-d7927d193b00/OSTACK_IMG_3bd8a668-775b-49dc-b130-d7927d193b00.vmdk on the data store datastore1 as vApp {{(pid=61839) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 895.869143] env[61839]: DEBUG nova.virt.vmwareapi.images [None req-4887ddbd-b554-48d0-94f5-d697450e63c9 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: 042183e2-d203-4d07-a668-df24ba50e242] Downloading image file data 47c13bd8-eb3a-4bae-9aff-f4a48e5fa7b6 to the ESX as VM named 'OSTACK_IMG_3bd8a668-775b-49dc-b130-d7927d193b00' {{(pid=61839) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 896.035342] env[61839]: DEBUG nova.network.neutron [req-9668f9f8-e17f-4c32-a006-a4025c6dde1f req-fada506c-ac10-406c-95ae-519c53f28285 service nova] [instance: 042183e2-d203-4d07-a668-df24ba50e242] Updated VIF entry in instance network info cache for port 046a1230-64fc-4271-9dbb-bd9cab4951bd. 
{{(pid=61839) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 896.035731] env[61839]: DEBUG nova.network.neutron [req-9668f9f8-e17f-4c32-a006-a4025c6dde1f req-fada506c-ac10-406c-95ae-519c53f28285 service nova] [instance: 042183e2-d203-4d07-a668-df24ba50e242] Updating instance_info_cache with network_info: [{"id": "046a1230-64fc-4271-9dbb-bd9cab4951bd", "address": "fa:16:3e:b1:8f:8f", "network": {"id": "8c9e6595-896e-44e0-996a-ff86bae5ad8e", "bridge": "br-int", "label": "tempest-ImagesTestJSON-788251660-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "251b0d2531ba4f14a2eb6ea75382c418", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ce17e10e-2fb0-4191-afee-e2b89fa15074", "external-id": "nsx-vlan-transportzone-352", "segmentation_id": 352, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap046a1230-64", "ovs_interfaceid": "046a1230-64fc-4271-9dbb-bd9cab4951bd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 896.073740] env[61839]: DEBUG oslo_concurrency.lockutils [None req-e307782d-5b08-418e-9729-3f48548ba20b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Acquiring lock "interface-40c54d84-8e50-483a-b4e0-5f1cc72b0880-c97479ed-b409-4c5f-950d-e8663ef7da4e" by "nova.compute.manager.ComputeManager.detach_interface.<locals>.do_detach_interface" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 896.074065] env[61839]: DEBUG oslo_concurrency.lockutils [None req-e307782d-5b08-418e-9729-3f48548ba20b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Lock "interface-40c54d84-8e50-483a-b4e0-5f1cc72b0880-c97479ed-b409-4c5f-950d-e8663ef7da4e" acquired by "nova.compute.manager.ComputeManager.detach_interface.<locals>.do_detach_interface" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 896.184804] env[61839]: DEBUG oslo_vmware.rw_handles [None req-d2f1d7dc-78e2-44fa-94f8-ee4c41a57b55 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/522e36b8-66b5-12bd-2db3-9c3ef207a348/disk-0.vmdk.
{{(pid=61839) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 896.185867] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6605f767-c201-4868-82a0-779c4b5cf320 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.193320] env[61839]: DEBUG nova.scheduler.client.report [None req-047f717f-cb4c-4358-b6f0-e71d9ea669bd tempest-ServersTestManualDisk-590910019 tempest-ServersTestManualDisk-590910019-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 896.200091] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e719bc8c-4325-4586-b058-6175930816b1 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.204369] env[61839]: DEBUG oslo_vmware.rw_handles [None req-d2f1d7dc-78e2-44fa-94f8-ee4c41a57b55 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/522e36b8-66b5-12bd-2db3-9c3ef207a348/disk-0.vmdk is in state: ready. {{(pid=61839) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 896.204579] env[61839]: ERROR oslo_vmware.rw_handles [None req-d2f1d7dc-78e2-44fa-94f8-ee4c41a57b55 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/522e36b8-66b5-12bd-2db3-9c3ef207a348/disk-0.vmdk due to incomplete transfer. [ 896.205207] env[61839]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-5b40b166-a702-42c0-878f-5fbfd4a27ba3 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.221348] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-ffc025d6-10c2-4e09-b5f6-377b5ab1692a tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: fa8a2265-291e-4424-bea1-72574e495a72] Updating instance 'fa8a2265-291e-4424-bea1-72574e495a72' progress to 0 {{(pid=61839) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 896.226876] env[61839]: DEBUG oslo_vmware.rw_handles [None req-4887ddbd-b554-48d0-94f5-d697450e63c9 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 896.226876] env[61839]: value = "resgroup-9" [ 896.226876] env[61839]: _type = "ResourcePool" [ 896.226876] env[61839]: }. 
{{(pid=61839) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 896.227507] env[61839]: DEBUG oslo_vmware.rw_handles [None req-d2f1d7dc-78e2-44fa-94f8-ee4c41a57b55 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/522e36b8-66b5-12bd-2db3-9c3ef207a348/disk-0.vmdk. {{(pid=61839) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 896.227797] env[61839]: DEBUG nova.virt.vmwareapi.images [None req-d2f1d7dc-78e2-44fa-94f8-ee4c41a57b55 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: 694a5d4b-3673-406b-a24a-d37fad33e549] Uploaded image d630a97f-7e84-429e-8613-52457a48a10a to the Glance image server {{(pid=61839) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 896.229749] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-d2f1d7dc-78e2-44fa-94f8-ee4c41a57b55 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: 694a5d4b-3673-406b-a24a-d37fad33e549] Destroying the VM {{(pid=61839) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 896.231749] env[61839]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-86dd5afa-3d24-4ef0-83f6-94aa8b8d3778 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.245529] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-742bb17f-c72f-4e9d-9735-8f9e34fbb5f6 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.255752] env[61839]: DEBUG oslo_vmware.rw_handles [None req-4887ddbd-b554-48d0-94f5-d697450e63c9 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Lease: (returnval){ [ 896.255752] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52420225-f2de-0c95-4fb5-b8a998f18381" [ 896.255752] env[61839]: _type = "HttpNfcLease" [ 896.255752] env[61839]: } obtained for vApp import into resource pool (val){ [ 896.255752] env[61839]: value = "resgroup-9" [ 896.255752] env[61839]: _type = "ResourcePool" [ 896.255752] env[61839]: }. {{(pid=61839) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 896.255752] env[61839]: DEBUG oslo_vmware.api [None req-4887ddbd-b554-48d0-94f5-d697450e63c9 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Waiting for the lease: (returnval){ [ 896.255752] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52420225-f2de-0c95-4fb5-b8a998f18381" [ 896.255752] env[61839]: _type = "HttpNfcLease" [ 896.255752] env[61839]: } to be ready. {{(pid=61839) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 896.274048] env[61839]: DEBUG oslo_vmware.api [None req-d2f1d7dc-78e2-44fa-94f8-ee4c41a57b55 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Waiting for the task: (returnval){ [ 896.274048] env[61839]: value = "task-1314628" [ 896.274048] env[61839]: _type = "Task" [ 896.274048] env[61839]: } to complete. 
{{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 896.279628] env[61839]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 896.279628] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52420225-f2de-0c95-4fb5-b8a998f18381" [ 896.279628] env[61839]: _type = "HttpNfcLease" [ 896.279628] env[61839]: } is initializing. {{(pid=61839) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 896.286441] env[61839]: DEBUG oslo_vmware.api [None req-d2f1d7dc-78e2-44fa-94f8-ee4c41a57b55 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Task: {'id': task-1314628, 'name': Destroy_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.288891] env[61839]: DEBUG nova.compute.utils [None req-095248fd-e632-43a1-88be-4cc8540a5623 tempest-ServerGroupTestJSON-1627784352 tempest-ServerGroupTestJSON-1627784352-project-member] Using /dev/sd instead of None {{(pid=61839) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 896.290614] env[61839]: DEBUG nova.compute.manager [None req-095248fd-e632-43a1-88be-4cc8540a5623 tempest-ServerGroupTestJSON-1627784352 tempest-ServerGroupTestJSON-1627784352-project-member] [instance: 65f34f9e-353a-4f94-8f79-9bda89451885] Allocating IP information in the background. {{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 896.290893] env[61839]: DEBUG nova.network.neutron [None req-095248fd-e632-43a1-88be-4cc8540a5623 tempest-ServerGroupTestJSON-1627784352 tempest-ServerGroupTestJSON-1627784352-project-member] [instance: 65f34f9e-353a-4f94-8f79-9bda89451885] allocate_for_instance() {{(pid=61839) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 896.374925] env[61839]: DEBUG nova.policy [None req-095248fd-e632-43a1-88be-4cc8540a5623 tempest-ServerGroupTestJSON-1627784352 tempest-ServerGroupTestJSON-1627784352-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '748b722206b347e3b3d8ae0e3abf38da', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1a03043943ea42c6b2d13c9ed76726d0', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61839) authorize /opt/stack/nova/nova/policy.py:201}} [ 896.539251] env[61839]: DEBUG oslo_concurrency.lockutils [req-9668f9f8-e17f-4c32-a006-a4025c6dde1f req-fada506c-ac10-406c-95ae-519c53f28285 service nova] Releasing lock "refresh_cache-042183e2-d203-4d07-a668-df24ba50e242" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 896.577192] env[61839]: DEBUG oslo_concurrency.lockutils [None req-e307782d-5b08-418e-9729-3f48548ba20b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Acquiring lock "40c54d84-8e50-483a-b4e0-5f1cc72b0880" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 896.577401] env[61839]: DEBUG oslo_concurrency.lockutils [None req-e307782d-5b08-418e-9729-3f48548ba20b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Acquired lock 
"40c54d84-8e50-483a-b4e0-5f1cc72b0880" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 896.578472] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98d859c0-e236-400f-a41e-805aecef4523 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.598065] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-462e22d1-fc58-4c32-92d1-7376188fe797 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.625179] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-e307782d-5b08-418e-9729-3f48548ba20b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: 40c54d84-8e50-483a-b4e0-5f1cc72b0880] Reconfiguring VM to detach interface {{(pid=61839) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 896.625938] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-474b32f4-47ed-4d38-a46b-b1d1a4fa4005 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.638976] env[61839]: DEBUG nova.network.neutron [-] [instance: a4463efc-ffca-4552-a072-cbf5fe062533] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 896.646754] env[61839]: DEBUG oslo_vmware.api [None req-e307782d-5b08-418e-9729-3f48548ba20b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Waiting for the task: (returnval){ [ 896.646754] env[61839]: value = "task-1314630" [ 896.646754] env[61839]: _type = "Task" [ 896.646754] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 896.656531] env[61839]: DEBUG oslo_vmware.api [None req-e307782d-5b08-418e-9729-3f48548ba20b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': task-1314630, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.699724] env[61839]: DEBUG oslo_concurrency.lockutils [None req-047f717f-cb4c-4358-b6f0-e71d9ea669bd tempest-ServersTestManualDisk-590910019 tempest-ServersTestManualDisk-590910019-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.926s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 896.702594] env[61839]: DEBUG oslo_concurrency.lockutils [None req-56232971-235f-4baa-8514-44b4e05db378 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.928s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 896.704047] env[61839]: INFO nova.compute.claims [None req-56232971-235f-4baa-8514-44b4e05db378 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 0d42326a-9958-463a-90ae-34fb55e99c5b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 896.735016] env[61839]: INFO nova.scheduler.client.report [None req-047f717f-cb4c-4358-b6f0-e71d9ea669bd tempest-ServersTestManualDisk-590910019 tempest-ServersTestManualDisk-590910019-project-member] Deleted allocations for instance 39728872-2d30-48eb-90da-412f1e45971c [ 896.746978] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-ffc025d6-10c2-4e09-b5f6-377b5ab1692a tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: fa8a2265-291e-4424-bea1-72574e495a72] Powering off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 896.747329] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-387e1f14-d756-4932-8abe-800f6264de6f {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.756264] env[61839]: DEBUG oslo_vmware.api [None req-ffc025d6-10c2-4e09-b5f6-377b5ab1692a tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Waiting for the task: (returnval){ [ 896.756264] env[61839]: value = "task-1314631" [ 896.756264] env[61839]: _type = "Task" [ 896.756264] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 896.772298] env[61839]: DEBUG oslo_vmware.api [None req-ffc025d6-10c2-4e09-b5f6-377b5ab1692a tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Task: {'id': task-1314631, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.774260] env[61839]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 896.774260] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52420225-f2de-0c95-4fb5-b8a998f18381" [ 896.774260] env[61839]: _type = "HttpNfcLease" [ 896.774260] env[61839]: } is initializing. 
{{(pid=61839) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 896.783774] env[61839]: DEBUG oslo_vmware.api [None req-d2f1d7dc-78e2-44fa-94f8-ee4c41a57b55 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Task: {'id': task-1314628, 'name': Destroy_Task, 'duration_secs': 0.423769} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 896.784307] env[61839]: INFO nova.virt.vmwareapi.vm_util [None req-d2f1d7dc-78e2-44fa-94f8-ee4c41a57b55 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: 694a5d4b-3673-406b-a24a-d37fad33e549] Destroyed the VM [ 896.784453] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-d2f1d7dc-78e2-44fa-94f8-ee4c41a57b55 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: 694a5d4b-3673-406b-a24a-d37fad33e549] Deleting Snapshot of the VM instance {{(pid=61839) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 896.784715] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-1a4c789a-c378-4b86-aea9-a8464944ee56 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.793398] env[61839]: DEBUG oslo_vmware.api [None req-d2f1d7dc-78e2-44fa-94f8-ee4c41a57b55 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Waiting for the task: (returnval){ [ 896.793398] env[61839]: value = "task-1314632" [ 896.793398] env[61839]: _type = "Task" [ 896.793398] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 896.794024] env[61839]: DEBUG nova.compute.manager [None req-095248fd-e632-43a1-88be-4cc8540a5623 tempest-ServerGroupTestJSON-1627784352 tempest-ServerGroupTestJSON-1627784352-project-member] [instance: 65f34f9e-353a-4f94-8f79-9bda89451885] Start building block device mappings for instance. {{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 896.807988] env[61839]: DEBUG oslo_vmware.api [None req-d2f1d7dc-78e2-44fa-94f8-ee4c41a57b55 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Task: {'id': task-1314632, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.832661] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-4125a688-0d5e-4b32-904c-d54ba064855a tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] [instance: 3f86a0d5-30fd-42cc-bd40-14bce9d0e56f] Volume attach. 
Driver type: vmdk {{(pid=61839) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 896.833690] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-4125a688-0d5e-4b32-904c-d54ba064855a tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] [instance: 3f86a0d5-30fd-42cc-bd40-14bce9d0e56f] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-281391', 'volume_id': 'd2c3d3cb-6455-41aa-afdd-96adbd2dda18', 'name': 'volume-d2c3d3cb-6455-41aa-afdd-96adbd2dda18', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '3f86a0d5-30fd-42cc-bd40-14bce9d0e56f', 'attached_at': '', 'detached_at': '', 'volume_id': 'd2c3d3cb-6455-41aa-afdd-96adbd2dda18', 'serial': 'd2c3d3cb-6455-41aa-afdd-96adbd2dda18'} {{(pid=61839) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 896.835960] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36412a93-336e-401b-a5b8-4df1af1b8e5b {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.858704] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f4f3116-60ad-47ec-8dd5-ad01d2ed4fd8 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.882588] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-4125a688-0d5e-4b32-904c-d54ba064855a tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] [instance: 3f86a0d5-30fd-42cc-bd40-14bce9d0e56f] Reconfiguring VM instance instance-00000045 to attach disk [datastore1] volume-d2c3d3cb-6455-41aa-afdd-96adbd2dda18/volume-d2c3d3cb-6455-41aa-afdd-96adbd2dda18.vmdk or device None with type thin {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 896.882972] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bf3af8c6-8151-4690-ac1c-151b022c2a48 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.903761] env[61839]: DEBUG oslo_vmware.api [None req-4125a688-0d5e-4b32-904c-d54ba064855a tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Waiting for the task: (returnval){ [ 896.903761] env[61839]: value = "task-1314633" [ 896.903761] env[61839]: _type = "Task" [ 896.903761] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 896.910586] env[61839]: DEBUG nova.network.neutron [None req-095248fd-e632-43a1-88be-4cc8540a5623 tempest-ServerGroupTestJSON-1627784352 tempest-ServerGroupTestJSON-1627784352-project-member] [instance: 65f34f9e-353a-4f94-8f79-9bda89451885] Successfully created port: 06a51f4e-0ef4-4148-b0a8-468d9345dc05 {{(pid=61839) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 896.915729] env[61839]: DEBUG oslo_vmware.api [None req-4125a688-0d5e-4b32-904c-d54ba064855a tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Task: {'id': task-1314633, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.100434] env[61839]: DEBUG nova.compute.manager [req-20b0036d-db4a-4a33-bb72-6c4bcf272ada req-15f1814c-b4f9-428d-9119-5902fea63753 service nova] [instance: a4463efc-ffca-4552-a072-cbf5fe062533] Received event network-vif-deleted-c003fe42-e6b6-4c8d-bfa7-f1e28aadf9f8 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 897.141654] env[61839]: INFO nova.compute.manager [-] [instance: a4463efc-ffca-4552-a072-cbf5fe062533] Took 1.44 seconds to deallocate network for instance. [ 897.161042] env[61839]: DEBUG oslo_vmware.api [None req-e307782d-5b08-418e-9729-3f48548ba20b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': task-1314630, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.245543] env[61839]: DEBUG oslo_concurrency.lockutils [None req-047f717f-cb4c-4358-b6f0-e71d9ea669bd tempest-ServersTestManualDisk-590910019 tempest-ServersTestManualDisk-590910019-project-member] Lock "39728872-2d30-48eb-90da-412f1e45971c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 14.152s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 897.280040] env[61839]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 897.280040] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52420225-f2de-0c95-4fb5-b8a998f18381" [ 897.280040] env[61839]: _type = "HttpNfcLease" [ 897.280040] env[61839]: } is ready. {{(pid=61839) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 897.280447] env[61839]: DEBUG oslo_vmware.api [None req-ffc025d6-10c2-4e09-b5f6-377b5ab1692a tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Task: {'id': task-1314631, 'name': PowerOffVM_Task} progress is 100%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.280662] env[61839]: DEBUG oslo_vmware.rw_handles [None req-4887ddbd-b554-48d0-94f5-d697450e63c9 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 897.280662] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52420225-f2de-0c95-4fb5-b8a998f18381" [ 897.280662] env[61839]: _type = "HttpNfcLease" [ 897.280662] env[61839]: }. {{(pid=61839) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 897.281490] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7db3cb4-e414-4784-a149-9401a31c6450 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.291017] env[61839]: DEBUG oslo_vmware.rw_handles [None req-4887ddbd-b554-48d0-94f5-d697450e63c9 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52131ebe-8997-af43-a95d-92a8bcc827cb/disk-0.vmdk from lease info. 
{{(pid=61839) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 897.291248] env[61839]: DEBUG oslo_vmware.rw_handles [None req-4887ddbd-b554-48d0-94f5-d697450e63c9 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Creating HTTP connection to write to file with size = 21334016 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52131ebe-8997-af43-a95d-92a8bcc827cb/disk-0.vmdk. {{(pid=61839) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 897.364186] env[61839]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-0683a13a-6025-45d1-b0f8-ccddc7026b27 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.370306] env[61839]: DEBUG oslo_vmware.api [None req-d2f1d7dc-78e2-44fa-94f8-ee4c41a57b55 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Task: {'id': task-1314632, 'name': RemoveSnapshot_Task, 'duration_secs': 0.404735} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 897.371653] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-d2f1d7dc-78e2-44fa-94f8-ee4c41a57b55 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: 694a5d4b-3673-406b-a24a-d37fad33e549] Deleted Snapshot of the VM instance {{(pid=61839) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 897.371967] env[61839]: DEBUG nova.compute.manager [None req-d2f1d7dc-78e2-44fa-94f8-ee4c41a57b55 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: 694a5d4b-3673-406b-a24a-d37fad33e549] Checking state {{(pid=61839) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 897.372980] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fba7a25-f9b7-4ee1-bb48-1789dab5ff01 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.417607] env[61839]: DEBUG oslo_vmware.api [None req-4125a688-0d5e-4b32-904c-d54ba064855a tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Task: {'id': task-1314633, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.577945] env[61839]: DEBUG oslo_concurrency.lockutils [None req-7e58e74a-aa1f-4c3c-9aa4-d23c1b92735c tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Acquiring lock "56369316-a445-4a2a-a0a6-967074104e19" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 897.578165] env[61839]: DEBUG oslo_concurrency.lockutils [None req-7e58e74a-aa1f-4c3c-9aa4-d23c1b92735c tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Lock "56369316-a445-4a2a-a0a6-967074104e19" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 897.658029] env[61839]: DEBUG oslo_vmware.api [None req-e307782d-5b08-418e-9729-3f48548ba20b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': task-1314630, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.707343] env[61839]: INFO nova.compute.manager [None req-5efce340-2499-408f-bda7-cfc4729c36d5 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: a4463efc-ffca-4552-a072-cbf5fe062533] Took 0.57 seconds to detach 1 volumes for instance. [ 897.776468] env[61839]: DEBUG oslo_vmware.api [None req-ffc025d6-10c2-4e09-b5f6-377b5ab1692a tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Task: {'id': task-1314631, 'name': PowerOffVM_Task, 'duration_secs': 0.624556} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 897.778739] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-ffc025d6-10c2-4e09-b5f6-377b5ab1692a tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: fa8a2265-291e-4424-bea1-72574e495a72] Powered off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 897.781119] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-ffc025d6-10c2-4e09-b5f6-377b5ab1692a tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: fa8a2265-291e-4424-bea1-72574e495a72] Updating instance 'fa8a2265-291e-4424-bea1-72574e495a72' progress to 17 {{(pid=61839) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 897.863390] env[61839]: DEBUG nova.compute.manager [None req-095248fd-e632-43a1-88be-4cc8540a5623 tempest-ServerGroupTestJSON-1627784352 tempest-ServerGroupTestJSON-1627784352-project-member] [instance: 65f34f9e-353a-4f94-8f79-9bda89451885] Start spawning the instance on the hypervisor. 
{{(pid=61839) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 897.895141] env[61839]: INFO nova.compute.manager [None req-d2f1d7dc-78e2-44fa-94f8-ee4c41a57b55 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: 694a5d4b-3673-406b-a24a-d37fad33e549] Shelve offloading [ 897.903427] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-d2f1d7dc-78e2-44fa-94f8-ee4c41a57b55 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: 694a5d4b-3673-406b-a24a-d37fad33e549] Powering off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 897.903427] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f6cfa467-1d81-4374-b8ce-d1bef59553cf {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.911616] env[61839]: DEBUG nova.virt.hardware [None req-095248fd-e632-43a1-88be-4cc8540a5623 tempest-ServerGroupTestJSON-1627784352 tempest-ServerGroupTestJSON-1627784352-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-18T16:51:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-18T16:50:57Z,direct_url=,disk_format='vmdk',id=e497cc62-282a-4a70-9770-22d80d8a1013,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a98e034276e44534a9feace637762da7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-18T16:50:58Z,virtual_size=,visibility=), allow threads: False {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 897.911895] env[61839]: DEBUG nova.virt.hardware [None req-095248fd-e632-43a1-88be-4cc8540a5623 tempest-ServerGroupTestJSON-1627784352 tempest-ServerGroupTestJSON-1627784352-project-member] Flavor limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 897.912091] env[61839]: DEBUG nova.virt.hardware [None req-095248fd-e632-43a1-88be-4cc8540a5623 tempest-ServerGroupTestJSON-1627784352 tempest-ServerGroupTestJSON-1627784352-project-member] Image limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 897.912295] env[61839]: DEBUG nova.virt.hardware [None req-095248fd-e632-43a1-88be-4cc8540a5623 tempest-ServerGroupTestJSON-1627784352 tempest-ServerGroupTestJSON-1627784352-project-member] Flavor pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 897.912492] env[61839]: DEBUG nova.virt.hardware [None req-095248fd-e632-43a1-88be-4cc8540a5623 tempest-ServerGroupTestJSON-1627784352 tempest-ServerGroupTestJSON-1627784352-project-member] Image pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 897.912657] env[61839]: DEBUG nova.virt.hardware [None req-095248fd-e632-43a1-88be-4cc8540a5623 tempest-ServerGroupTestJSON-1627784352 tempest-ServerGroupTestJSON-1627784352-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 
{{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 897.912876] env[61839]: DEBUG nova.virt.hardware [None req-095248fd-e632-43a1-88be-4cc8540a5623 tempest-ServerGroupTestJSON-1627784352 tempest-ServerGroupTestJSON-1627784352-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 897.913063] env[61839]: DEBUG nova.virt.hardware [None req-095248fd-e632-43a1-88be-4cc8540a5623 tempest-ServerGroupTestJSON-1627784352 tempest-ServerGroupTestJSON-1627784352-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 897.913278] env[61839]: DEBUG nova.virt.hardware [None req-095248fd-e632-43a1-88be-4cc8540a5623 tempest-ServerGroupTestJSON-1627784352 tempest-ServerGroupTestJSON-1627784352-project-member] Got 1 possible topologies {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 897.913475] env[61839]: DEBUG nova.virt.hardware [None req-095248fd-e632-43a1-88be-4cc8540a5623 tempest-ServerGroupTestJSON-1627784352 tempest-ServerGroupTestJSON-1627784352-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 897.913678] env[61839]: DEBUG nova.virt.hardware [None req-095248fd-e632-43a1-88be-4cc8540a5623 tempest-ServerGroupTestJSON-1627784352 tempest-ServerGroupTestJSON-1627784352-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 897.914616] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e624e69a-04b2-43a3-937a-12c13d41d349 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.925309] env[61839]: DEBUG oslo_vmware.api [None req-d2f1d7dc-78e2-44fa-94f8-ee4c41a57b55 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Waiting for the task: (returnval){ [ 897.925309] env[61839]: value = "task-1314634" [ 897.925309] env[61839]: _type = "Task" [ 897.925309] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 897.938669] env[61839]: DEBUG oslo_vmware.api [None req-4125a688-0d5e-4b32-904c-d54ba064855a tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Task: {'id': task-1314633, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.945034] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c15b795-84e6-4cc7-94fa-80d60fcf2292 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.960277] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-d2f1d7dc-78e2-44fa-94f8-ee4c41a57b55 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: 694a5d4b-3673-406b-a24a-d37fad33e549] VM already powered off {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 897.962019] env[61839]: DEBUG nova.compute.manager [None req-d2f1d7dc-78e2-44fa-94f8-ee4c41a57b55 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: 694a5d4b-3673-406b-a24a-d37fad33e549] Checking state {{(pid=61839) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 897.962019] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c53c8878-6e31-4a56-b709-9863369077c5 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.987019] env[61839]: DEBUG oslo_concurrency.lockutils [None req-d2f1d7dc-78e2-44fa-94f8-ee4c41a57b55 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Acquiring lock "refresh_cache-694a5d4b-3673-406b-a24a-d37fad33e549" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 897.987019] env[61839]: DEBUG oslo_concurrency.lockutils [None req-d2f1d7dc-78e2-44fa-94f8-ee4c41a57b55 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Acquired lock "refresh_cache-694a5d4b-3673-406b-a24a-d37fad33e549" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 897.987019] env[61839]: DEBUG nova.network.neutron [None req-d2f1d7dc-78e2-44fa-94f8-ee4c41a57b55 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: 694a5d4b-3673-406b-a24a-d37fad33e549] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 898.081825] env[61839]: DEBUG nova.compute.utils [None req-7e58e74a-aa1f-4c3c-9aa4-d23c1b92735c tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Using /dev/sd instead of None {{(pid=61839) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 898.117688] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c5029d5-e80e-473f-b874-8ff9c7e3b55c {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.134729] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04783d1d-c1f8-4ba2-8c40-914ce826ad41 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.207030] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a0db750-6520-4593-9535-2b51fed08c13 
{{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.218520] env[61839]: DEBUG oslo_concurrency.lockutils [None req-5efce340-2499-408f-bda7-cfc4729c36d5 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 898.225829] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0512cb65-0717-43ba-bbb3-da6e0b33b962 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.230021] env[61839]: DEBUG oslo_vmware.api [None req-e307782d-5b08-418e-9729-3f48548ba20b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': task-1314630, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.243381] env[61839]: DEBUG nova.compute.provider_tree [None req-56232971-235f-4baa-8514-44b4e05db378 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 898.279722] env[61839]: DEBUG oslo_vmware.rw_handles [None req-4887ddbd-b554-48d0-94f5-d697450e63c9 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Completed reading data from the image iterator. {{(pid=61839) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 898.280063] env[61839]: DEBUG oslo_vmware.rw_handles [None req-4887ddbd-b554-48d0-94f5-d697450e63c9 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52131ebe-8997-af43-a95d-92a8bcc827cb/disk-0.vmdk. 
{{(pid=61839) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 898.281170] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d936c1c-1135-4ebd-81b0-3a0ee2df0bef {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.292049] env[61839]: DEBUG nova.virt.hardware [None req-ffc025d6-10c2-4e09-b5f6-377b5ab1692a tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-18T16:51:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=e497cc62-282a-4a70-9770-22d80d8a1013,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 898.292302] env[61839]: DEBUG nova.virt.hardware [None req-ffc025d6-10c2-4e09-b5f6-377b5ab1692a tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Flavor limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 898.292460] env[61839]: DEBUG nova.virt.hardware [None req-ffc025d6-10c2-4e09-b5f6-377b5ab1692a tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Image limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 898.292643] env[61839]: DEBUG nova.virt.hardware [None req-ffc025d6-10c2-4e09-b5f6-377b5ab1692a tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Flavor pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 898.292857] env[61839]: DEBUG nova.virt.hardware [None req-ffc025d6-10c2-4e09-b5f6-377b5ab1692a tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Image pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 898.293127] env[61839]: DEBUG nova.virt.hardware [None req-ffc025d6-10c2-4e09-b5f6-377b5ab1692a tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 898.293457] env[61839]: DEBUG nova.virt.hardware [None req-ffc025d6-10c2-4e09-b5f6-377b5ab1692a tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 898.293672] env[61839]: DEBUG nova.virt.hardware [None req-ffc025d6-10c2-4e09-b5f6-377b5ab1692a tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Build topologies for 1 vcpu(s) 
1:1:1 {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 898.294294] env[61839]: DEBUG nova.virt.hardware [None req-ffc025d6-10c2-4e09-b5f6-377b5ab1692a tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Got 1 possible topologies {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 898.294294] env[61839]: DEBUG nova.virt.hardware [None req-ffc025d6-10c2-4e09-b5f6-377b5ab1692a tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 898.294437] env[61839]: DEBUG nova.virt.hardware [None req-ffc025d6-10c2-4e09-b5f6-377b5ab1692a tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 898.302937] env[61839]: DEBUG oslo_vmware.rw_handles [None req-4887ddbd-b554-48d0-94f5-d697450e63c9 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52131ebe-8997-af43-a95d-92a8bcc827cb/disk-0.vmdk is in state: ready. {{(pid=61839) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 898.302937] env[61839]: DEBUG oslo_vmware.rw_handles [None req-4887ddbd-b554-48d0-94f5-d697450e63c9 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Releasing lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52131ebe-8997-af43-a95d-92a8bcc827cb/disk-0.vmdk. {{(pid=61839) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 898.303140] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4ac0b142-18c4-4da4-9a31-40c125ba1fa8 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.314351] env[61839]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-3fa1dc0f-f88f-44cd-9388-f7d8fe31e470 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.325857] env[61839]: DEBUG oslo_vmware.api [None req-ffc025d6-10c2-4e09-b5f6-377b5ab1692a tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Waiting for the task: (returnval){ [ 898.325857] env[61839]: value = "task-1314635" [ 898.325857] env[61839]: _type = "Task" [ 898.325857] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 898.339041] env[61839]: DEBUG oslo_vmware.api [None req-ffc025d6-10c2-4e09-b5f6-377b5ab1692a tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Task: {'id': task-1314635, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.416282] env[61839]: DEBUG oslo_vmware.api [None req-4125a688-0d5e-4b32-904c-d54ba064855a tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Task: {'id': task-1314633, 'name': ReconfigVM_Task, 'duration_secs': 1.475793} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 898.416690] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-4125a688-0d5e-4b32-904c-d54ba064855a tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] [instance: 3f86a0d5-30fd-42cc-bd40-14bce9d0e56f] Reconfigured VM instance instance-00000045 to attach disk [datastore1] volume-d2c3d3cb-6455-41aa-afdd-96adbd2dda18/volume-d2c3d3cb-6455-41aa-afdd-96adbd2dda18.vmdk or device None with type thin {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 898.424775] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9a753fe4-5ce9-4e47-86b1-1aab3db12634 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.449357] env[61839]: DEBUG oslo_vmware.api [None req-4125a688-0d5e-4b32-904c-d54ba064855a tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Waiting for the task: (returnval){ [ 898.449357] env[61839]: value = "task-1314636" [ 898.449357] env[61839]: _type = "Task" [ 898.449357] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 898.459227] env[61839]: DEBUG oslo_vmware.api [None req-4125a688-0d5e-4b32-904c-d54ba064855a tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Task: {'id': task-1314636, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.586286] env[61839]: DEBUG oslo_concurrency.lockutils [None req-7e58e74a-aa1f-4c3c-9aa4-d23c1b92735c tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Lock "56369316-a445-4a2a-a0a6-967074104e19" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.008s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 898.593272] env[61839]: DEBUG oslo_vmware.rw_handles [None req-4887ddbd-b554-48d0-94f5-d697450e63c9 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Closed VMDK write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52131ebe-8997-af43-a95d-92a8bcc827cb/disk-0.vmdk. 
{{(pid=61839) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 898.593595] env[61839]: INFO nova.virt.vmwareapi.images [None req-4887ddbd-b554-48d0-94f5-d697450e63c9 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: 042183e2-d203-4d07-a668-df24ba50e242] Downloaded image file data 47c13bd8-eb3a-4bae-9aff-f4a48e5fa7b6 [ 898.594585] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f81086e-6ed1-48ae-8107-900b5ccd76b5 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.613138] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b99da514-c316-402f-b6e6-974297254de0 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.650755] env[61839]: INFO nova.virt.vmwareapi.images [None req-4887ddbd-b554-48d0-94f5-d697450e63c9 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: 042183e2-d203-4d07-a668-df24ba50e242] The imported VM was unregistered [ 898.653586] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-4887ddbd-b554-48d0-94f5-d697450e63c9 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: 042183e2-d203-4d07-a668-df24ba50e242] Caching image {{(pid=61839) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 898.654926] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-4887ddbd-b554-48d0-94f5-d697450e63c9 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Creating directory with path [datastore1] devstack-image-cache_base/47c13bd8-eb3a-4bae-9aff-f4a48e5fa7b6 {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 898.657018] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-41b561cd-47f7-48f0-a0fd-a3e1d50e4d29 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.669651] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-4887ddbd-b554-48d0-94f5-d697450e63c9 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Created directory with path [datastore1] devstack-image-cache_base/47c13bd8-eb3a-4bae-9aff-f4a48e5fa7b6 {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 898.669941] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-4887ddbd-b554-48d0-94f5-d697450e63c9 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_3bd8a668-775b-49dc-b130-d7927d193b00/OSTACK_IMG_3bd8a668-775b-49dc-b130-d7927d193b00.vmdk to [datastore1] devstack-image-cache_base/47c13bd8-eb3a-4bae-9aff-f4a48e5fa7b6/47c13bd8-eb3a-4bae-9aff-f4a48e5fa7b6.vmdk. 
{{(pid=61839) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 898.670246] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-a42596ab-8048-4934-8b81-fa8a77537079 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.679551] env[61839]: DEBUG oslo_vmware.api [None req-4887ddbd-b554-48d0-94f5-d697450e63c9 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Waiting for the task: (returnval){ [ 898.679551] env[61839]: value = "task-1314638" [ 898.679551] env[61839]: _type = "Task" [ 898.679551] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 898.690102] env[61839]: DEBUG oslo_vmware.api [None req-4887ddbd-b554-48d0-94f5-d697450e63c9 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Task: {'id': task-1314638, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.700128] env[61839]: DEBUG oslo_vmware.api [None req-e307782d-5b08-418e-9729-3f48548ba20b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': task-1314630, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.748907] env[61839]: DEBUG nova.scheduler.client.report [None req-56232971-235f-4baa-8514-44b4e05db378 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 898.763390] env[61839]: DEBUG nova.network.neutron [None req-095248fd-e632-43a1-88be-4cc8540a5623 tempest-ServerGroupTestJSON-1627784352 tempest-ServerGroupTestJSON-1627784352-project-member] [instance: 65f34f9e-353a-4f94-8f79-9bda89451885] Successfully updated port: 06a51f4e-0ef4-4148-b0a8-468d9345dc05 {{(pid=61839) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 898.841142] env[61839]: DEBUG oslo_vmware.api [None req-ffc025d6-10c2-4e09-b5f6-377b5ab1692a tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Task: {'id': task-1314635, 'name': ReconfigVM_Task, 'duration_secs': 0.223827} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 898.841904] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-ffc025d6-10c2-4e09-b5f6-377b5ab1692a tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: fa8a2265-291e-4424-bea1-72574e495a72] Updating instance 'fa8a2265-291e-4424-bea1-72574e495a72' progress to 33 {{(pid=61839) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 898.930224] env[61839]: DEBUG nova.network.neutron [None req-d2f1d7dc-78e2-44fa-94f8-ee4c41a57b55 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: 694a5d4b-3673-406b-a24a-d37fad33e549] Updating instance_info_cache with network_info: [{"id": "4c2c0fb5-064b-4c53-9498-77b9ddc16884", "address": "fa:16:3e:00:cb:60", "network": {"id": "e012ca9f-236a-42fd-a444-759508fbba7b", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-567894073-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.242", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b73ee7e490914f54925597f38c8cc05b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9f3a2eb5-353f-45c5-a73b-869626f4bb13", "external-id": "nsx-vlan-transportzone-411", "segmentation_id": 411, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4c2c0fb5-06", "ovs_interfaceid": "4c2c0fb5-064b-4c53-9498-77b9ddc16884", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 898.960391] env[61839]: DEBUG oslo_vmware.api [None req-4125a688-0d5e-4b32-904c-d54ba064855a tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Task: {'id': task-1314636, 'name': ReconfigVM_Task, 'duration_secs': 0.187226} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 898.960739] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-4125a688-0d5e-4b32-904c-d54ba064855a tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] [instance: 3f86a0d5-30fd-42cc-bd40-14bce9d0e56f] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-281391', 'volume_id': 'd2c3d3cb-6455-41aa-afdd-96adbd2dda18', 'name': 'volume-d2c3d3cb-6455-41aa-afdd-96adbd2dda18', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '3f86a0d5-30fd-42cc-bd40-14bce9d0e56f', 'attached_at': '', 'detached_at': '', 'volume_id': 'd2c3d3cb-6455-41aa-afdd-96adbd2dda18', 'serial': 'd2c3d3cb-6455-41aa-afdd-96adbd2dda18'} {{(pid=61839) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 899.209212] env[61839]: DEBUG oslo_vmware.api [None req-4887ddbd-b554-48d0-94f5-d697450e63c9 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Task: {'id': task-1314638, 'name': MoveVirtualDisk_Task} progress is 18%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.213930] env[61839]: DEBUG oslo_vmware.api [None req-e307782d-5b08-418e-9729-3f48548ba20b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': task-1314630, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.243267] env[61839]: DEBUG nova.compute.manager [req-e09113b6-3f0d-4d88-a789-1b8d074b0fb1 req-48d50c29-4397-467a-af56-9bcd2c050531 service nova] [instance: 65f34f9e-353a-4f94-8f79-9bda89451885] Received event network-vif-plugged-06a51f4e-0ef4-4148-b0a8-468d9345dc05 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 899.243267] env[61839]: DEBUG oslo_concurrency.lockutils [req-e09113b6-3f0d-4d88-a789-1b8d074b0fb1 req-48d50c29-4397-467a-af56-9bcd2c050531 service nova] Acquiring lock "65f34f9e-353a-4f94-8f79-9bda89451885-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 899.243463] env[61839]: DEBUG oslo_concurrency.lockutils [req-e09113b6-3f0d-4d88-a789-1b8d074b0fb1 req-48d50c29-4397-467a-af56-9bcd2c050531 service nova] Lock "65f34f9e-353a-4f94-8f79-9bda89451885-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 899.243625] env[61839]: DEBUG oslo_concurrency.lockutils [req-e09113b6-3f0d-4d88-a789-1b8d074b0fb1 req-48d50c29-4397-467a-af56-9bcd2c050531 service nova] Lock "65f34f9e-353a-4f94-8f79-9bda89451885-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 899.243914] env[61839]: DEBUG nova.compute.manager [req-e09113b6-3f0d-4d88-a789-1b8d074b0fb1 req-48d50c29-4397-467a-af56-9bcd2c050531 service nova] [instance: 65f34f9e-353a-4f94-8f79-9bda89451885] No waiting events found dispatching network-vif-plugged-06a51f4e-0ef4-4148-b0a8-468d9345dc05 {{(pid=61839) 
pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 899.245351] env[61839]: WARNING nova.compute.manager [req-e09113b6-3f0d-4d88-a789-1b8d074b0fb1 req-48d50c29-4397-467a-af56-9bcd2c050531 service nova] [instance: 65f34f9e-353a-4f94-8f79-9bda89451885] Received unexpected event network-vif-plugged-06a51f4e-0ef4-4148-b0a8-468d9345dc05 for instance with vm_state building and task_state spawning. [ 899.245351] env[61839]: DEBUG nova.compute.manager [req-e09113b6-3f0d-4d88-a789-1b8d074b0fb1 req-48d50c29-4397-467a-af56-9bcd2c050531 service nova] [instance: 65f34f9e-353a-4f94-8f79-9bda89451885] Received event network-changed-06a51f4e-0ef4-4148-b0a8-468d9345dc05 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 899.245351] env[61839]: DEBUG nova.compute.manager [req-e09113b6-3f0d-4d88-a789-1b8d074b0fb1 req-48d50c29-4397-467a-af56-9bcd2c050531 service nova] [instance: 65f34f9e-353a-4f94-8f79-9bda89451885] Refreshing instance network info cache due to event network-changed-06a51f4e-0ef4-4148-b0a8-468d9345dc05. {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 899.245351] env[61839]: DEBUG oslo_concurrency.lockutils [req-e09113b6-3f0d-4d88-a789-1b8d074b0fb1 req-48d50c29-4397-467a-af56-9bcd2c050531 service nova] Acquiring lock "refresh_cache-65f34f9e-353a-4f94-8f79-9bda89451885" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 899.245766] env[61839]: DEBUG oslo_concurrency.lockutils [req-e09113b6-3f0d-4d88-a789-1b8d074b0fb1 req-48d50c29-4397-467a-af56-9bcd2c050531 service nova] Acquired lock "refresh_cache-65f34f9e-353a-4f94-8f79-9bda89451885" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 899.245908] env[61839]: DEBUG nova.network.neutron [req-e09113b6-3f0d-4d88-a789-1b8d074b0fb1 req-48d50c29-4397-467a-af56-9bcd2c050531 service nova] [instance: 65f34f9e-353a-4f94-8f79-9bda89451885] Refreshing network info cache for port 06a51f4e-0ef4-4148-b0a8-468d9345dc05 {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 899.255470] env[61839]: DEBUG oslo_concurrency.lockutils [None req-56232971-235f-4baa-8514-44b4e05db378 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.553s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 899.256678] env[61839]: DEBUG nova.compute.manager [None req-56232971-235f-4baa-8514-44b4e05db378 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 0d42326a-9958-463a-90ae-34fb55e99c5b] Start building networks asynchronously for instance. 
{{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 899.262399] env[61839]: DEBUG oslo_concurrency.lockutils [None req-40823cea-b50f-4b8d-809e-3a11293f7329 tempest-ServerMetadataTestJSON-214230339 tempest-ServerMetadataTestJSON-214230339-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.806s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 899.264553] env[61839]: INFO nova.compute.claims [None req-40823cea-b50f-4b8d-809e-3a11293f7329 tempest-ServerMetadataTestJSON-214230339 tempest-ServerMetadataTestJSON-214230339-project-member] [instance: bac4c882-a23d-412f-ae98-f4f21d86681a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 899.268665] env[61839]: DEBUG oslo_concurrency.lockutils [None req-095248fd-e632-43a1-88be-4cc8540a5623 tempest-ServerGroupTestJSON-1627784352 tempest-ServerGroupTestJSON-1627784352-project-member] Acquiring lock "refresh_cache-65f34f9e-353a-4f94-8f79-9bda89451885" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 899.349498] env[61839]: DEBUG nova.virt.hardware [None req-ffc025d6-10c2-4e09-b5f6-377b5ab1692a tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-18T16:51:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=e497cc62-282a-4a70-9770-22d80d8a1013,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 899.350388] env[61839]: DEBUG nova.virt.hardware [None req-ffc025d6-10c2-4e09-b5f6-377b5ab1692a tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Flavor limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 899.350793] env[61839]: DEBUG nova.virt.hardware [None req-ffc025d6-10c2-4e09-b5f6-377b5ab1692a tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Image limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 899.351171] env[61839]: DEBUG nova.virt.hardware [None req-ffc025d6-10c2-4e09-b5f6-377b5ab1692a tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Flavor pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 899.351525] env[61839]: DEBUG nova.virt.hardware [None req-ffc025d6-10c2-4e09-b5f6-377b5ab1692a tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Image pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 899.351815] env[61839]: DEBUG nova.virt.hardware [None req-ffc025d6-10c2-4e09-b5f6-377b5ab1692a tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Chose sockets=0, 
cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 899.352200] env[61839]: DEBUG nova.virt.hardware [None req-ffc025d6-10c2-4e09-b5f6-377b5ab1692a tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 899.352589] env[61839]: DEBUG nova.virt.hardware [None req-ffc025d6-10c2-4e09-b5f6-377b5ab1692a tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 899.352943] env[61839]: DEBUG nova.virt.hardware [None req-ffc025d6-10c2-4e09-b5f6-377b5ab1692a tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Got 1 possible topologies {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 899.353342] env[61839]: DEBUG nova.virt.hardware [None req-ffc025d6-10c2-4e09-b5f6-377b5ab1692a tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 899.353662] env[61839]: DEBUG nova.virt.hardware [None req-ffc025d6-10c2-4e09-b5f6-377b5ab1692a tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 899.360548] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-ffc025d6-10c2-4e09-b5f6-377b5ab1692a tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: fa8a2265-291e-4424-bea1-72574e495a72] Reconfiguring VM instance instance-0000004f to detach disk 2000 {{(pid=61839) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 899.361367] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c3e1cace-5856-47fe-b5ba-194158dd7405 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.386570] env[61839]: DEBUG oslo_vmware.api [None req-ffc025d6-10c2-4e09-b5f6-377b5ab1692a tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Waiting for the task: (returnval){ [ 899.386570] env[61839]: value = "task-1314639" [ 899.386570] env[61839]: _type = "Task" [ 899.386570] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 899.400868] env[61839]: DEBUG oslo_vmware.api [None req-ffc025d6-10c2-4e09-b5f6-377b5ab1692a tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Task: {'id': task-1314639, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
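The nova.virt.hardware lines above walk the CPU-topology search end to end: with no flavor or image constraints the per-dimension limits default to 65536, and for a 1-vCPU flavor the only sockets:cores:threads factorization is 1:1:1, hence "Got 1 possible topologies". A simplified sketch of that enumeration (illustrative only, not the actual _get_possible_cpu_topologies implementation; VirtCPUTopology here is a stand-in dataclass):

    from dataclasses import dataclass

    @dataclass(frozen=True)
    class VirtCPUTopology:
        sockets: int
        cores: int
        threads: int

    def possible_topologies(vcpus, maximum):
        # Yield every sockets*cores*threads factorization of the vCPU
        # count that fits under the per-dimension maxima; for vcpus=1
        # this yields only 1:1:1, matching the log above.
        for s in range(1, min(maximum.sockets, vcpus) + 1):
            for c in range(1, min(maximum.cores, vcpus) + 1):
                for t in range(1, min(maximum.threads, vcpus) + 1):
                    if s * c * t == vcpus:
                        yield VirtCPUTopology(s, c, t)

    print(list(possible_topologies(1, VirtCPUTopology(65536, 65536, 65536))))
    # -> [VirtCPUTopology(sockets=1, cores=1, threads=1)]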
[ 899.436134] env[61839]: DEBUG oslo_concurrency.lockutils [None req-d2f1d7dc-78e2-44fa-94f8-ee4c41a57b55 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Releasing lock "refresh_cache-694a5d4b-3673-406b-a24a-d37fad33e549" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 899.672563] env[61839]: DEBUG oslo_concurrency.lockutils [None req-7e58e74a-aa1f-4c3c-9aa4-d23c1b92735c tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Acquiring lock "56369316-a445-4a2a-a0a6-967074104e19" by "nova.compute.manager.ComputeManager.attach_volume.<locals>.do_attach_volume" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 899.672876] env[61839]: DEBUG oslo_concurrency.lockutils [None req-7e58e74a-aa1f-4c3c-9aa4-d23c1b92735c tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Lock "56369316-a445-4a2a-a0a6-967074104e19" acquired by "nova.compute.manager.ComputeManager.attach_volume.<locals>.do_attach_volume" :: waited 0.001s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 899.673199] env[61839]: INFO nova.compute.manager [None req-7e58e74a-aa1f-4c3c-9aa4-d23c1b92735c tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 56369316-a445-4a2a-a0a6-967074104e19] Attaching volume 3c43b563-deaf-4d67-8cda-7d714c1bfac1 to /dev/sdb [ 899.704575] env[61839]: DEBUG oslo_vmware.api [None req-4887ddbd-b554-48d0-94f5-d697450e63c9 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Task: {'id': task-1314638, 'name': MoveVirtualDisk_Task} progress is 38%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.709710] env[61839]: DEBUG oslo_vmware.api [None req-e307782d-5b08-418e-9729-3f48548ba20b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': task-1314630, 'name': ReconfigVM_Task} progress is 14%.
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.715540] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35f5b7fb-2187-472e-a508-a017c551a90e {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.729405] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c43baca-4db5-415e-a376-39da24626155 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.748456] env[61839]: DEBUG nova.virt.block_device [None req-7e58e74a-aa1f-4c3c-9aa4-d23c1b92735c tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 56369316-a445-4a2a-a0a6-967074104e19] Updating existing volume attachment record: 88bb21d3-c985-471d-97c2-e59d0a40e62d {{(pid=61839) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 899.770113] env[61839]: DEBUG nova.compute.utils [None req-56232971-235f-4baa-8514-44b4e05db378 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Using /dev/sd instead of None {{(pid=61839) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 899.775027] env[61839]: DEBUG nova.compute.manager [None req-56232971-235f-4baa-8514-44b4e05db378 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 0d42326a-9958-463a-90ae-34fb55e99c5b] Allocating IP information in the background. {{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 899.775027] env[61839]: DEBUG nova.network.neutron [None req-56232971-235f-4baa-8514-44b4e05db378 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 0d42326a-9958-463a-90ae-34fb55e99c5b] allocate_for_instance() {{(pid=61839) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 899.852557] env[61839]: DEBUG nova.policy [None req-56232971-235f-4baa-8514-44b4e05db378 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7b9ca67c278b4cb9a83ec3c6ce42af5d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5aba1e066cb4400dbbacc92f393962e6', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61839) authorize /opt/stack/nova/nova/policy.py:201}} [ 899.856738] env[61839]: DEBUG nova.network.neutron [req-e09113b6-3f0d-4d88-a789-1b8d074b0fb1 req-48d50c29-4397-467a-af56-9bcd2c050531 service nova] [instance: 65f34f9e-353a-4f94-8f79-9bda89451885] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 899.900676] env[61839]: DEBUG oslo_vmware.api [None req-ffc025d6-10c2-4e09-b5f6-377b5ab1692a tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Task: {'id': task-1314639, 'name': ReconfigVM_Task, 'duration_secs': 0.295616} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 899.901216] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-ffc025d6-10c2-4e09-b5f6-377b5ab1692a tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: fa8a2265-291e-4424-bea1-72574e495a72] Reconfigured VM instance instance-0000004f to detach disk 2000 {{(pid=61839) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 899.902311] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eafcf909-b2a0-42f0-88b8-75562365e2e6 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.947237] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-ffc025d6-10c2-4e09-b5f6-377b5ab1692a tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: fa8a2265-291e-4424-bea1-72574e495a72] Reconfiguring VM instance instance-0000004f to attach disk [datastore2] fa8a2265-291e-4424-bea1-72574e495a72/fa8a2265-291e-4424-bea1-72574e495a72.vmdk or device None with type thin {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 899.947628] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fa0b1482-5c1d-41dd-b5fe-131c37448d55 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.979846] env[61839]: DEBUG oslo_vmware.api [None req-ffc025d6-10c2-4e09-b5f6-377b5ab1692a tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Waiting for the task: (returnval){ [ 899.979846] env[61839]: value = "task-1314641" [ 899.979846] env[61839]: _type = "Task" [ 899.979846] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 899.994251] env[61839]: DEBUG oslo_vmware.api [None req-ffc025d6-10c2-4e09-b5f6-377b5ab1692a tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Task: {'id': task-1314641, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.006789] env[61839]: DEBUG nova.objects.instance [None req-4125a688-0d5e-4b32-904c-d54ba064855a tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Lazy-loading 'flavor' on Instance uuid 3f86a0d5-30fd-42cc-bd40-14bce9d0e56f {{(pid=61839) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 900.127568] env[61839]: DEBUG nova.network.neutron [req-e09113b6-3f0d-4d88-a789-1b8d074b0fb1 req-48d50c29-4397-467a-af56-9bcd2c050531 service nova] [instance: 65f34f9e-353a-4f94-8f79-9bda89451885] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 900.201904] env[61839]: DEBUG oslo_vmware.api [None req-4887ddbd-b554-48d0-94f5-d697450e63c9 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Task: {'id': task-1314638, 'name': MoveVirtualDisk_Task} progress is 57%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.212756] env[61839]: DEBUG oslo_vmware.api [None req-e307782d-5b08-418e-9729-3f48548ba20b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': task-1314630, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.282640] env[61839]: DEBUG nova.compute.manager [None req-56232971-235f-4baa-8514-44b4e05db378 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 0d42326a-9958-463a-90ae-34fb55e99c5b] Start building block device mappings for instance. {{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 900.326848] env[61839]: DEBUG nova.network.neutron [None req-56232971-235f-4baa-8514-44b4e05db378 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 0d42326a-9958-463a-90ae-34fb55e99c5b] Successfully created port: fdc0f4f9-f380-4153-b4fb-7073fe3ac06e {{(pid=61839) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 900.412804] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-d2f1d7dc-78e2-44fa-94f8-ee4c41a57b55 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: 694a5d4b-3673-406b-a24a-d37fad33e549] Destroying instance {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 900.413810] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb5d1daf-80f1-4a3c-9df3-cb4595cc8a3e {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.431186] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-d2f1d7dc-78e2-44fa-94f8-ee4c41a57b55 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: 694a5d4b-3673-406b-a24a-d37fad33e549] Unregistering the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 900.431186] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6a320246-0c01-4681-9074-f10f9c100620 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.498864] env[61839]: DEBUG oslo_vmware.api [None req-ffc025d6-10c2-4e09-b5f6-377b5ab1692a tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Task: {'id': task-1314641, 'name': ReconfigVM_Task, 'duration_secs': 0.488472} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 900.501871] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-ffc025d6-10c2-4e09-b5f6-377b5ab1692a tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: fa8a2265-291e-4424-bea1-72574e495a72] Reconfigured VM instance instance-0000004f to attach disk [datastore2] fa8a2265-291e-4424-bea1-72574e495a72/fa8a2265-291e-4424-bea1-72574e495a72.vmdk or device None with type thin {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 900.501871] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-ffc025d6-10c2-4e09-b5f6-377b5ab1692a tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: fa8a2265-291e-4424-bea1-72574e495a72] Updating instance 'fa8a2265-291e-4424-bea1-72574e495a72' progress to 50 {{(pid=61839) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 900.512100] env[61839]: DEBUG oslo_concurrency.lockutils [None req-4125a688-0d5e-4b32-904c-d54ba064855a tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Lock "3f86a0d5-30fd-42cc-bd40-14bce9d0e56f" "released" by "nova.compute.manager.ComputeManager.attach_volume.<locals>.do_attach_volume" :: held 8.352s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 900.600183] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-d2f1d7dc-78e2-44fa-94f8-ee4c41a57b55 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: 694a5d4b-3673-406b-a24a-d37fad33e549] Unregistered the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 900.600183] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-d2f1d7dc-78e2-44fa-94f8-ee4c41a57b55 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: 694a5d4b-3673-406b-a24a-d37fad33e549] Deleting contents of the VM from datastore datastore1 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 900.600489] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-d2f1d7dc-78e2-44fa-94f8-ee4c41a57b55 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Deleting the datastore file [datastore1] 694a5d4b-3673-406b-a24a-d37fad33e549 {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 900.600638] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-954fba61-064b-442d-ae15-54b494c3df1a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.610041] env[61839]: DEBUG oslo_vmware.api [None req-d2f1d7dc-78e2-44fa-94f8-ee4c41a57b55 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Waiting for the task: (returnval){ [ 900.610041] env[61839]: value = "task-1314645" [ 900.610041] env[61839]: _type = "Task" [ 900.610041] env[61839]: } to complete.
{{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 900.627623] env[61839]: DEBUG oslo_vmware.api [None req-d2f1d7dc-78e2-44fa-94f8-ee4c41a57b55 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Task: {'id': task-1314645, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.633053] env[61839]: DEBUG oslo_concurrency.lockutils [req-e09113b6-3f0d-4d88-a789-1b8d074b0fb1 req-48d50c29-4397-467a-af56-9bcd2c050531 service nova] Releasing lock "refresh_cache-65f34f9e-353a-4f94-8f79-9bda89451885" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 900.633973] env[61839]: DEBUG oslo_concurrency.lockutils [None req-095248fd-e632-43a1-88be-4cc8540a5623 tempest-ServerGroupTestJSON-1627784352 tempest-ServerGroupTestJSON-1627784352-project-member] Acquired lock "refresh_cache-65f34f9e-353a-4f94-8f79-9bda89451885" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 900.634203] env[61839]: DEBUG nova.network.neutron [None req-095248fd-e632-43a1-88be-4cc8540a5623 tempest-ServerGroupTestJSON-1627784352 tempest-ServerGroupTestJSON-1627784352-project-member] [instance: 65f34f9e-353a-4f94-8f79-9bda89451885] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 900.648794] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09b76777-e819-4cfb-9d09-b878edd274a4 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.660418] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1acbb90-dfcb-4cfc-a054-c89e475b667a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.712068] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-880841e4-b153-43f2-b6bc-8b685293753c {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.729157] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48acf6ea-4e3e-4866-b4a2-e2d31859f384 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.732062] env[61839]: DEBUG oslo_vmware.api [None req-e307782d-5b08-418e-9729-3f48548ba20b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': task-1314630, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.732390] env[61839]: DEBUG oslo_vmware.api [None req-4887ddbd-b554-48d0-94f5-d697450e63c9 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Task: {'id': task-1314638, 'name': MoveVirtualDisk_Task} progress is 80%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.746954] env[61839]: DEBUG nova.compute.provider_tree [None req-40823cea-b50f-4b8d-809e-3a11293f7329 tempest-ServerMetadataTestJSON-214230339 tempest-ServerMetadataTestJSON-214230339-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 900.892126] env[61839]: DEBUG oslo_vmware.rw_handles [None req-ddf21196-dc08-4e00-b99c-d950eff89ba4 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/528bc97f-edf2-b817-a5e1-e0e0fb5e6883/disk-0.vmdk. {{(pid=61839) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 900.893413] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3c2b98f-6bae-45d3-b0aa-2614090fb970 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.904744] env[61839]: DEBUG oslo_vmware.rw_handles [None req-ddf21196-dc08-4e00-b99c-d950eff89ba4 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/528bc97f-edf2-b817-a5e1-e0e0fb5e6883/disk-0.vmdk is in state: ready. {{(pid=61839) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 900.904942] env[61839]: ERROR oslo_vmware.rw_handles [None req-ddf21196-dc08-4e00-b99c-d950eff89ba4 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/528bc97f-edf2-b817-a5e1-e0e0fb5e6883/disk-0.vmdk due to incomplete transfer. [ 900.905223] env[61839]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-e5e79619-1054-4350-9ab1-4707812acb7c {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.916655] env[61839]: DEBUG oslo_vmware.rw_handles [None req-ddf21196-dc08-4e00-b99c-d950eff89ba4 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/528bc97f-edf2-b817-a5e1-e0e0fb5e6883/disk-0.vmdk. 
{{(pid=61839) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 900.917029] env[61839]: DEBUG nova.virt.vmwareapi.images [None req-ddf21196-dc08-4e00-b99c-d950eff89ba4 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4] Uploaded image d8ae1da1-4d7c-4f47-ae1e-54319145ff4b to the Glance image server {{(pid=61839) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 900.919674] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-ddf21196-dc08-4e00-b99c-d950eff89ba4 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4] Destroying the VM {{(pid=61839) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 900.919674] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-8d3a6189-77b8-4412-9fa1-7d725c43cfa9 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.935412] env[61839]: DEBUG oslo_vmware.api [None req-ddf21196-dc08-4e00-b99c-d950eff89ba4 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Waiting for the task: (returnval){ [ 900.935412] env[61839]: value = "task-1314646" [ 900.935412] env[61839]: _type = "Task" [ 900.935412] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 900.948864] env[61839]: DEBUG oslo_vmware.api [None req-ddf21196-dc08-4e00-b99c-d950eff89ba4 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1314646, 'name': Destroy_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.015076] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3e8cca5-4654-4085-98e5-bec9e85a4b79 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.039101] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46971ff8-dc7a-4bb6-8b13-ece34440eeeb {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.060296] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-ffc025d6-10c2-4e09-b5f6-377b5ab1692a tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: fa8a2265-291e-4424-bea1-72574e495a72] Updating instance 'fa8a2265-291e-4424-bea1-72574e495a72' progress to 67 {{(pid=61839) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 901.123395] env[61839]: DEBUG oslo_vmware.api [None req-d2f1d7dc-78e2-44fa-94f8-ee4c41a57b55 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Task: {'id': task-1314645, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.217627] env[61839]: DEBUG oslo_vmware.api [None req-4887ddbd-b554-48d0-94f5-d697450e63c9 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Task: {'id': task-1314638, 'name': MoveVirtualDisk_Task} progress is 100%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.220739] env[61839]: DEBUG oslo_vmware.api [None req-e307782d-5b08-418e-9729-3f48548ba20b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': task-1314630, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.238138] env[61839]: DEBUG nova.network.neutron [None req-095248fd-e632-43a1-88be-4cc8540a5623 tempest-ServerGroupTestJSON-1627784352 tempest-ServerGroupTestJSON-1627784352-project-member] [instance: 65f34f9e-353a-4f94-8f79-9bda89451885] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 901.255832] env[61839]: DEBUG nova.scheduler.client.report [None req-40823cea-b50f-4b8d-809e-3a11293f7329 tempest-ServerMetadataTestJSON-214230339 tempest-ServerMetadataTestJSON-214230339-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 901.294323] env[61839]: DEBUG nova.compute.manager [None req-56232971-235f-4baa-8514-44b4e05db378 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 0d42326a-9958-463a-90ae-34fb55e99c5b] Start spawning the instance on the hypervisor. 
{{(pid=61839) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 901.321274] env[61839]: DEBUG nova.virt.hardware [None req-56232971-235f-4baa-8514-44b4e05db378 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-18T16:51:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-18T16:50:57Z,direct_url=,disk_format='vmdk',id=e497cc62-282a-4a70-9770-22d80d8a1013,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a98e034276e44534a9feace637762da7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-18T16:50:58Z,virtual_size=,visibility=), allow threads: False {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 901.321545] env[61839]: DEBUG nova.virt.hardware [None req-56232971-235f-4baa-8514-44b4e05db378 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Flavor limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 901.321710] env[61839]: DEBUG nova.virt.hardware [None req-56232971-235f-4baa-8514-44b4e05db378 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Image limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 901.321905] env[61839]: DEBUG nova.virt.hardware [None req-56232971-235f-4baa-8514-44b4e05db378 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Flavor pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 901.322065] env[61839]: DEBUG nova.virt.hardware [None req-56232971-235f-4baa-8514-44b4e05db378 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Image pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 901.322222] env[61839]: DEBUG nova.virt.hardware [None req-56232971-235f-4baa-8514-44b4e05db378 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 901.322467] env[61839]: DEBUG nova.virt.hardware [None req-56232971-235f-4baa-8514-44b4e05db378 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 901.322633] env[61839]: DEBUG nova.virt.hardware [None req-56232971-235f-4baa-8514-44b4e05db378 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 901.322815] env[61839]: DEBUG nova.virt.hardware [None req-56232971-235f-4baa-8514-44b4e05db378 tempest-ServersTestJSON-2052072083 
tempest-ServersTestJSON-2052072083-project-member] Got 1 possible topologies {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 901.322982] env[61839]: DEBUG nova.virt.hardware [None req-56232971-235f-4baa-8514-44b4e05db378 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 901.323173] env[61839]: DEBUG nova.virt.hardware [None req-56232971-235f-4baa-8514-44b4e05db378 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 901.324096] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-381876aa-fec5-4383-a4a0-6d36ff32d028 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.332514] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcee7826-dddf-49e2-abcd-6df73ed85fd9 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.435829] env[61839]: DEBUG oslo_concurrency.lockutils [None req-f7ea511a-164d-41dd-8cb8-42ac6801a245 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Acquiring lock "3f86a0d5-30fd-42cc-bd40-14bce9d0e56f" by "nova.compute.manager.ComputeManager.detach_volume.<locals>.do_detach_volume" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 901.436234] env[61839]: DEBUG oslo_concurrency.lockutils [None req-f7ea511a-164d-41dd-8cb8-42ac6801a245 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Lock "3f86a0d5-30fd-42cc-bd40-14bce9d0e56f" acquired by "nova.compute.manager.ComputeManager.detach_volume.<locals>.do_detach_volume" :: waited 0.001s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 901.446968] env[61839]: DEBUG oslo_vmware.api [None req-ddf21196-dc08-4e00-b99c-d950eff89ba4 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1314646, 'name': Destroy_Task, 'duration_secs': 0.386222} completed successfully.
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 901.448118] env[61839]: INFO nova.virt.vmwareapi.vm_util [None req-ddf21196-dc08-4e00-b99c-d950eff89ba4 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4] Destroyed the VM [ 901.448118] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-ddf21196-dc08-4e00-b99c-d950eff89ba4 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4] Deleting Snapshot of the VM instance {{(pid=61839) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 901.448480] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-33b353ba-f60c-4e7f-8458-cb2dc65a24db {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.455926] env[61839]: DEBUG oslo_vmware.api [None req-ddf21196-dc08-4e00-b99c-d950eff89ba4 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Waiting for the task: (returnval){ [ 901.455926] env[61839]: value = "task-1314647" [ 901.455926] env[61839]: _type = "Task" [ 901.455926] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 901.467227] env[61839]: DEBUG oslo_vmware.api [None req-ddf21196-dc08-4e00-b99c-d950eff89ba4 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1314647, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.623190] env[61839]: DEBUG oslo_vmware.api [None req-d2f1d7dc-78e2-44fa-94f8-ee4c41a57b55 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Task: {'id': task-1314645, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.747306} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 901.623493] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-d2f1d7dc-78e2-44fa-94f8-ee4c41a57b55 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Deleted the datastore file {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 901.623713] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-d2f1d7dc-78e2-44fa-94f8-ee4c41a57b55 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: 694a5d4b-3673-406b-a24a-d37fad33e549] Deleted contents of the VM from datastore datastore1 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 901.623924] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-d2f1d7dc-78e2-44fa-94f8-ee4c41a57b55 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: 694a5d4b-3673-406b-a24a-d37fad33e549] Instance destroyed {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 901.646798] env[61839]: INFO nova.scheduler.client.report [None req-d2f1d7dc-78e2-44fa-94f8-ee4c41a57b55 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Deleted allocations for instance 694a5d4b-3673-406b-a24a-d37fad33e549 [ 901.698707] env[61839]: DEBUG nova.network.neutron [None req-ffc025d6-10c2-4e09-b5f6-377b5ab1692a tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: fa8a2265-291e-4424-bea1-72574e495a72] Port 471fcd5e-1ea3-4791-9a4d-b68197f8068e binding to destination host cpu-1 is already ACTIVE {{(pid=61839) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 901.718575] env[61839]: DEBUG oslo_vmware.api [None req-4887ddbd-b554-48d0-94f5-d697450e63c9 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Task: {'id': task-1314638, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.607058} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 901.721739] env[61839]: INFO nova.virt.vmwareapi.ds_util [None req-4887ddbd-b554-48d0-94f5-d697450e63c9 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_3bd8a668-775b-49dc-b130-d7927d193b00/OSTACK_IMG_3bd8a668-775b-49dc-b130-d7927d193b00.vmdk to [datastore1] devstack-image-cache_base/47c13bd8-eb3a-4bae-9aff-f4a48e5fa7b6/47c13bd8-eb3a-4bae-9aff-f4a48e5fa7b6.vmdk. 
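The MoveVirtualDisk_Task above (task-1314638) is a typical oslo.vmware task: the API call returns a task reference immediately, and wait_for_task polls it until a terminal state, emitting the "progress is N%." lines along the way. A minimal sketch of that poll loop, assuming a hypothetical session.get_task_info() helper in place of the PropertyCollector round-trip oslo.vmware actually performs:

    import time

    POLL_INTERVAL = 0.5  # seconds between polls

    def wait_for_task(session, task_ref):
        # Poll a vCenter task until it succeeds or fails; the progress
        # lines in the log correspond to the intermediate iterations.
        # `session.get_task_info` is a stand-in assumed to return an
        # object with .state, .progress and .error attributes.
        while True:
            info = session.get_task_info(task_ref)
            if info.state == 'success':
                return info
            if info.state == 'error':
                raise RuntimeError('%s failed: %s' % (task_ref, info.error))
            print('Task %s progress is %s%%.' % (task_ref, info.progress))
            time.sleep(POLL_INTERVAL)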
[ 901.721952] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-4887ddbd-b554-48d0-94f5-d697450e63c9 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: 042183e2-d203-4d07-a668-df24ba50e242] Cleaning up location [datastore1] OSTACK_IMG_3bd8a668-775b-49dc-b130-d7927d193b00 {{(pid=61839) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 901.722141] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-4887ddbd-b554-48d0-94f5-d697450e63c9 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_3bd8a668-775b-49dc-b130-d7927d193b00 {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 901.722696] env[61839]: DEBUG oslo_vmware.api [None req-e307782d-5b08-418e-9729-3f48548ba20b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': task-1314630, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.724415] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-34dcee4e-ee85-4f85-b62f-34571e57507f {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.730080] env[61839]: DEBUG oslo_vmware.api [None req-4887ddbd-b554-48d0-94f5-d697450e63c9 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Waiting for the task: (returnval){ [ 901.730080] env[61839]: value = "task-1314648" [ 901.730080] env[61839]: _type = "Task" [ 901.730080] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 901.741745] env[61839]: DEBUG oslo_vmware.api [None req-4887ddbd-b554-48d0-94f5-d697450e63c9 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Task: {'id': task-1314648, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.756058] env[61839]: DEBUG oslo_concurrency.lockutils [None req-40823cea-b50f-4b8d-809e-3a11293f7329 tempest-ServerMetadataTestJSON-214230339 tempest-ServerMetadataTestJSON-214230339-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.494s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 901.756599] env[61839]: DEBUG nova.compute.manager [None req-40823cea-b50f-4b8d-809e-3a11293f7329 tempest-ServerMetadataTestJSON-214230339 tempest-ServerMetadataTestJSON-214230339-project-member] [instance: bac4c882-a23d-412f-ae98-f4f21d86681a] Start building networks asynchronously for instance. 
{{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 901.759190] env[61839]: DEBUG oslo_concurrency.lockutils [None req-c8b7ef29-926a-40c9-9023-eb5013fb90e1 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.465s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 901.761320] env[61839]: INFO nova.compute.claims [None req-c8b7ef29-926a-40c9-9023-eb5013fb90e1 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 7f7b3f51-3e96-49f1-a84a-81ae649e6938] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 901.774150] env[61839]: DEBUG nova.compute.manager [req-72a677d0-7c76-4401-92aa-f35526d4cc21 req-b60ca049-896e-40fc-8229-f4b39b58babd service nova] [instance: 694a5d4b-3673-406b-a24a-d37fad33e549] Received event network-vif-unplugged-4c2c0fb5-064b-4c53-9498-77b9ddc16884 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 901.774478] env[61839]: DEBUG oslo_concurrency.lockutils [req-72a677d0-7c76-4401-92aa-f35526d4cc21 req-b60ca049-896e-40fc-8229-f4b39b58babd service nova] Acquiring lock "694a5d4b-3673-406b-a24a-d37fad33e549-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 901.774871] env[61839]: DEBUG oslo_concurrency.lockutils [req-72a677d0-7c76-4401-92aa-f35526d4cc21 req-b60ca049-896e-40fc-8229-f4b39b58babd service nova] Lock "694a5d4b-3673-406b-a24a-d37fad33e549-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 901.775239] env[61839]: DEBUG oslo_concurrency.lockutils [req-72a677d0-7c76-4401-92aa-f35526d4cc21 req-b60ca049-896e-40fc-8229-f4b39b58babd service nova] Lock "694a5d4b-3673-406b-a24a-d37fad33e549-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 901.775524] env[61839]: DEBUG nova.compute.manager [req-72a677d0-7c76-4401-92aa-f35526d4cc21 req-b60ca049-896e-40fc-8229-f4b39b58babd service nova] [instance: 694a5d4b-3673-406b-a24a-d37fad33e549] No waiting events found dispatching network-vif-unplugged-4c2c0fb5-064b-4c53-9498-77b9ddc16884 {{(pid=61839) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 901.775741] env[61839]: WARNING nova.compute.manager [req-72a677d0-7c76-4401-92aa-f35526d4cc21 req-b60ca049-896e-40fc-8229-f4b39b58babd service nova] [instance: 694a5d4b-3673-406b-a24a-d37fad33e549] Received unexpected event network-vif-unplugged-4c2c0fb5-064b-4c53-9498-77b9ddc16884 for instance with vm_state shelved_offloaded and task_state None.
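The acquiring/acquired/released triplets around _pop_event above are oslo.concurrency's lockutils reporting each lock transition along with the wait and hold times. A minimal sketch of the same per-instance event serialization, using the real lockutils.lock() context manager but with illustrative names and a plain dict standing in for the event store:

    from oslo_concurrency import lockutils

    def pop_instance_event(instance_uuid, pending_events):
        # Entering and leaving this block is what produces the
        # "acquired :: waited Ns" / "released :: held Ns" DEBUG
        # lines for the "<uuid>-events" locks seen above.
        with lockutils.lock('%s-events' % instance_uuid):
            return pending_events.pop(instance_uuid, None)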
[ 901.775893] env[61839]: DEBUG nova.compute.manager [req-72a677d0-7c76-4401-92aa-f35526d4cc21 req-b60ca049-896e-40fc-8229-f4b39b58babd service nova] [instance: 694a5d4b-3673-406b-a24a-d37fad33e549] Received event network-changed-4c2c0fb5-064b-4c53-9498-77b9ddc16884 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 901.776075] env[61839]: DEBUG nova.compute.manager [req-72a677d0-7c76-4401-92aa-f35526d4cc21 req-b60ca049-896e-40fc-8229-f4b39b58babd service nova] [instance: 694a5d4b-3673-406b-a24a-d37fad33e549] Refreshing instance network info cache due to event network-changed-4c2c0fb5-064b-4c53-9498-77b9ddc16884. {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 901.777194] env[61839]: DEBUG oslo_concurrency.lockutils [req-72a677d0-7c76-4401-92aa-f35526d4cc21 req-b60ca049-896e-40fc-8229-f4b39b58babd service nova] Acquiring lock "refresh_cache-694a5d4b-3673-406b-a24a-d37fad33e549" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 901.777194] env[61839]: DEBUG oslo_concurrency.lockutils [req-72a677d0-7c76-4401-92aa-f35526d4cc21 req-b60ca049-896e-40fc-8229-f4b39b58babd service nova] Acquired lock "refresh_cache-694a5d4b-3673-406b-a24a-d37fad33e549" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 901.777194] env[61839]: DEBUG nova.network.neutron [req-72a677d0-7c76-4401-92aa-f35526d4cc21 req-b60ca049-896e-40fc-8229-f4b39b58babd service nova] [instance: 694a5d4b-3673-406b-a24a-d37fad33e549] Refreshing network info cache for port 4c2c0fb5-064b-4c53-9498-77b9ddc16884 {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 901.819755] env[61839]: DEBUG nova.network.neutron [None req-095248fd-e632-43a1-88be-4cc8540a5623 tempest-ServerGroupTestJSON-1627784352 tempest-ServerGroupTestJSON-1627784352-project-member] [instance: 65f34f9e-353a-4f94-8f79-9bda89451885] Updating instance_info_cache with network_info: [{"id": "06a51f4e-0ef4-4148-b0a8-468d9345dc05", "address": "fa:16:3e:3e:81:96", "network": {"id": "e8ab45f5-4096-45e6-92fb-825c519ddcc2", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-2075574468-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1a03043943ea42c6b2d13c9ed76726d0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "74e6f6e0-95e6-4531-99e9-0e78350fb655", "external-id": "nsx-vlan-transportzone-896", "segmentation_id": 896, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap06a51f4e-0e", "ovs_interfaceid": "06a51f4e-0ef4-4148-b0a8-468d9345dc05", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 901.943790] env[61839]: INFO nova.compute.manager [None req-f7ea511a-164d-41dd-8cb8-42ac6801a245 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] [instance: 
3f86a0d5-30fd-42cc-bd40-14bce9d0e56f] Detaching volume d2c3d3cb-6455-41aa-afdd-96adbd2dda18 [ 901.968696] env[61839]: DEBUG oslo_vmware.api [None req-ddf21196-dc08-4e00-b99c-d950eff89ba4 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1314647, 'name': RemoveSnapshot_Task, 'duration_secs': 0.495504} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 901.969606] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-ddf21196-dc08-4e00-b99c-d950eff89ba4 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4] Deleted Snapshot of the VM instance {{(pid=61839) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 901.969997] env[61839]: INFO nova.compute.manager [None req-ddf21196-dc08-4e00-b99c-d950eff89ba4 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4] Took 13.49 seconds to snapshot the instance on the hypervisor. [ 901.987031] env[61839]: INFO nova.virt.block_device [None req-f7ea511a-164d-41dd-8cb8-42ac6801a245 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] [instance: 3f86a0d5-30fd-42cc-bd40-14bce9d0e56f] Attempting to driver detach volume d2c3d3cb-6455-41aa-afdd-96adbd2dda18 from mountpoint /dev/sdb [ 901.987031] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-f7ea511a-164d-41dd-8cb8-42ac6801a245 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] [instance: 3f86a0d5-30fd-42cc-bd40-14bce9d0e56f] Volume detach. 
Driver type: vmdk {{(pid=61839) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 901.987031] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-f7ea511a-164d-41dd-8cb8-42ac6801a245 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] [instance: 3f86a0d5-30fd-42cc-bd40-14bce9d0e56f] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-281391', 'volume_id': 'd2c3d3cb-6455-41aa-afdd-96adbd2dda18', 'name': 'volume-d2c3d3cb-6455-41aa-afdd-96adbd2dda18', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '3f86a0d5-30fd-42cc-bd40-14bce9d0e56f', 'attached_at': '', 'detached_at': '', 'volume_id': 'd2c3d3cb-6455-41aa-afdd-96adbd2dda18', 'serial': 'd2c3d3cb-6455-41aa-afdd-96adbd2dda18'} {{(pid=61839) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 901.987031] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ab00a36-cf1e-4b63-b9c3-98ada45c63a6 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.013574] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf3282a8-6235-4807-87fc-117aa8cfbbe2 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.021420] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9568c925-7bdf-427d-a0c5-30afcc12446c {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.044688] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90c296c2-eb6d-406e-86d1-40349145740a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.064022] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-f7ea511a-164d-41dd-8cb8-42ac6801a245 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] The volume has not been displaced from its original location: [datastore1] volume-d2c3d3cb-6455-41aa-afdd-96adbd2dda18/volume-d2c3d3cb-6455-41aa-afdd-96adbd2dda18.vmdk. No consolidation needed. 
{{(pid=61839) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 902.070239] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-f7ea511a-164d-41dd-8cb8-42ac6801a245 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] [instance: 3f86a0d5-30fd-42cc-bd40-14bce9d0e56f] Reconfiguring VM instance instance-00000045 to detach disk 2001 {{(pid=61839) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 902.070582] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b140fe1a-f49c-4150-a08a-5450df8ba740 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.091251] env[61839]: DEBUG oslo_vmware.api [None req-f7ea511a-164d-41dd-8cb8-42ac6801a245 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Waiting for the task: (returnval){ [ 902.091251] env[61839]: value = "task-1314649" [ 902.091251] env[61839]: _type = "Task" [ 902.091251] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 902.101543] env[61839]: DEBUG oslo_vmware.api [None req-f7ea511a-164d-41dd-8cb8-42ac6801a245 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Task: {'id': task-1314649, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.153182] env[61839]: DEBUG oslo_concurrency.lockutils [None req-d2f1d7dc-78e2-44fa-94f8-ee4c41a57b55 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 902.209755] env[61839]: DEBUG nova.network.neutron [None req-56232971-235f-4baa-8514-44b4e05db378 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 0d42326a-9958-463a-90ae-34fb55e99c5b] Successfully updated port: fdc0f4f9-f380-4153-b4fb-7073fe3ac06e {{(pid=61839) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 902.225576] env[61839]: DEBUG oslo_vmware.api [None req-e307782d-5b08-418e-9729-3f48548ba20b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': task-1314630, 'name': ReconfigVM_Task} progress is 18%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.240521] env[61839]: DEBUG oslo_vmware.api [None req-4887ddbd-b554-48d0-94f5-d697450e63c9 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Task: {'id': task-1314648, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.038179} completed successfully. 
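Each "Waiting for the task" / "progress is N%" pair above is one iteration of a poll loop over a vCenter task object. A hedged sketch of that pattern, assuming a caller-supplied get_task_info callable rather than the real oslo.vmware API:

import time

def wait_for_task(get_task_info, interval=0.5):
    """Poll until the task leaves the queued/running states."""
    while True:
        info = get_task_info()  # one PropertyCollector round-trip per iteration
        if info["state"] == "success":
            return info.get("result")
        if info["state"] == "error":
            raise RuntimeError(f"Task {info['id']} failed: {info.get('error')}")
        # Still running: this is where the "progress is N%." record is emitted.
        print(f"Task {info['id']} progress is {info.get('progress', 0)}%.")
        time.sleep(interval)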
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 902.240892] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-4887ddbd-b554-48d0-94f5-d697450e63c9 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Deleted the datastore file {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 902.240965] env[61839]: DEBUG oslo_concurrency.lockutils [None req-4887ddbd-b554-48d0-94f5-d697450e63c9 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Releasing lock "[datastore1] devstack-image-cache_base/47c13bd8-eb3a-4bae-9aff-f4a48e5fa7b6/47c13bd8-eb3a-4bae-9aff-f4a48e5fa7b6.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 902.241260] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-4887ddbd-b554-48d0-94f5-d697450e63c9 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/47c13bd8-eb3a-4bae-9aff-f4a48e5fa7b6/47c13bd8-eb3a-4bae-9aff-f4a48e5fa7b6.vmdk to [datastore1] 042183e2-d203-4d07-a668-df24ba50e242/042183e2-d203-4d07-a668-df24ba50e242.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 902.241560] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c0239b24-6847-492f-9b93-fb92d7c4bb21 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.251045] env[61839]: DEBUG oslo_vmware.api [None req-4887ddbd-b554-48d0-94f5-d697450e63c9 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Waiting for the task: (returnval){ [ 902.251045] env[61839]: value = "task-1314651" [ 902.251045] env[61839]: _type = "Task" [ 902.251045] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 902.261860] env[61839]: DEBUG oslo_vmware.api [None req-4887ddbd-b554-48d0-94f5-d697450e63c9 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Task: {'id': task-1314651, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.265890] env[61839]: DEBUG nova.compute.utils [None req-40823cea-b50f-4b8d-809e-3a11293f7329 tempest-ServerMetadataTestJSON-214230339 tempest-ServerMetadataTestJSON-214230339-project-member] Using /dev/sd instead of None {{(pid=61839) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 902.273167] env[61839]: DEBUG nova.compute.manager [None req-40823cea-b50f-4b8d-809e-3a11293f7329 tempest-ServerMetadataTestJSON-214230339 tempest-ServerMetadataTestJSON-214230339-project-member] [instance: bac4c882-a23d-412f-ae98-f4f21d86681a] Allocating IP information in the background. 
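"Using /dev/sd instead of None" records a fallback in device-name selection: the attach request carried no device path, so the compute layer defaults the prefix and then picks the next free letter. A toy version of that selection (the real get_next_device_name handles many more cases):

import string

def next_device_name(in_use, prefix="/dev/sd"):
    # prefix defaults when the request carries device_name=None,
    # which is exactly what the record above logs.
    for letter in string.ascii_lowercase:
        candidate = prefix + letter
        if candidate not in in_use:
            return candidate
    raise ValueError(f"no free device names under {prefix}")

print(next_device_name({"/dev/sda", "/dev/sdb"}))  # -> /dev/sdc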
{{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 902.273167] env[61839]: DEBUG nova.network.neutron [None req-40823cea-b50f-4b8d-809e-3a11293f7329 tempest-ServerMetadataTestJSON-214230339 tempest-ServerMetadataTestJSON-214230339-project-member] [instance: bac4c882-a23d-412f-ae98-f4f21d86681a] allocate_for_instance() {{(pid=61839) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 902.325511] env[61839]: DEBUG oslo_concurrency.lockutils [None req-095248fd-e632-43a1-88be-4cc8540a5623 tempest-ServerGroupTestJSON-1627784352 tempest-ServerGroupTestJSON-1627784352-project-member] Releasing lock "refresh_cache-65f34f9e-353a-4f94-8f79-9bda89451885" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 902.325511] env[61839]: DEBUG nova.compute.manager [None req-095248fd-e632-43a1-88be-4cc8540a5623 tempest-ServerGroupTestJSON-1627784352 tempest-ServerGroupTestJSON-1627784352-project-member] [instance: 65f34f9e-353a-4f94-8f79-9bda89451885] Instance network_info: |[{"id": "06a51f4e-0ef4-4148-b0a8-468d9345dc05", "address": "fa:16:3e:3e:81:96", "network": {"id": "e8ab45f5-4096-45e6-92fb-825c519ddcc2", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-2075574468-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1a03043943ea42c6b2d13c9ed76726d0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "74e6f6e0-95e6-4531-99e9-0e78350fb655", "external-id": "nsx-vlan-transportzone-896", "segmentation_id": 896, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap06a51f4e-0e", "ovs_interfaceid": "06a51f4e-0ef4-4148-b0a8-468d9345dc05", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 902.325511] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-095248fd-e632-43a1-88be-4cc8540a5623 tempest-ServerGroupTestJSON-1627784352 tempest-ServerGroupTestJSON-1627784352-project-member] [instance: 65f34f9e-353a-4f94-8f79-9bda89451885] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3e:81:96', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '74e6f6e0-95e6-4531-99e9-0e78350fb655', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '06a51f4e-0ef4-4148-b0a8-468d9345dc05', 'vif_model': 'vmxnet3'}] {{(pid=61839) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 902.335516] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-095248fd-e632-43a1-88be-4cc8540a5623 tempest-ServerGroupTestJSON-1627784352 tempest-ServerGroupTestJSON-1627784352-project-member] Creating folder: Project (1a03043943ea42c6b2d13c9ed76726d0). Parent ref: group-v281288. 
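The "Instance network_info" / "Instance VIF info" pair above shows a translation step: the Neutron-side network_info blob (an OVS port bound by the nsxv3 driver) is reduced to the handful of fields the VMware backend needs, with the NSX logical-switch id becoming an OpaqueNetwork reference. A sketch of that mapping, with the function invented here but the output keys taken from the "Instance VIF info" record:

def vif_info_from_port(port: dict) -> dict:
    details = port["details"]
    return {
        "network_name": port["network"]["bridge"],          # 'br-int'
        "mac_address": port["address"],                     # 'fa:16:3e:3e:81:96'
        "network_ref": {
            "type": "OpaqueNetwork",
            "network-id": details["nsx-logical-switch-id"],
            "network-type": "nsx.LogicalSwitch",
            "use-external-id": True,
        },
        "iface_id": port["id"],
        "vif_model": "vmxnet3",
    }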
{{(pid=61839) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 902.337565] env[61839]: DEBUG nova.policy [None req-40823cea-b50f-4b8d-809e-3a11293f7329 tempest-ServerMetadataTestJSON-214230339 tempest-ServerMetadataTestJSON-214230339-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6c3398ade6db41df8d0ec52faf28a861', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ead4a6a3419348d9b5472d8f6747b9cb', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61839) authorize /opt/stack/nova/nova/policy.py:201}} [ 902.339592] env[61839]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-566222e6-52e5-4e5a-a68d-99e4ba47cc1e {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.359589] env[61839]: INFO nova.virt.vmwareapi.vm_util [None req-095248fd-e632-43a1-88be-4cc8540a5623 tempest-ServerGroupTestJSON-1627784352 tempest-ServerGroupTestJSON-1627784352-project-member] Created folder: Project (1a03043943ea42c6b2d13c9ed76726d0) in parent group-v281288. [ 902.359589] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-095248fd-e632-43a1-88be-4cc8540a5623 tempest-ServerGroupTestJSON-1627784352 tempest-ServerGroupTestJSON-1627784352-project-member] Creating folder: Instances. Parent ref: group-v281396. {{(pid=61839) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 902.359761] env[61839]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8aff419d-910b-4ed4-9801-43c16484b6a2 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.381753] env[61839]: DEBUG nova.compute.manager [req-bca506a8-03af-46aa-bbde-c024f07647b2 req-0fe1aa86-5111-4b1a-91e4-e270f724615e service nova] [instance: 0d42326a-9958-463a-90ae-34fb55e99c5b] Received event network-vif-plugged-fdc0f4f9-f380-4153-b4fb-7073fe3ac06e {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 902.381753] env[61839]: DEBUG oslo_concurrency.lockutils [req-bca506a8-03af-46aa-bbde-c024f07647b2 req-0fe1aa86-5111-4b1a-91e4-e270f724615e service nova] Acquiring lock "0d42326a-9958-463a-90ae-34fb55e99c5b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 902.381753] env[61839]: DEBUG oslo_concurrency.lockutils [req-bca506a8-03af-46aa-bbde-c024f07647b2 req-0fe1aa86-5111-4b1a-91e4-e270f724615e service nova] Lock "0d42326a-9958-463a-90ae-34fb55e99c5b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 902.381753] env[61839]: DEBUG oslo_concurrency.lockutils [req-bca506a8-03af-46aa-bbde-c024f07647b2 req-0fe1aa86-5111-4b1a-91e4-e270f724615e service nova] Lock "0d42326a-9958-463a-90ae-34fb55e99c5b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 902.381753] env[61839]: 
DEBUG nova.compute.manager [req-bca506a8-03af-46aa-bbde-c024f07647b2 req-0fe1aa86-5111-4b1a-91e4-e270f724615e service nova] [instance: 0d42326a-9958-463a-90ae-34fb55e99c5b] No waiting events found dispatching network-vif-plugged-fdc0f4f9-f380-4153-b4fb-7073fe3ac06e {{(pid=61839) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 902.387379] env[61839]: WARNING nova.compute.manager [req-bca506a8-03af-46aa-bbde-c024f07647b2 req-0fe1aa86-5111-4b1a-91e4-e270f724615e service nova] [instance: 0d42326a-9958-463a-90ae-34fb55e99c5b] Received unexpected event network-vif-plugged-fdc0f4f9-f380-4153-b4fb-7073fe3ac06e for instance with vm_state building and task_state spawning. [ 902.387379] env[61839]: INFO nova.virt.vmwareapi.vm_util [None req-095248fd-e632-43a1-88be-4cc8540a5623 tempest-ServerGroupTestJSON-1627784352 tempest-ServerGroupTestJSON-1627784352-project-member] Created folder: Instances in parent group-v281396. [ 902.387379] env[61839]: DEBUG oslo.service.loopingcall [None req-095248fd-e632-43a1-88be-4cc8540a5623 tempest-ServerGroupTestJSON-1627784352 tempest-ServerGroupTestJSON-1627784352-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 902.387379] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 65f34f9e-353a-4f94-8f79-9bda89451885] Creating VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 902.387867] env[61839]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1372148e-2400-4c3f-a456-f42855f737a8 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.413505] env[61839]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 902.413505] env[61839]: value = "task-1314654" [ 902.413505] env[61839]: _type = "Task" [ 902.413505] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 902.425263] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314654, 'name': CreateVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.523844] env[61839]: DEBUG nova.compute.manager [None req-ddf21196-dc08-4e00-b99c-d950eff89ba4 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4] Found 2 images (rotation: 2) {{(pid=61839) _rotate_backups /opt/stack/nova/nova/compute/manager.py:4554}} [ 902.602612] env[61839]: DEBUG oslo_vmware.api [None req-f7ea511a-164d-41dd-8cb8-42ac6801a245 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Task: {'id': task-1314649, 'name': ReconfigVM_Task, 'duration_secs': 0.287857} completed successfully. 
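"No waiting events found dispatching network-vif-plugged-…" followed by the WARNING is the miss path of the external-event registry: the spawning thread had not (yet) registered a waiter for that port, so the Neutron notification is dropped as unexpected. A toy registry showing both paths, built around the same per-instance "-events" lock idea:

import threading
from collections import defaultdict

class InstanceEvents:
    def __init__(self):
        self._lock = threading.Lock()      # stands in for the "-events" lock above
        self._waiters = defaultdict(dict)  # instance uuid -> {event name: Event}

    def prepare(self, instance, name):
        """Called by the spawn path before it blocks on the event."""
        with self._lock:
            ev = threading.Event()
            self._waiters[instance][name] = ev
            return ev

    def dispatch(self, instance, name):
        """Called when Neutron reports e.g. network-vif-plugged-<port>."""
        with self._lock:
            ev = self._waiters[instance].pop(name, None)
        if ev is None:
            print(f"WARNING: received unexpected event {name}")  # miss path above
        else:
            ev.set()  # wakes the thread blocked on prepare()'s Event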
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 902.602851] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-f7ea511a-164d-41dd-8cb8-42ac6801a245 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] [instance: 3f86a0d5-30fd-42cc-bd40-14bce9d0e56f] Reconfigured VM instance instance-00000045 to detach disk 2001 {{(pid=61839) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 902.608205] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-927da056-b5bf-4360-a868-b0cf681a4f2d {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.626847] env[61839]: DEBUG oslo_vmware.api [None req-f7ea511a-164d-41dd-8cb8-42ac6801a245 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Waiting for the task: (returnval){ [ 902.626847] env[61839]: value = "task-1314655" [ 902.626847] env[61839]: _type = "Task" [ 902.626847] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 902.639217] env[61839]: DEBUG oslo_vmware.api [None req-f7ea511a-164d-41dd-8cb8-42ac6801a245 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Task: {'id': task-1314655, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.657924] env[61839]: DEBUG nova.network.neutron [None req-40823cea-b50f-4b8d-809e-3a11293f7329 tempest-ServerMetadataTestJSON-214230339 tempest-ServerMetadataTestJSON-214230339-project-member] [instance: bac4c882-a23d-412f-ae98-f4f21d86681a] Successfully created port: cd0c192b-65df-4a91-85c4-d0e336f93188 {{(pid=61839) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 902.721335] env[61839]: DEBUG oslo_concurrency.lockutils [None req-56232971-235f-4baa-8514-44b4e05db378 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Acquiring lock "refresh_cache-0d42326a-9958-463a-90ae-34fb55e99c5b" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 902.721509] env[61839]: DEBUG oslo_concurrency.lockutils [None req-56232971-235f-4baa-8514-44b4e05db378 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Acquired lock "refresh_cache-0d42326a-9958-463a-90ae-34fb55e99c5b" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 902.721688] env[61839]: DEBUG nova.network.neutron [None req-56232971-235f-4baa-8514-44b4e05db378 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 0d42326a-9958-463a-90ae-34fb55e99c5b] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 902.727234] env[61839]: DEBUG oslo_concurrency.lockutils [None req-ffc025d6-10c2-4e09-b5f6-377b5ab1692a tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Acquiring lock "fa8a2265-291e-4424-bea1-72574e495a72-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 
902.727536] env[61839]: DEBUG oslo_concurrency.lockutils [None req-ffc025d6-10c2-4e09-b5f6-377b5ab1692a tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Lock "fa8a2265-291e-4424-bea1-72574e495a72-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 902.727753] env[61839]: DEBUG oslo_concurrency.lockutils [None req-ffc025d6-10c2-4e09-b5f6-377b5ab1692a tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Lock "fa8a2265-291e-4424-bea1-72574e495a72-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 902.753577] env[61839]: DEBUG oslo_vmware.api [None req-e307782d-5b08-418e-9729-3f48548ba20b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': task-1314630, 'name': ReconfigVM_Task, 'duration_secs': 5.820849} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 902.761322] env[61839]: DEBUG oslo_concurrency.lockutils [None req-e307782d-5b08-418e-9729-3f48548ba20b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Releasing lock "40c54d84-8e50-483a-b4e0-5f1cc72b0880" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 902.761322] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-e307782d-5b08-418e-9729-3f48548ba20b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: 40c54d84-8e50-483a-b4e0-5f1cc72b0880] Reconfigured VM to detach interface {{(pid=61839) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 902.771418] env[61839]: DEBUG nova.compute.manager [None req-40823cea-b50f-4b8d-809e-3a11293f7329 tempest-ServerMetadataTestJSON-214230339 tempest-ServerMetadataTestJSON-214230339-project-member] [instance: bac4c882-a23d-412f-ae98-f4f21d86681a] Start building block device mappings for instance. {{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 902.778780] env[61839]: DEBUG oslo_vmware.api [None req-4887ddbd-b554-48d0-94f5-d697450e63c9 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Task: {'id': task-1314651, 'name': CopyVirtualDisk_Task} progress is 18%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.855672] env[61839]: DEBUG nova.network.neutron [req-72a677d0-7c76-4401-92aa-f35526d4cc21 req-b60ca049-896e-40fc-8229-f4b39b58babd service nova] [instance: 694a5d4b-3673-406b-a24a-d37fad33e549] Updated VIF entry in instance network info cache for port 4c2c0fb5-064b-4c53-9498-77b9ddc16884. 
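The Acquiring / "acquired … waited" / "released … held" triples that dominate this section come from a single wrapper around every named lock; the timings make contention visible (compare the 0.000s waits here with the multi-second holds elsewhere in the section). A minimal context-manager sketch of that bookkeeping, not the real oslo_concurrency implementation:

import threading
import time
from contextlib import contextmanager

_locks: dict[str, threading.Lock] = {}
_registry_lock = threading.Lock()

@contextmanager
def timed_lock(name: str, owner: str):
    with _registry_lock:  # make the name->lock mapping itself race-free
        lock = _locks.setdefault(name, threading.Lock())
    print(f'Acquiring lock "{name}" by "{owner}"')
    start = time.monotonic()
    with lock:
        acquired = time.monotonic()
        print(f'Lock "{name}" acquired by "{owner}" :: waited {acquired - start:.3f}s')
        try:
            yield
        finally:
            held = time.monotonic() - acquired
            print(f'Lock "{name}" "released" by "{owner}" :: held {held:.3f}s')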
{{(pid=61839) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 902.855672] env[61839]: DEBUG nova.network.neutron [req-72a677d0-7c76-4401-92aa-f35526d4cc21 req-b60ca049-896e-40fc-8229-f4b39b58babd service nova] [instance: 694a5d4b-3673-406b-a24a-d37fad33e549] Updating instance_info_cache with network_info: [{"id": "4c2c0fb5-064b-4c53-9498-77b9ddc16884", "address": "fa:16:3e:00:cb:60", "network": {"id": "e012ca9f-236a-42fd-a444-759508fbba7b", "bridge": null, "label": "tempest-AttachVolumeShelveTestJSON-567894073-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.242", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b73ee7e490914f54925597f38c8cc05b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap4c2c0fb5-06", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 902.936887] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314654, 'name': CreateVM_Task} progress is 25%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.144179] env[61839]: DEBUG oslo_vmware.api [None req-f7ea511a-164d-41dd-8cb8-42ac6801a245 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Task: {'id': task-1314655, 'name': ReconfigVM_Task, 'duration_secs': 0.208254} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 903.144523] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-f7ea511a-164d-41dd-8cb8-42ac6801a245 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] [instance: 3f86a0d5-30fd-42cc-bd40-14bce9d0e56f] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-281391', 'volume_id': 'd2c3d3cb-6455-41aa-afdd-96adbd2dda18', 'name': 'volume-d2c3d3cb-6455-41aa-afdd-96adbd2dda18', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '3f86a0d5-30fd-42cc-bd40-14bce9d0e56f', 'attached_at': '', 'detached_at': '', 'volume_id': 'd2c3d3cb-6455-41aa-afdd-96adbd2dda18', 'serial': 'd2c3d3cb-6455-41aa-afdd-96adbd2dda18'} {{(pid=61839) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 903.151937] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4cd0ea7-d845-4cfb-a595-bfdeda9071ae {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.160333] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79016317-f0d1-423f-a1d6-4dd211d65ab9 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.209217] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f83c4307-6b9b-45e5-9d4e-e9ded7352872 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.221446] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f70a1c1-7156-4190-9982-5f847ba14f4b {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.242241] env[61839]: DEBUG nova.compute.provider_tree [None req-c8b7ef29-926a-40c9-9023-eb5013fb90e1 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 903.278205] env[61839]: DEBUG oslo_vmware.api [None req-4887ddbd-b554-48d0-94f5-d697450e63c9 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Task: {'id': task-1314651, 'name': CopyVirtualDisk_Task} progress is 26%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.309639] env[61839]: DEBUG nova.network.neutron [None req-56232971-235f-4baa-8514-44b4e05db378 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 0d42326a-9958-463a-90ae-34fb55e99c5b] Instance cache missing network info. 
{{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 903.357587] env[61839]: DEBUG oslo_concurrency.lockutils [req-72a677d0-7c76-4401-92aa-f35526d4cc21 req-b60ca049-896e-40fc-8229-f4b39b58babd service nova] Releasing lock "refresh_cache-694a5d4b-3673-406b-a24a-d37fad33e549" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 903.429269] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314654, 'name': CreateVM_Task} progress is 25%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.548566] env[61839]: DEBUG nova.network.neutron [None req-56232971-235f-4baa-8514-44b4e05db378 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 0d42326a-9958-463a-90ae-34fb55e99c5b] Updating instance_info_cache with network_info: [{"id": "fdc0f4f9-f380-4153-b4fb-7073fe3ac06e", "address": "fa:16:3e:5f:c5:df", "network": {"id": "100bb090-83a3-4de8-bd2b-f65900cd5a3e", "bridge": "br-int", "label": "tempest-ServersTestJSON-901620943-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5aba1e066cb4400dbbacc92f393962e6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3b67e519-46cf-44ce-b670-4ba4c0c5b658", "external-id": "nsx-vlan-transportzone-110", "segmentation_id": 110, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfdc0f4f9-f3", "ovs_interfaceid": "fdc0f4f9-f380-4153-b4fb-7073fe3ac06e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 903.709640] env[61839]: DEBUG nova.objects.instance [None req-f7ea511a-164d-41dd-8cb8-42ac6801a245 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Lazy-loading 'flavor' on Instance uuid 3f86a0d5-30fd-42cc-bd40-14bce9d0e56f {{(pid=61839) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 903.747085] env[61839]: DEBUG nova.scheduler.client.report [None req-c8b7ef29-926a-40c9-9023-eb5013fb90e1 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 903.766392] env[61839]: DEBUG oslo_vmware.api [None req-4887ddbd-b554-48d0-94f5-d697450e63c9 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Task: {'id': task-1314651, 'name': CopyVirtualDisk_Task} progress is 26%. 
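"Inventory has not changed … based on inventory data" is the report client suppressing a Placement update: the freshly computed inventory for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 equals what it last sent, so no PUT goes out. The comparison is essentially a dict diff per resource class; a sketch with invented helper names:

def changed_resource_classes(cached: dict, fresh: dict) -> set:
    """Resource classes (VCPU, MEMORY_MB, DISK_GB, ...) whose records differ."""
    return {rc for rc in set(cached) | set(fresh) if cached.get(rc) != fresh.get(rc)}

cached = {"VCPU": {"total": 48, "reserved": 0, "allocation_ratio": 4.0}}
fresh = {"VCPU": {"total": 48, "reserved": 0, "allocation_ratio": 4.0}}
if not changed_resource_classes(cached, fresh):
    print("Inventory has not changed; skipping the Placement update.")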
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.785212] env[61839]: DEBUG nova.compute.manager [None req-40823cea-b50f-4b8d-809e-3a11293f7329 tempest-ServerMetadataTestJSON-214230339 tempest-ServerMetadataTestJSON-214230339-project-member] [instance: bac4c882-a23d-412f-ae98-f4f21d86681a] Start spawning the instance on the hypervisor. {{(pid=61839) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 903.817940] env[61839]: DEBUG oslo_concurrency.lockutils [None req-ffc025d6-10c2-4e09-b5f6-377b5ab1692a tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Acquiring lock "refresh_cache-fa8a2265-291e-4424-bea1-72574e495a72" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 903.818084] env[61839]: DEBUG oslo_concurrency.lockutils [None req-ffc025d6-10c2-4e09-b5f6-377b5ab1692a tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Acquired lock "refresh_cache-fa8a2265-291e-4424-bea1-72574e495a72" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 903.818258] env[61839]: DEBUG nova.network.neutron [None req-ffc025d6-10c2-4e09-b5f6-377b5ab1692a tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: fa8a2265-291e-4424-bea1-72574e495a72] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 903.825177] env[61839]: DEBUG nova.virt.hardware [None req-40823cea-b50f-4b8d-809e-3a11293f7329 tempest-ServerMetadataTestJSON-214230339 tempest-ServerMetadataTestJSON-214230339-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-18T16:51:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-18T16:50:57Z,direct_url=,disk_format='vmdk',id=e497cc62-282a-4a70-9770-22d80d8a1013,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a98e034276e44534a9feace637762da7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-18T16:50:58Z,virtual_size=,visibility=), allow threads: False {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 903.825739] env[61839]: DEBUG nova.virt.hardware [None req-40823cea-b50f-4b8d-809e-3a11293f7329 tempest-ServerMetadataTestJSON-214230339 tempest-ServerMetadataTestJSON-214230339-project-member] Flavor limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 903.825739] env[61839]: DEBUG nova.virt.hardware [None req-40823cea-b50f-4b8d-809e-3a11293f7329 tempest-ServerMetadataTestJSON-214230339 tempest-ServerMetadataTestJSON-214230339-project-member] Image limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 903.826164] env[61839]: DEBUG nova.virt.hardware [None req-40823cea-b50f-4b8d-809e-3a11293f7329 tempest-ServerMetadataTestJSON-214230339 tempest-ServerMetadataTestJSON-214230339-project-member] Flavor pref 0:0:0 {{(pid=61839) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 903.826164] env[61839]: DEBUG nova.virt.hardware [None req-40823cea-b50f-4b8d-809e-3a11293f7329 tempest-ServerMetadataTestJSON-214230339 tempest-ServerMetadataTestJSON-214230339-project-member] Image pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 903.826164] env[61839]: DEBUG nova.virt.hardware [None req-40823cea-b50f-4b8d-809e-3a11293f7329 tempest-ServerMetadataTestJSON-214230339 tempest-ServerMetadataTestJSON-214230339-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 903.827150] env[61839]: DEBUG nova.virt.hardware [None req-40823cea-b50f-4b8d-809e-3a11293f7329 tempest-ServerMetadataTestJSON-214230339 tempest-ServerMetadataTestJSON-214230339-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 903.827150] env[61839]: DEBUG nova.virt.hardware [None req-40823cea-b50f-4b8d-809e-3a11293f7329 tempest-ServerMetadataTestJSON-214230339 tempest-ServerMetadataTestJSON-214230339-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 903.827150] env[61839]: DEBUG nova.virt.hardware [None req-40823cea-b50f-4b8d-809e-3a11293f7329 tempest-ServerMetadataTestJSON-214230339 tempest-ServerMetadataTestJSON-214230339-project-member] Got 1 possible topologies {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 903.827150] env[61839]: DEBUG nova.virt.hardware [None req-40823cea-b50f-4b8d-809e-3a11293f7329 tempest-ServerMetadataTestJSON-214230339 tempest-ServerMetadataTestJSON-214230339-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 903.827150] env[61839]: DEBUG nova.virt.hardware [None req-40823cea-b50f-4b8d-809e-3a11293f7329 tempest-ServerMetadataTestJSON-214230339 tempest-ServerMetadataTestJSON-214230339-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 903.827811] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c965dc1-a8e7-4c46-823b-6b60e50cbc3d {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.837512] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bc422c1-cf9f-418d-ab2b-68ea53751d94 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.931024] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314654, 'name': CreateVM_Task} progress is 25%. 
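The hardware.py records above walk the CPU-topology search for the m1.nano flavor: neither flavor nor image expresses a preference (all 0:0:0), the limits default to 65536, and for a single vCPU only one factorization survives, 1 socket x 1 core x 1 thread. A simplified enumeration of that search (Nova's real version additionally weighs preferences and sort order):

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    """All (sockets, cores, threads) with sockets * cores * threads == vcpus."""
    divisors = [d for d in range(1, vcpus + 1) if vcpus % d == 0]
    topologies = []
    for sockets in divisors:
        for cores in divisors:
            if vcpus % (sockets * cores):
                continue  # sockets*cores must itself divide the vCPU count
            threads = vcpus // (sockets * cores)
            if sockets <= max_sockets and cores <= max_cores and threads <= max_threads:
                topologies.append((sockets, cores, threads))
    return topologies

print(possible_topologies(1))  # [(1, 1, 1)] -- the single topology logged above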
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.022445] env[61839]: DEBUG nova.compute.manager [req-9c785d14-e0c9-432a-beac-266b0466a70f req-73f8a971-4ec4-47e0-a13a-991be4ffcda8 service nova] [instance: 40c54d84-8e50-483a-b4e0-5f1cc72b0880] Received event network-vif-deleted-c97479ed-b409-4c5f-950d-e8663ef7da4e {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 904.022445] env[61839]: INFO nova.compute.manager [req-9c785d14-e0c9-432a-beac-266b0466a70f req-73f8a971-4ec4-47e0-a13a-991be4ffcda8 service nova] [instance: 40c54d84-8e50-483a-b4e0-5f1cc72b0880] Neutron deleted interface c97479ed-b409-4c5f-950d-e8663ef7da4e; detaching it from the instance and deleting it from the info cache [ 904.022445] env[61839]: DEBUG nova.network.neutron [req-9c785d14-e0c9-432a-beac-266b0466a70f req-73f8a971-4ec4-47e0-a13a-991be4ffcda8 service nova] [instance: 40c54d84-8e50-483a-b4e0-5f1cc72b0880] Updating instance_info_cache with network_info: [{"id": "4373753c-2ab4-4f61-8117-89f623225621", "address": "fa:16:3e:7a:8e:c0", "network": {"id": "41c98894-de91-45eb-a390-6217e0f9dca5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1040806357-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.199", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7e03467b7fba46a9aac1562a1cb8368e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "572b7281-aad3-45fa-9cb2-fc1c70569948", "external-id": "nsx-vlan-transportzone-722", "segmentation_id": 722, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4373753c-2a", "ovs_interfaceid": "4373753c-2ab4-4f61-8117-89f623225621", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 904.052559] env[61839]: DEBUG oslo_concurrency.lockutils [None req-56232971-235f-4baa-8514-44b4e05db378 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Releasing lock "refresh_cache-0d42326a-9958-463a-90ae-34fb55e99c5b" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 904.052968] env[61839]: DEBUG nova.compute.manager [None req-56232971-235f-4baa-8514-44b4e05db378 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 0d42326a-9958-463a-90ae-34fb55e99c5b] Instance network_info: |[{"id": "fdc0f4f9-f380-4153-b4fb-7073fe3ac06e", "address": "fa:16:3e:5f:c5:df", "network": {"id": "100bb090-83a3-4de8-bd2b-f65900cd5a3e", "bridge": "br-int", "label": "tempest-ServersTestJSON-901620943-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": 
"192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5aba1e066cb4400dbbacc92f393962e6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3b67e519-46cf-44ce-b670-4ba4c0c5b658", "external-id": "nsx-vlan-transportzone-110", "segmentation_id": 110, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfdc0f4f9-f3", "ovs_interfaceid": "fdc0f4f9-f380-4153-b4fb-7073fe3ac06e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 904.053461] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-56232971-235f-4baa-8514-44b4e05db378 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 0d42326a-9958-463a-90ae-34fb55e99c5b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5f:c5:df', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3b67e519-46cf-44ce-b670-4ba4c0c5b658', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'fdc0f4f9-f380-4153-b4fb-7073fe3ac06e', 'vif_model': 'vmxnet3'}] {{(pid=61839) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 904.061014] env[61839]: DEBUG oslo.service.loopingcall [None req-56232971-235f-4baa-8514-44b4e05db378 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 904.061685] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0d42326a-9958-463a-90ae-34fb55e99c5b] Creating VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 904.061932] env[61839]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3e994277-44ca-4b0e-a28e-098e819ddfe0 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.082557] env[61839]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 904.082557] env[61839]: value = "task-1314656" [ 904.082557] env[61839]: _type = "Task" [ 904.082557] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 904.090862] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314656, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.204715] env[61839]: DEBUG oslo_concurrency.lockutils [None req-bf3bcffb-6c34-4e16-a353-b3f853008852 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Acquiring lock "694a5d4b-3673-406b-a24a-d37fad33e549" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 904.254947] env[61839]: DEBUG oslo_concurrency.lockutils [None req-c8b7ef29-926a-40c9-9023-eb5013fb90e1 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.495s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 904.254947] env[61839]: DEBUG nova.compute.manager [None req-c8b7ef29-926a-40c9-9023-eb5013fb90e1 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 7f7b3f51-3e96-49f1-a84a-81ae649e6938] Start building networks asynchronously for instance. {{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 904.258559] env[61839]: DEBUG oslo_concurrency.lockutils [None req-5efce340-2499-408f-bda7-cfc4729c36d5 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.040s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 904.258559] env[61839]: DEBUG nova.objects.instance [None req-5efce340-2499-408f-bda7-cfc4729c36d5 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Lazy-loading 'resources' on Instance uuid a4463efc-ffca-4552-a072-cbf5fe062533 {{(pid=61839) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 904.274650] env[61839]: DEBUG oslo_concurrency.lockutils [None req-e307782d-5b08-418e-9729-3f48548ba20b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Acquiring lock "refresh_cache-40c54d84-8e50-483a-b4e0-5f1cc72b0880" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 904.274963] env[61839]: DEBUG oslo_concurrency.lockutils [None req-e307782d-5b08-418e-9729-3f48548ba20b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Acquired lock "refresh_cache-40c54d84-8e50-483a-b4e0-5f1cc72b0880" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 904.275238] env[61839]: DEBUG nova.network.neutron [None req-e307782d-5b08-418e-9729-3f48548ba20b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: 40c54d84-8e50-483a-b4e0-5f1cc72b0880] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 904.276500] env[61839]: DEBUG oslo_vmware.api [None req-4887ddbd-b554-48d0-94f5-d697450e63c9 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Task: {'id': task-1314651, 'name': CopyVirtualDisk_Task} progress is 26%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.303885] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-7e58e74a-aa1f-4c3c-9aa4-d23c1b92735c tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 56369316-a445-4a2a-a0a6-967074104e19] Volume attach. Driver type: vmdk {{(pid=61839) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 904.303885] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-7e58e74a-aa1f-4c3c-9aa4-d23c1b92735c tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 56369316-a445-4a2a-a0a6-967074104e19] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-281395', 'volume_id': '3c43b563-deaf-4d67-8cda-7d714c1bfac1', 'name': 'volume-3c43b563-deaf-4d67-8cda-7d714c1bfac1', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '56369316-a445-4a2a-a0a6-967074104e19', 'attached_at': '', 'detached_at': '', 'volume_id': '3c43b563-deaf-4d67-8cda-7d714c1bfac1', 'serial': '3c43b563-deaf-4d67-8cda-7d714c1bfac1'} {{(pid=61839) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 904.303885] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d70ba6c7-4798-46db-8b48-d467eda31548 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.328680] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8badfeca-4870-4f5e-abcf-44e3c459619f {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.359414] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-7e58e74a-aa1f-4c3c-9aa4-d23c1b92735c tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 56369316-a445-4a2a-a0a6-967074104e19] Reconfiguring VM instance instance-0000004c to attach disk [datastore1] volume-3c43b563-deaf-4d67-8cda-7d714c1bfac1/volume-3c43b563-deaf-4d67-8cda-7d714c1bfac1.vmdk or device None with type thin {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 904.359750] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fe899cbe-c33e-4b3f-8d6a-7241f420e562 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.386833] env[61839]: DEBUG oslo_vmware.api [None req-7e58e74a-aa1f-4c3c-9aa4-d23c1b92735c tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Waiting for the task: (returnval){ [ 904.386833] env[61839]: value = "task-1314657" [ 904.386833] env[61839]: _type = "Task" [ 904.386833] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 904.403976] env[61839]: DEBUG oslo_vmware.api [None req-7e58e74a-aa1f-4c3c-9aa4-d23c1b92735c tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Task: {'id': task-1314657, 'name': ReconfigVM_Task} progress is 5%. 
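The volume attach above mirrors the earlier detach: _attach_volume_vmdk resolves the backing file from connection_info, then a ReconfigVM_Task adds a thin-provisioned disk device pointing at it. The device change is rendered here as plain dicts purely to show the shape of the request; the driver builds the corresponding SOAP objects, and the exact field set is an assumption of this sketch:

def attach_disk_spec(vmdk_path: str, controller_key: int, unit_number: int) -> dict:
    return {
        "deviceChange": [{
            "operation": "add",  # add a device that reuses the existing backing file
            "device": {
                "controllerKey": controller_key,
                "unitNumber": unit_number,
                "backing": {
                    "fileName": vmdk_path,    # '[datastore1] volume-.../....vmdk'
                    "thinProvisioned": True,  # 'with type thin' in the record above
                    "diskMode": "persistent",
                },
            },
        }]
    }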
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.431388] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314654, 'name': CreateVM_Task} progress is 25%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.469526] env[61839]: DEBUG nova.compute.manager [None req-6791ea0c-d18a-4d55-a906-7cd1e895b298 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4] Checking state {{(pid=61839) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 904.470469] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-125537d1-2c85-4a4d-9ee3-0ad10bfa6a2e {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.526413] env[61839]: DEBUG oslo_concurrency.lockutils [req-9c785d14-e0c9-432a-beac-266b0466a70f req-73f8a971-4ec4-47e0-a13a-991be4ffcda8 service nova] Acquiring lock "40c54d84-8e50-483a-b4e0-5f1cc72b0880" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 904.526623] env[61839]: DEBUG oslo_concurrency.lockutils [req-9c785d14-e0c9-432a-beac-266b0466a70f req-73f8a971-4ec4-47e0-a13a-991be4ffcda8 service nova] Acquired lock "40c54d84-8e50-483a-b4e0-5f1cc72b0880" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 904.527697] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8fa5938-951e-4231-a098-154fae546727 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.549799] env[61839]: DEBUG oslo_concurrency.lockutils [req-9c785d14-e0c9-432a-beac-266b0466a70f req-73f8a971-4ec4-47e0-a13a-991be4ffcda8 service nova] Releasing lock "40c54d84-8e50-483a-b4e0-5f1cc72b0880" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 904.550186] env[61839]: WARNING nova.compute.manager [req-9c785d14-e0c9-432a-beac-266b0466a70f req-73f8a971-4ec4-47e0-a13a-991be4ffcda8 service nova] [instance: 40c54d84-8e50-483a-b4e0-5f1cc72b0880] Detach interface failed, port_id=c97479ed-b409-4c5f-950d-e8663ef7da4e, reason: No device with interface-id c97479ed-b409-4c5f-950d-e8663ef7da4e exists on VM: nova.exception.NotFound: No device with interface-id c97479ed-b409-4c5f-950d-e8663ef7da4e exists on VM [ 904.582330] env[61839]: DEBUG nova.network.neutron [None req-40823cea-b50f-4b8d-809e-3a11293f7329 tempest-ServerMetadataTestJSON-214230339 tempest-ServerMetadataTestJSON-214230339-project-member] [instance: bac4c882-a23d-412f-ae98-f4f21d86681a] Successfully updated port: cd0c192b-65df-4a91-85c4-d0e336f93188 {{(pid=61839) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 904.599062] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314656, 'name': CreateVM_Task, 'duration_secs': 0.365236} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 904.600077] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0d42326a-9958-463a-90ae-34fb55e99c5b] Created VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 904.600838] env[61839]: DEBUG oslo_concurrency.lockutils [None req-56232971-235f-4baa-8514-44b4e05db378 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 904.601071] env[61839]: DEBUG oslo_concurrency.lockutils [None req-56232971-235f-4baa-8514-44b4e05db378 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 904.601458] env[61839]: DEBUG oslo_concurrency.lockutils [None req-56232971-235f-4baa-8514-44b4e05db378 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 904.602064] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f380faf8-1847-4da7-abfb-a623fca4a136 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.609711] env[61839]: DEBUG oslo_vmware.api [None req-56232971-235f-4baa-8514-44b4e05db378 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Waiting for the task: (returnval){ [ 904.609711] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]5224aebd-c542-091e-b1d8-fc2d717a4a4e" [ 904.609711] env[61839]: _type = "Task" [ 904.609711] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 904.622055] env[61839]: DEBUG oslo_vmware.api [None req-56232971-235f-4baa-8514-44b4e05db378 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]5224aebd-c542-091e-b1d8-fc2d717a4a4e, 'name': SearchDatastore_Task} progress is 0%. 
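The image-cache records above follow the standard check-then-populate pattern: a lock named after the cached path ("[datastore1] devstack-image-cache_base/<image id>") serializes concurrent spawns of the same image, and SearchDatastore_Task is the existence check taken under it. Sketched with injected callables standing in for the datastore operations:

def ensure_cached_image(image_id, lock, exists, fetch,
                        cache_dir="[datastore1] devstack-image-cache_base"):
    path = f"{cache_dir}/{image_id}/{image_id}.vmdk"
    with lock(path):          # one spawn downloads; the rest wait and reuse
        if not exists(path):  # SearchDatastore_Task in the records above
            fetch(path)       # populate the cache only on a miss
    return path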
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.718236] env[61839]: DEBUG oslo_concurrency.lockutils [None req-f7ea511a-164d-41dd-8cb8-42ac6801a245 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Lock "3f86a0d5-30fd-42cc-bd40-14bce9d0e56f" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.282s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 904.772241] env[61839]: DEBUG nova.compute.utils [None req-c8b7ef29-926a-40c9-9023-eb5013fb90e1 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Using /dev/sd instead of None {{(pid=61839) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 904.774039] env[61839]: DEBUG nova.compute.manager [None req-c8b7ef29-926a-40c9-9023-eb5013fb90e1 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 7f7b3f51-3e96-49f1-a84a-81ae649e6938] Allocating IP information in the background. {{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 904.774558] env[61839]: DEBUG nova.network.neutron [None req-c8b7ef29-926a-40c9-9023-eb5013fb90e1 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 7f7b3f51-3e96-49f1-a84a-81ae649e6938] allocate_for_instance() {{(pid=61839) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 904.787344] env[61839]: DEBUG oslo_vmware.api [None req-4887ddbd-b554-48d0-94f5-d697450e63c9 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Task: {'id': task-1314651, 'name': CopyVirtualDisk_Task} progress is 38%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.836031] env[61839]: DEBUG nova.policy [None req-c8b7ef29-926a-40c9-9023-eb5013fb90e1 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'cd83e8a2f63d4ae38c5989c1e3824e3e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '48d8c406ff504d71bba5fb74caf11c14', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61839) authorize /opt/stack/nova/nova/policy.py:201}} [ 904.882219] env[61839]: DEBUG oslo_concurrency.lockutils [None req-471940ce-9b23-4cd3-8f29-5ca6a8a98f90 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Acquiring lock "40c54d84-8e50-483a-b4e0-5f1cc72b0880" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 904.886035] env[61839]: DEBUG oslo_concurrency.lockutils [None req-471940ce-9b23-4cd3-8f29-5ca6a8a98f90 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Lock "40c54d84-8e50-483a-b4e0-5f1cc72b0880" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 904.886035] env[61839]: DEBUG oslo_concurrency.lockutils [None req-471940ce-9b23-4cd3-8f29-5ca6a8a98f90 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Acquiring lock "40c54d84-8e50-483a-b4e0-5f1cc72b0880-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 904.886035] env[61839]: DEBUG oslo_concurrency.lockutils [None req-471940ce-9b23-4cd3-8f29-5ca6a8a98f90 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Lock "40c54d84-8e50-483a-b4e0-5f1cc72b0880-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 904.886035] env[61839]: DEBUG oslo_concurrency.lockutils [None req-471940ce-9b23-4cd3-8f29-5ca6a8a98f90 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Lock "40c54d84-8e50-483a-b4e0-5f1cc72b0880-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 904.897469] env[61839]: INFO nova.compute.manager [None req-471940ce-9b23-4cd3-8f29-5ca6a8a98f90 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: 40c54d84-8e50-483a-b4e0-5f1cc72b0880] Terminating instance [ 904.904375] env[61839]: DEBUG nova.compute.manager [None req-471940ce-9b23-4cd3-8f29-5ca6a8a98f90 
tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: 40c54d84-8e50-483a-b4e0-5f1cc72b0880] Start destroying the instance on the hypervisor. {{(pid=61839) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 904.904375] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-471940ce-9b23-4cd3-8f29-5ca6a8a98f90 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: 40c54d84-8e50-483a-b4e0-5f1cc72b0880] Destroying instance {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 904.904375] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b530f18-c5ec-47d1-99e7-cf0a87e38539 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.911828] env[61839]: DEBUG nova.network.neutron [None req-ffc025d6-10c2-4e09-b5f6-377b5ab1692a tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: fa8a2265-291e-4424-bea1-72574e495a72] Updating instance_info_cache with network_info: [{"id": "471fcd5e-1ea3-4791-9a4d-b68197f8068e", "address": "fa:16:3e:48:06:f8", "network": {"id": "2334f355-8fd7-4df2-ba1c-a28b5fde97f2", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-202913707-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8935bcc7ee644cb7a2a33557a708189c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "37434b93-dfdc-4a3f-bf5a-9f2cbe25a754", "external-id": "nsx-vlan-transportzone-676", "segmentation_id": 676, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap471fcd5e-1e", "ovs_interfaceid": "471fcd5e-1ea3-4791-9a4d-b68197f8068e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 904.916445] env[61839]: DEBUG oslo_vmware.api [None req-7e58e74a-aa1f-4c3c-9aa4-d23c1b92735c tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Task: {'id': task-1314657, 'name': ReconfigVM_Task, 'duration_secs': 0.451674} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 904.916445] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-7e58e74a-aa1f-4c3c-9aa4-d23c1b92735c tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 56369316-a445-4a2a-a0a6-967074104e19] Reconfigured VM instance instance-0000004c to attach disk [datastore1] volume-3c43b563-deaf-4d67-8cda-7d714c1bfac1/volume-3c43b563-deaf-4d67-8cda-7d714c1bfac1.vmdk or device None with type thin {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 904.929657] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b05870ae-e340-407b-805e-1ca8df704325 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.937599] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-471940ce-9b23-4cd3-8f29-5ca6a8a98f90 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: 40c54d84-8e50-483a-b4e0-5f1cc72b0880] Powering off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 904.945773] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-73548306-60f1-4934-9c72-208d2e6f150e {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.957192] env[61839]: DEBUG oslo_vmware.api [None req-7e58e74a-aa1f-4c3c-9aa4-d23c1b92735c tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Waiting for the task: (returnval){ [ 904.957192] env[61839]: value = "task-1314659" [ 904.957192] env[61839]: _type = "Task" [ 904.957192] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 904.965623] env[61839]: DEBUG oslo_vmware.api [None req-471940ce-9b23-4cd3-8f29-5ca6a8a98f90 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Waiting for the task: (returnval){ [ 904.965623] env[61839]: value = "task-1314658" [ 904.965623] env[61839]: _type = "Task" [ 904.965623] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 904.965623] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314654, 'name': CreateVM_Task} progress is 25%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.982274] env[61839]: INFO nova.compute.manager [None req-6791ea0c-d18a-4d55-a906-7cd1e895b298 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4] instance snapshotting [ 904.982993] env[61839]: DEBUG nova.objects.instance [None req-6791ea0c-d18a-4d55-a906-7cd1e895b298 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Lazy-loading 'flavor' on Instance uuid fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4 {{(pid=61839) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 904.984800] env[61839]: DEBUG oslo_vmware.api [None req-471940ce-9b23-4cd3-8f29-5ca6a8a98f90 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': task-1314658, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.991433] env[61839]: DEBUG oslo_vmware.api [None req-7e58e74a-aa1f-4c3c-9aa4-d23c1b92735c tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Task: {'id': task-1314659, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.089144] env[61839]: DEBUG oslo_concurrency.lockutils [None req-40823cea-b50f-4b8d-809e-3a11293f7329 tempest-ServerMetadataTestJSON-214230339 tempest-ServerMetadataTestJSON-214230339-project-member] Acquiring lock "refresh_cache-bac4c882-a23d-412f-ae98-f4f21d86681a" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 905.089315] env[61839]: DEBUG oslo_concurrency.lockutils [None req-40823cea-b50f-4b8d-809e-3a11293f7329 tempest-ServerMetadataTestJSON-214230339 tempest-ServerMetadataTestJSON-214230339-project-member] Acquired lock "refresh_cache-bac4c882-a23d-412f-ae98-f4f21d86681a" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 905.089541] env[61839]: DEBUG nova.network.neutron [None req-40823cea-b50f-4b8d-809e-3a11293f7329 tempest-ServerMetadataTestJSON-214230339 tempest-ServerMetadataTestJSON-214230339-project-member] [instance: bac4c882-a23d-412f-ae98-f4f21d86681a] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 905.123740] env[61839]: DEBUG nova.compute.manager [req-7f18a0c6-7130-4dd6-9cda-fb2d0228c460 req-400f6f8f-5923-4b68-9d34-d5c8b98f85ef service nova] [instance: 0d42326a-9958-463a-90ae-34fb55e99c5b] Received event network-changed-fdc0f4f9-f380-4153-b4fb-7073fe3ac06e {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 905.123740] env[61839]: DEBUG nova.compute.manager [req-7f18a0c6-7130-4dd6-9cda-fb2d0228c460 req-400f6f8f-5923-4b68-9d34-d5c8b98f85ef service nova] [instance: 0d42326a-9958-463a-90ae-34fb55e99c5b] Refreshing instance network info cache due to event network-changed-fdc0f4f9-f380-4153-b4fb-7073fe3ac06e. 
{{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 905.123740] env[61839]: DEBUG oslo_concurrency.lockutils [req-7f18a0c6-7130-4dd6-9cda-fb2d0228c460 req-400f6f8f-5923-4b68-9d34-d5c8b98f85ef service nova] Acquiring lock "refresh_cache-0d42326a-9958-463a-90ae-34fb55e99c5b" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 905.123740] env[61839]: DEBUG oslo_concurrency.lockutils [req-7f18a0c6-7130-4dd6-9cda-fb2d0228c460 req-400f6f8f-5923-4b68-9d34-d5c8b98f85ef service nova] Acquired lock "refresh_cache-0d42326a-9958-463a-90ae-34fb55e99c5b" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 905.123740] env[61839]: DEBUG nova.network.neutron [req-7f18a0c6-7130-4dd6-9cda-fb2d0228c460 req-400f6f8f-5923-4b68-9d34-d5c8b98f85ef service nova] [instance: 0d42326a-9958-463a-90ae-34fb55e99c5b] Refreshing network info cache for port fdc0f4f9-f380-4153-b4fb-7073fe3ac06e {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 905.130550] env[61839]: DEBUG oslo_vmware.api [None req-56232971-235f-4baa-8514-44b4e05db378 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]5224aebd-c542-091e-b1d8-fc2d717a4a4e, 'name': SearchDatastore_Task, 'duration_secs': 0.091984} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 905.131471] env[61839]: DEBUG oslo_concurrency.lockutils [None req-56232971-235f-4baa-8514-44b4e05db378 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 905.131663] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-56232971-235f-4baa-8514-44b4e05db378 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 0d42326a-9958-463a-90ae-34fb55e99c5b] Processing image e497cc62-282a-4a70-9770-22d80d8a1013 {{(pid=61839) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 905.131994] env[61839]: DEBUG oslo_concurrency.lockutils [None req-56232971-235f-4baa-8514-44b4e05db378 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 905.132258] env[61839]: DEBUG oslo_concurrency.lockutils [None req-56232971-235f-4baa-8514-44b4e05db378 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 905.132505] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-56232971-235f-4baa-8514-44b4e05db378 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 905.133116] env[61839]: 
DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1a8329c2-19e7-4244-8810-27c61812cd48 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.154349] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-56232971-235f-4baa-8514-44b4e05db378 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 905.154710] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-56232971-235f-4baa-8514-44b4e05db378 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61839) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 905.155857] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cf9140fd-4b66-4ad3-acd6-909a96e3abb2 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.168760] env[61839]: DEBUG oslo_vmware.api [None req-56232971-235f-4baa-8514-44b4e05db378 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Waiting for the task: (returnval){ [ 905.168760] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52dd7804-8a9b-44e6-2fba-73440f2548c5" [ 905.168760] env[61839]: _type = "Task" [ 905.168760] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 905.185197] env[61839]: DEBUG oslo_vmware.api [None req-56232971-235f-4baa-8514-44b4e05db378 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52dd7804-8a9b-44e6-2fba-73440f2548c5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.222370] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2efbd04-16ff-4e39-9d53-5dbfb5250807 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.233187] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ce81b5b-2569-4c35-a2d3-c16ff7ca2797 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.271637] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a93eb45-72d0-4c3f-a310-52e1ede80d32 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.279801] env[61839]: DEBUG nova.compute.manager [None req-c8b7ef29-926a-40c9-9023-eb5013fb90e1 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 7f7b3f51-3e96-49f1-a84a-81ae649e6938] Start building block device mappings for instance. 
{{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 905.287326] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c57c101b-1646-41f3-9b61-7ef794ed8d93 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.293608] env[61839]: DEBUG oslo_vmware.api [None req-4887ddbd-b554-48d0-94f5-d697450e63c9 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Task: {'id': task-1314651, 'name': CopyVirtualDisk_Task} progress is 57%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.299084] env[61839]: DEBUG nova.network.neutron [None req-e307782d-5b08-418e-9729-3f48548ba20b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: 40c54d84-8e50-483a-b4e0-5f1cc72b0880] Updating instance_info_cache with network_info: [{"id": "4373753c-2ab4-4f61-8117-89f623225621", "address": "fa:16:3e:7a:8e:c0", "network": {"id": "41c98894-de91-45eb-a390-6217e0f9dca5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1040806357-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.199", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7e03467b7fba46a9aac1562a1cb8368e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "572b7281-aad3-45fa-9cb2-fc1c70569948", "external-id": "nsx-vlan-transportzone-722", "segmentation_id": 722, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4373753c-2a", "ovs_interfaceid": "4373753c-2ab4-4f61-8117-89f623225621", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 905.308566] env[61839]: DEBUG nova.compute.provider_tree [None req-5efce340-2499-408f-bda7-cfc4729c36d5 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 905.367821] env[61839]: DEBUG nova.network.neutron [None req-c8b7ef29-926a-40c9-9023-eb5013fb90e1 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 7f7b3f51-3e96-49f1-a84a-81ae649e6938] Successfully created port: 6b6f5c89-f3fe-4e29-82b8-e9e8f2658bfd {{(pid=61839) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 905.423375] env[61839]: DEBUG oslo_concurrency.lockutils [None req-ffc025d6-10c2-4e09-b5f6-377b5ab1692a tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Releasing lock "refresh_cache-fa8a2265-291e-4424-bea1-72574e495a72" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 905.441997] env[61839]: DEBUG 
oslo_vmware.api [-] Task: {'id': task-1314654, 'name': CreateVM_Task} progress is 25%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.478734] env[61839]: DEBUG oslo_vmware.api [None req-7e58e74a-aa1f-4c3c-9aa4-d23c1b92735c tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Task: {'id': task-1314659, 'name': ReconfigVM_Task, 'duration_secs': 0.216277} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 905.478734] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-7e58e74a-aa1f-4c3c-9aa4-d23c1b92735c tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 56369316-a445-4a2a-a0a6-967074104e19] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-281395', 'volume_id': '3c43b563-deaf-4d67-8cda-7d714c1bfac1', 'name': 'volume-3c43b563-deaf-4d67-8cda-7d714c1bfac1', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '56369316-a445-4a2a-a0a6-967074104e19', 'attached_at': '', 'detached_at': '', 'volume_id': '3c43b563-deaf-4d67-8cda-7d714c1bfac1', 'serial': '3c43b563-deaf-4d67-8cda-7d714c1bfac1'} {{(pid=61839) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 905.483396] env[61839]: DEBUG oslo_vmware.api [None req-471940ce-9b23-4cd3-8f29-5ca6a8a98f90 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': task-1314658, 'name': PowerOffVM_Task, 'duration_secs': 0.221146} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 905.483800] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-471940ce-9b23-4cd3-8f29-5ca6a8a98f90 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: 40c54d84-8e50-483a-b4e0-5f1cc72b0880] Powered off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 905.483975] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-471940ce-9b23-4cd3-8f29-5ca6a8a98f90 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: 40c54d84-8e50-483a-b4e0-5f1cc72b0880] Unregistering the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 905.484888] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-955788b2-9638-4871-8563-00bab95731ca {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.497970] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e94d69c1-0638-4ee7-9c11-fcb990a4dbda {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.524703] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-637c8d62-8e90-49ac-a12a-5a943e10eea4 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.561044] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-471940ce-9b23-4cd3-8f29-5ca6a8a98f90 tempest-AttachInterfacesTestJSON-1805184072 
tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: 40c54d84-8e50-483a-b4e0-5f1cc72b0880] Unregistered the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 905.561481] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-471940ce-9b23-4cd3-8f29-5ca6a8a98f90 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: 40c54d84-8e50-483a-b4e0-5f1cc72b0880] Deleting contents of the VM from datastore datastore1 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 905.561812] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-471940ce-9b23-4cd3-8f29-5ca6a8a98f90 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Deleting the datastore file [datastore1] 40c54d84-8e50-483a-b4e0-5f1cc72b0880 {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 905.562266] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-546711e0-47ed-496a-8ab1-c8162e558fbd {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.572977] env[61839]: DEBUG oslo_vmware.api [None req-471940ce-9b23-4cd3-8f29-5ca6a8a98f90 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Waiting for the task: (returnval){ [ 905.572977] env[61839]: value = "task-1314661" [ 905.572977] env[61839]: _type = "Task" [ 905.572977] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 905.583428] env[61839]: DEBUG oslo_vmware.api [None req-471940ce-9b23-4cd3-8f29-5ca6a8a98f90 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': task-1314661, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.651237] env[61839]: DEBUG nova.network.neutron [None req-40823cea-b50f-4b8d-809e-3a11293f7329 tempest-ServerMetadataTestJSON-214230339 tempest-ServerMetadataTestJSON-214230339-project-member] [instance: bac4c882-a23d-412f-ae98-f4f21d86681a] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 905.687430] env[61839]: DEBUG oslo_vmware.api [None req-56232971-235f-4baa-8514-44b4e05db378 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52dd7804-8a9b-44e6-2fba-73440f2548c5, 'name': SearchDatastore_Task, 'duration_secs': 0.092027} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 905.688456] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-458c2426-0db6-479e-810e-ea31b48df76d {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.697617] env[61839]: DEBUG oslo_vmware.api [None req-56232971-235f-4baa-8514-44b4e05db378 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Waiting for the task: (returnval){ [ 905.697617] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]522981e3-161f-1e17-02e3-7eb613dee1ec" [ 905.697617] env[61839]: _type = "Task" [ 905.697617] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 905.708976] env[61839]: DEBUG oslo_vmware.api [None req-56232971-235f-4baa-8514-44b4e05db378 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]522981e3-161f-1e17-02e3-7eb613dee1ec, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.734978] env[61839]: DEBUG oslo_concurrency.lockutils [None req-8237778e-4e32-46ea-924d-34926c83603d tempest-ServerAddressesNegativeTestJSON-1352160384 tempest-ServerAddressesNegativeTestJSON-1352160384-project-member] Acquiring lock "12087baa-e700-4977-b2df-3aa2c56cc2f6" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 905.735194] env[61839]: DEBUG oslo_concurrency.lockutils [None req-8237778e-4e32-46ea-924d-34926c83603d tempest-ServerAddressesNegativeTestJSON-1352160384 tempest-ServerAddressesNegativeTestJSON-1352160384-project-member] Lock "12087baa-e700-4977-b2df-3aa2c56cc2f6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 905.779330] env[61839]: DEBUG oslo_vmware.api [None req-4887ddbd-b554-48d0-94f5-d697450e63c9 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Task: {'id': task-1314651, 'name': CopyVirtualDisk_Task} progress is 80%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.810958] env[61839]: DEBUG oslo_concurrency.lockutils [None req-e307782d-5b08-418e-9729-3f48548ba20b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Releasing lock "refresh_cache-40c54d84-8e50-483a-b4e0-5f1cc72b0880" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 905.814412] env[61839]: DEBUG nova.scheduler.client.report [None req-5efce340-2499-408f-bda7-cfc4729c36d5 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 905.944157] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314654, 'name': CreateVM_Task} progress is 99%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.963750] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-567c7e07-8fec-45dc-bcc9-827028fcf65e {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.991890] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b67984d3-c52d-4754-8134-9c237a23745a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.000389] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-ffc025d6-10c2-4e09-b5f6-377b5ab1692a tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: fa8a2265-291e-4424-bea1-72574e495a72] Updating instance 'fa8a2265-291e-4424-bea1-72574e495a72' progress to 83 {{(pid=61839) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 906.038461] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-6791ea0c-d18a-4d55-a906-7cd1e895b298 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4] Creating Snapshot of the VM instance {{(pid=61839) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 906.039270] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-53b6d454-bda8-4c5e-93ab-be75aed07cd9 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.052206] env[61839]: DEBUG oslo_vmware.api [None req-6791ea0c-d18a-4d55-a906-7cd1e895b298 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Waiting for the task: (returnval){ [ 906.052206] env[61839]: value = "task-1314662" [ 906.052206] env[61839]: _type = "Task" [ 906.052206] env[61839]: } to complete. 
{{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 906.065743] env[61839]: DEBUG oslo_vmware.api [None req-6791ea0c-d18a-4d55-a906-7cd1e895b298 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1314662, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.089407] env[61839]: DEBUG oslo_vmware.api [None req-471940ce-9b23-4cd3-8f29-5ca6a8a98f90 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': task-1314661, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.217524] env[61839]: DEBUG oslo_vmware.api [None req-56232971-235f-4baa-8514-44b4e05db378 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]522981e3-161f-1e17-02e3-7eb613dee1ec, 'name': SearchDatastore_Task, 'duration_secs': 0.089421} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 906.218045] env[61839]: DEBUG oslo_concurrency.lockutils [None req-56232971-235f-4baa-8514-44b4e05db378 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 906.218357] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-56232971-235f-4baa-8514-44b4e05db378 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk to [datastore1] 0d42326a-9958-463a-90ae-34fb55e99c5b/0d42326a-9958-463a-90ae-34fb55e99c5b.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 906.219378] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fbc56aba-c3a7-4896-9895-8c43953fe326 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.229113] env[61839]: DEBUG oslo_vmware.api [None req-56232971-235f-4baa-8514-44b4e05db378 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Waiting for the task: (returnval){ [ 906.229113] env[61839]: value = "task-1314663" [ 906.229113] env[61839]: _type = "Task" [ 906.229113] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 906.237389] env[61839]: DEBUG nova.compute.manager [None req-8237778e-4e32-46ea-924d-34926c83603d tempest-ServerAddressesNegativeTestJSON-1352160384 tempest-ServerAddressesNegativeTestJSON-1352160384-project-member] [instance: 12087baa-e700-4977-b2df-3aa2c56cc2f6] Starting instance... 
{{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 906.243452] env[61839]: DEBUG oslo_vmware.api [None req-56232971-235f-4baa-8514-44b4e05db378 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': task-1314663, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.279468] env[61839]: DEBUG oslo_vmware.api [None req-4887ddbd-b554-48d0-94f5-d697450e63c9 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Task: {'id': task-1314651, 'name': CopyVirtualDisk_Task, 'duration_secs': 4.014121} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 906.279792] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-4887ddbd-b554-48d0-94f5-d697450e63c9 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/47c13bd8-eb3a-4bae-9aff-f4a48e5fa7b6/47c13bd8-eb3a-4bae-9aff-f4a48e5fa7b6.vmdk to [datastore1] 042183e2-d203-4d07-a668-df24ba50e242/042183e2-d203-4d07-a668-df24ba50e242.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 906.280634] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bdfaf1a-c090-4552-9e35-3e816035d2e2 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.307645] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-4887ddbd-b554-48d0-94f5-d697450e63c9 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: 042183e2-d203-4d07-a668-df24ba50e242] Reconfiguring VM instance instance-00000051 to attach disk [datastore1] 042183e2-d203-4d07-a668-df24ba50e242/042183e2-d203-4d07-a668-df24ba50e242.vmdk or device None with type streamOptimized {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 906.312236] env[61839]: DEBUG nova.compute.manager [None req-c8b7ef29-926a-40c9-9023-eb5013fb90e1 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 7f7b3f51-3e96-49f1-a84a-81ae649e6938] Start spawning the instance on the hypervisor. 
{{(pid=61839) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 906.314233] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9053d963-0695-4996-8525-358ceae02fa5 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.332998] env[61839]: DEBUG nova.network.neutron [None req-40823cea-b50f-4b8d-809e-3a11293f7329 tempest-ServerMetadataTestJSON-214230339 tempest-ServerMetadataTestJSON-214230339-project-member] [instance: bac4c882-a23d-412f-ae98-f4f21d86681a] Updating instance_info_cache with network_info: [{"id": "cd0c192b-65df-4a91-85c4-d0e336f93188", "address": "fa:16:3e:d5:df:4f", "network": {"id": "cfc99418-98a0-48e0-a326-310d5c2613b9", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-652379007-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ead4a6a3419348d9b5472d8f6747b9cb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78e1ebb0-0130-446b-bf73-a0e59bbb95cc", "external-id": "nsx-vlan-transportzone-414", "segmentation_id": 414, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcd0c192b-65", "ovs_interfaceid": "cd0c192b-65df-4a91-85c4-d0e336f93188", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 906.336919] env[61839]: DEBUG oslo_concurrency.lockutils [None req-e307782d-5b08-418e-9729-3f48548ba20b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Lock "interface-40c54d84-8e50-483a-b4e0-5f1cc72b0880-c97479ed-b409-4c5f-950d-e8663ef7da4e" "released" by "nova.compute.manager.ComputeManager.detach_interface.<locals>.do_detach_interface" :: held 10.261s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 906.336919] env[61839]: DEBUG oslo_concurrency.lockutils [None req-5efce340-2499-408f-bda7-cfc4729c36d5 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.078s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 906.339177] env[61839]: DEBUG oslo_concurrency.lockutils [None req-d2f1d7dc-78e2-44fa-94f8-ee4c41a57b55 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.188s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 906.340326] env[61839]: DEBUG nova.objects.instance [None req-d2f1d7dc-78e2-44fa-94f8-ee4c41a57b55 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Lazy-loading 'resources' on Instance uuid 
694a5d4b-3673-406b-a24a-d37fad33e549 {{(pid=61839) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 906.347598] env[61839]: DEBUG oslo_vmware.api [None req-4887ddbd-b554-48d0-94f5-d697450e63c9 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Waiting for the task: (returnval){ [ 906.347598] env[61839]: value = "task-1314664" [ 906.347598] env[61839]: _type = "Task" [ 906.347598] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 906.358536] env[61839]: DEBUG oslo_vmware.api [None req-4887ddbd-b554-48d0-94f5-d697450e63c9 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Task: {'id': task-1314664, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.368277] env[61839]: INFO nova.scheduler.client.report [None req-5efce340-2499-408f-bda7-cfc4729c36d5 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Deleted allocations for instance a4463efc-ffca-4552-a072-cbf5fe062533 [ 906.371700] env[61839]: DEBUG nova.virt.hardware [None req-c8b7ef29-926a-40c9-9023-eb5013fb90e1 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-18T16:51:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-18T16:50:57Z,direct_url=,disk_format='vmdk',id=e497cc62-282a-4a70-9770-22d80d8a1013,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a98e034276e44534a9feace637762da7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-18T16:50:58Z,virtual_size=,visibility=), allow threads: False {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 906.371700] env[61839]: DEBUG nova.virt.hardware [None req-c8b7ef29-926a-40c9-9023-eb5013fb90e1 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Flavor limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 906.372019] env[61839]: DEBUG nova.virt.hardware [None req-c8b7ef29-926a-40c9-9023-eb5013fb90e1 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Image limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 906.372079] env[61839]: DEBUG nova.virt.hardware [None req-c8b7ef29-926a-40c9-9023-eb5013fb90e1 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Flavor pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 906.372297] env[61839]: DEBUG nova.virt.hardware [None req-c8b7ef29-926a-40c9-9023-eb5013fb90e1 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Image pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 906.372422] env[61839]: DEBUG nova.virt.hardware [None 
req-c8b7ef29-926a-40c9-9023-eb5013fb90e1 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 906.372644] env[61839]: DEBUG nova.virt.hardware [None req-c8b7ef29-926a-40c9-9023-eb5013fb90e1 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 906.372992] env[61839]: DEBUG nova.virt.hardware [None req-c8b7ef29-926a-40c9-9023-eb5013fb90e1 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 906.373228] env[61839]: DEBUG nova.virt.hardware [None req-c8b7ef29-926a-40c9-9023-eb5013fb90e1 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Got 1 possible topologies {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 906.373399] env[61839]: DEBUG nova.virt.hardware [None req-c8b7ef29-926a-40c9-9023-eb5013fb90e1 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 906.373571] env[61839]: DEBUG nova.virt.hardware [None req-c8b7ef29-926a-40c9-9023-eb5013fb90e1 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 906.377563] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-678008e6-f696-4689-bbab-30f760df99cb {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.388216] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-917888a3-4fb1-45a6-befc-544270b4efd4 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.439349] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314654, 'name': CreateVM_Task} progress is 99%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.508194] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-ffc025d6-10c2-4e09-b5f6-377b5ab1692a tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: fa8a2265-291e-4424-bea1-72574e495a72] Powering on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 906.509546] env[61839]: DEBUG nova.network.neutron [req-7f18a0c6-7130-4dd6-9cda-fb2d0228c460 req-400f6f8f-5923-4b68-9d34-d5c8b98f85ef service nova] [instance: 0d42326a-9958-463a-90ae-34fb55e99c5b] Updated VIF entry in instance network info cache for port fdc0f4f9-f380-4153-b4fb-7073fe3ac06e. 
{{(pid=61839) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 906.510027] env[61839]: DEBUG nova.network.neutron [req-7f18a0c6-7130-4dd6-9cda-fb2d0228c460 req-400f6f8f-5923-4b68-9d34-d5c8b98f85ef service nova] [instance: 0d42326a-9958-463a-90ae-34fb55e99c5b] Updating instance_info_cache with network_info: [{"id": "fdc0f4f9-f380-4153-b4fb-7073fe3ac06e", "address": "fa:16:3e:5f:c5:df", "network": {"id": "100bb090-83a3-4de8-bd2b-f65900cd5a3e", "bridge": "br-int", "label": "tempest-ServersTestJSON-901620943-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5aba1e066cb4400dbbacc92f393962e6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3b67e519-46cf-44ce-b670-4ba4c0c5b658", "external-id": "nsx-vlan-transportzone-110", "segmentation_id": 110, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfdc0f4f9-f3", "ovs_interfaceid": "fdc0f4f9-f380-4153-b4fb-7073fe3ac06e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 906.511443] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e4713ec6-2cba-42db-ae82-2288f4aa7354 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.523055] env[61839]: DEBUG oslo_vmware.api [None req-ffc025d6-10c2-4e09-b5f6-377b5ab1692a tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Waiting for the task: (returnval){ [ 906.523055] env[61839]: value = "task-1314665" [ 906.523055] env[61839]: _type = "Task" [ 906.523055] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 906.529680] env[61839]: DEBUG nova.objects.instance [None req-7e58e74a-aa1f-4c3c-9aa4-d23c1b92735c tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Lazy-loading 'flavor' on Instance uuid 56369316-a445-4a2a-a0a6-967074104e19 {{(pid=61839) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 906.535335] env[61839]: DEBUG oslo_vmware.api [None req-ffc025d6-10c2-4e09-b5f6-377b5ab1692a tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Task: {'id': task-1314665, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.563891] env[61839]: DEBUG oslo_vmware.api [None req-6791ea0c-d18a-4d55-a906-7cd1e895b298 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1314662, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.587428] env[61839]: DEBUG oslo_vmware.api [None req-471940ce-9b23-4cd3-8f29-5ca6a8a98f90 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': task-1314661, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.81993} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 906.587772] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-471940ce-9b23-4cd3-8f29-5ca6a8a98f90 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Deleted the datastore file {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 906.588026] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-471940ce-9b23-4cd3-8f29-5ca6a8a98f90 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: 40c54d84-8e50-483a-b4e0-5f1cc72b0880] Deleted contents of the VM from datastore datastore1 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 906.588254] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-471940ce-9b23-4cd3-8f29-5ca6a8a98f90 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: 40c54d84-8e50-483a-b4e0-5f1cc72b0880] Instance destroyed {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 906.588464] env[61839]: INFO nova.compute.manager [None req-471940ce-9b23-4cd3-8f29-5ca6a8a98f90 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: 40c54d84-8e50-483a-b4e0-5f1cc72b0880] Took 1.69 seconds to destroy the instance on the hypervisor. [ 906.588750] env[61839]: DEBUG oslo.service.loopingcall [None req-471940ce-9b23-4cd3-8f29-5ca6a8a98f90 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 906.588994] env[61839]: DEBUG nova.compute.manager [-] [instance: 40c54d84-8e50-483a-b4e0-5f1cc72b0880] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 906.589146] env[61839]: DEBUG nova.network.neutron [-] [instance: 40c54d84-8e50-483a-b4e0-5f1cc72b0880] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 906.741212] env[61839]: DEBUG oslo_vmware.api [None req-56232971-235f-4baa-8514-44b4e05db378 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': task-1314663, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.760614] env[61839]: DEBUG oslo_concurrency.lockutils [None req-8237778e-4e32-46ea-924d-34926c83603d tempest-ServerAddressesNegativeTestJSON-1352160384 tempest-ServerAddressesNegativeTestJSON-1352160384-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 906.840137] env[61839]: DEBUG oslo_concurrency.lockutils [None req-40823cea-b50f-4b8d-809e-3a11293f7329 tempest-ServerMetadataTestJSON-214230339 tempest-ServerMetadataTestJSON-214230339-project-member] Releasing lock "refresh_cache-bac4c882-a23d-412f-ae98-f4f21d86681a" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 906.840628] env[61839]: DEBUG nova.compute.manager [None req-40823cea-b50f-4b8d-809e-3a11293f7329 tempest-ServerMetadataTestJSON-214230339 tempest-ServerMetadataTestJSON-214230339-project-member] [instance: bac4c882-a23d-412f-ae98-f4f21d86681a] Instance network_info: |[{"id": "cd0c192b-65df-4a91-85c4-d0e336f93188", "address": "fa:16:3e:d5:df:4f", "network": {"id": "cfc99418-98a0-48e0-a326-310d5c2613b9", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-652379007-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ead4a6a3419348d9b5472d8f6747b9cb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78e1ebb0-0130-446b-bf73-a0e59bbb95cc", "external-id": "nsx-vlan-transportzone-414", "segmentation_id": 414, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcd0c192b-65", "ovs_interfaceid": "cd0c192b-65df-4a91-85c4-d0e336f93188", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 906.841295] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-40823cea-b50f-4b8d-809e-3a11293f7329 tempest-ServerMetadataTestJSON-214230339 tempest-ServerMetadataTestJSON-214230339-project-member] [instance: bac4c882-a23d-412f-ae98-f4f21d86681a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d5:df:4f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '78e1ebb0-0130-446b-bf73-a0e59bbb95cc', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'cd0c192b-65df-4a91-85c4-d0e336f93188', 'vif_model': 'vmxnet3'}] {{(pid=61839) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 906.851225] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-40823cea-b50f-4b8d-809e-3a11293f7329 tempest-ServerMetadataTestJSON-214230339 tempest-ServerMetadataTestJSON-214230339-project-member] Creating folder: Project (ead4a6a3419348d9b5472d8f6747b9cb). Parent ref: group-v281288. 
{{(pid=61839) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 906.851927] env[61839]: DEBUG nova.objects.instance [None req-d2f1d7dc-78e2-44fa-94f8-ee4c41a57b55 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Lazy-loading 'numa_topology' on Instance uuid 694a5d4b-3673-406b-a24a-d37fad33e549 {{(pid=61839) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 906.853223] env[61839]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5465e964-03da-49e3-8be5-e573fd77f580 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.868837] env[61839]: DEBUG oslo_vmware.api [None req-4887ddbd-b554-48d0-94f5-d697450e63c9 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Task: {'id': task-1314664, 'name': ReconfigVM_Task, 'duration_secs': 0.511023} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 906.869141] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-4887ddbd-b554-48d0-94f5-d697450e63c9 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: 042183e2-d203-4d07-a668-df24ba50e242] Reconfigured VM instance instance-00000051 to attach disk [datastore1] 042183e2-d203-4d07-a668-df24ba50e242/042183e2-d203-4d07-a668-df24ba50e242.vmdk or device None with type streamOptimized {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 906.869937] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-caadff42-47b7-4f34-ae12-ef4ac3d59d5f {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.873478] env[61839]: INFO nova.virt.vmwareapi.vm_util [None req-40823cea-b50f-4b8d-809e-3a11293f7329 tempest-ServerMetadataTestJSON-214230339 tempest-ServerMetadataTestJSON-214230339-project-member] Created folder: Project (ead4a6a3419348d9b5472d8f6747b9cb) in parent group-v281288. [ 906.873669] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-40823cea-b50f-4b8d-809e-3a11293f7329 tempest-ServerMetadataTestJSON-214230339 tempest-ServerMetadataTestJSON-214230339-project-member] Creating folder: Instances. Parent ref: group-v281401. {{(pid=61839) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 906.874386] env[61839]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fdb9dcf9-b02e-4ee5-a1c4-d8014514a47f {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.878981] env[61839]: DEBUG oslo_vmware.api [None req-4887ddbd-b554-48d0-94f5-d697450e63c9 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Waiting for the task: (returnval){ [ 906.878981] env[61839]: value = "task-1314667" [ 906.878981] env[61839]: _type = "Task" [ 906.878981] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 906.886795] env[61839]: INFO nova.virt.vmwareapi.vm_util [None req-40823cea-b50f-4b8d-809e-3a11293f7329 tempest-ServerMetadataTestJSON-214230339 tempest-ServerMetadataTestJSON-214230339-project-member] Created folder: Instances in parent group-v281401. 
[ 906.887064] env[61839]: DEBUG oslo.service.loopingcall [None req-40823cea-b50f-4b8d-809e-3a11293f7329 tempest-ServerMetadataTestJSON-214230339 tempest-ServerMetadataTestJSON-214230339-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 906.887826] env[61839]: DEBUG oslo_concurrency.lockutils [None req-5efce340-2499-408f-bda7-cfc4729c36d5 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Lock "a4463efc-ffca-4552-a072-cbf5fe062533" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 13.523s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 906.888426] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bac4c882-a23d-412f-ae98-f4f21d86681a] Creating VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 906.888763] env[61839]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-932f57ae-d5ca-4825-be6e-304c3849112c {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.909167] env[61839]: DEBUG oslo_vmware.api [None req-4887ddbd-b554-48d0-94f5-d697450e63c9 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Task: {'id': task-1314667, 'name': Rename_Task} progress is 6%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.916944] env[61839]: DEBUG oslo_concurrency.lockutils [None req-f6ddf676-45e1-46d7-86c0-b27d63a4d858 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Acquiring lock "3f86a0d5-30fd-42cc-bd40-14bce9d0e56f" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 906.917343] env[61839]: DEBUG oslo_concurrency.lockutils [None req-f6ddf676-45e1-46d7-86c0-b27d63a4d858 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Lock "3f86a0d5-30fd-42cc-bd40-14bce9d0e56f" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 906.917636] env[61839]: DEBUG oslo_concurrency.lockutils [None req-f6ddf676-45e1-46d7-86c0-b27d63a4d858 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Acquiring lock "3f86a0d5-30fd-42cc-bd40-14bce9d0e56f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 906.917914] env[61839]: DEBUG oslo_concurrency.lockutils [None req-f6ddf676-45e1-46d7-86c0-b27d63a4d858 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Lock "3f86a0d5-30fd-42cc-bd40-14bce9d0e56f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 906.918187] env[61839]: DEBUG 
oslo_concurrency.lockutils [None req-f6ddf676-45e1-46d7-86c0-b27d63a4d858 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Lock "3f86a0d5-30fd-42cc-bd40-14bce9d0e56f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 906.920595] env[61839]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 906.920595] env[61839]: value = "task-1314669" [ 906.920595] env[61839]: _type = "Task" [ 906.920595] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 906.925335] env[61839]: INFO nova.compute.manager [None req-f6ddf676-45e1-46d7-86c0-b27d63a4d858 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] [instance: 3f86a0d5-30fd-42cc-bd40-14bce9d0e56f] Terminating instance [ 906.928242] env[61839]: DEBUG nova.compute.manager [None req-f6ddf676-45e1-46d7-86c0-b27d63a4d858 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] [instance: 3f86a0d5-30fd-42cc-bd40-14bce9d0e56f] Start destroying the instance on the hypervisor. {{(pid=61839) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 906.928242] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-f6ddf676-45e1-46d7-86c0-b27d63a4d858 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] [instance: 3f86a0d5-30fd-42cc-bd40-14bce9d0e56f] Destroying instance {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 906.928945] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e24a910-b973-4013-84ee-2df4de76a8e0 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.939493] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314669, 'name': CreateVM_Task} progress is 5%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.945502] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314654, 'name': CreateVM_Task} progress is 99%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.948210] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-f6ddf676-45e1-46d7-86c0-b27d63a4d858 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] [instance: 3f86a0d5-30fd-42cc-bd40-14bce9d0e56f] Powering off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 906.949067] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-581f46c8-828d-4f17-94ab-d9ab4b93bcf7 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.957791] env[61839]: DEBUG oslo_vmware.api [None req-f6ddf676-45e1-46d7-86c0-b27d63a4d858 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Waiting for the task: (returnval){ [ 906.957791] env[61839]: value = "task-1314670" [ 906.957791] env[61839]: _type = "Task" [ 906.957791] env[61839]: } to complete. 
{{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 906.968119] env[61839]: DEBUG oslo_vmware.api [None req-f6ddf676-45e1-46d7-86c0-b27d63a4d858 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Task: {'id': task-1314670, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.012999] env[61839]: DEBUG nova.network.neutron [None req-c8b7ef29-926a-40c9-9023-eb5013fb90e1 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 7f7b3f51-3e96-49f1-a84a-81ae649e6938] Successfully updated port: 6b6f5c89-f3fe-4e29-82b8-e9e8f2658bfd {{(pid=61839) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 907.014760] env[61839]: DEBUG oslo_concurrency.lockutils [req-7f18a0c6-7130-4dd6-9cda-fb2d0228c460 req-400f6f8f-5923-4b68-9d34-d5c8b98f85ef service nova] Releasing lock "refresh_cache-0d42326a-9958-463a-90ae-34fb55e99c5b" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 907.034449] env[61839]: DEBUG oslo_vmware.api [None req-ffc025d6-10c2-4e09-b5f6-377b5ab1692a tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Task: {'id': task-1314665, 'name': PowerOnVM_Task} progress is 100%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.039717] env[61839]: DEBUG oslo_concurrency.lockutils [None req-7e58e74a-aa1f-4c3c-9aa4-d23c1b92735c tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Lock "56369316-a445-4a2a-a0a6-967074104e19" "released" by "nova.compute.manager.ComputeManager.attach_volume.<locals>.do_attach_volume" :: held 7.367s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 907.064712] env[61839]: DEBUG oslo_vmware.api [None req-6791ea0c-d18a-4d55-a906-7cd1e895b298 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1314662, 'name': CreateSnapshot_Task, 'duration_secs': 0.546571} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 907.064940] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-6791ea0c-d18a-4d55-a906-7cd1e895b298 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4] Created Snapshot of the VM instance {{(pid=61839) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 907.065779] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65143128-89f4-4b84-905e-4c782735a5de {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.240406] env[61839]: DEBUG oslo_vmware.api [None req-56232971-235f-4baa-8514-44b4e05db378 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': task-1314663, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.558615} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 907.241159] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-56232971-235f-4baa-8514-44b4e05db378 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk to [datastore1] 0d42326a-9958-463a-90ae-34fb55e99c5b/0d42326a-9958-463a-90ae-34fb55e99c5b.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 907.241159] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-56232971-235f-4baa-8514-44b4e05db378 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 0d42326a-9958-463a-90ae-34fb55e99c5b] Extending root virtual disk to 1048576 {{(pid=61839) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 907.241335] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-491bb7b5-02c2-4240-bc0c-d85e23f0f93c {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.248531] env[61839]: DEBUG oslo_vmware.api [None req-56232971-235f-4baa-8514-44b4e05db378 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Waiting for the task: (returnval){ [ 907.248531] env[61839]: value = "task-1314671" [ 907.248531] env[61839]: _type = "Task" [ 907.248531] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 907.257265] env[61839]: DEBUG oslo_vmware.api [None req-56232971-235f-4baa-8514-44b4e05db378 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': task-1314671, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.357170] env[61839]: DEBUG nova.objects.base [None req-d2f1d7dc-78e2-44fa-94f8-ee4c41a57b55 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Object Instance<694a5d4b-3673-406b-a24a-d37fad33e549> lazy-loaded attributes: resources,numa_topology {{(pid=61839) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 907.393783] env[61839]: DEBUG oslo_vmware.api [None req-4887ddbd-b554-48d0-94f5-d697450e63c9 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Task: {'id': task-1314667, 'name': Rename_Task, 'duration_secs': 0.169789} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 907.393946] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-4887ddbd-b554-48d0-94f5-d697450e63c9 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: 042183e2-d203-4d07-a668-df24ba50e242] Powering on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 907.394504] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-31c03573-2eab-4996-b364-b878fbec0a6b {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.404033] env[61839]: DEBUG oslo_vmware.api [None req-4887ddbd-b554-48d0-94f5-d697450e63c9 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Waiting for the task: (returnval){ [ 907.404033] env[61839]: value = "task-1314672" [ 907.404033] env[61839]: _type = "Task" [ 907.404033] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 907.415796] env[61839]: DEBUG oslo_vmware.api [None req-4887ddbd-b554-48d0-94f5-d697450e63c9 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Task: {'id': task-1314672, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.431751] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314669, 'name': CreateVM_Task} progress is 99%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.448811] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314654, 'name': CreateVM_Task} progress is 99%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.468725] env[61839]: DEBUG oslo_vmware.api [None req-f6ddf676-45e1-46d7-86c0-b27d63a4d858 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Task: {'id': task-1314670, 'name': PowerOffVM_Task, 'duration_secs': 0.277173} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 907.471400] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-f6ddf676-45e1-46d7-86c0-b27d63a4d858 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] [instance: 3f86a0d5-30fd-42cc-bd40-14bce9d0e56f] Powered off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 907.471601] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-f6ddf676-45e1-46d7-86c0-b27d63a4d858 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] [instance: 3f86a0d5-30fd-42cc-bd40-14bce9d0e56f] Unregistering the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 907.472118] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5612f5b3-127a-43a6-b24f-17bcf1a413f2 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.517337] env[61839]: DEBUG oslo_concurrency.lockutils [None req-c8b7ef29-926a-40c9-9023-eb5013fb90e1 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Acquiring lock "refresh_cache-7f7b3f51-3e96-49f1-a84a-81ae649e6938" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 907.517591] env[61839]: DEBUG oslo_concurrency.lockutils [None req-c8b7ef29-926a-40c9-9023-eb5013fb90e1 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Acquired lock "refresh_cache-7f7b3f51-3e96-49f1-a84a-81ae649e6938" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 907.517799] env[61839]: DEBUG nova.network.neutron [None req-c8b7ef29-926a-40c9-9023-eb5013fb90e1 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 7f7b3f51-3e96-49f1-a84a-81ae649e6938] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 907.521823] env[61839]: DEBUG nova.compute.manager [req-6ebc4c39-6b8d-43d0-89e3-d8e5722742ec req-587b6d0b-9ec7-45ad-9c0a-ac2ae2af74fb service nova] [instance: bac4c882-a23d-412f-ae98-f4f21d86681a] Received event network-vif-plugged-cd0c192b-65df-4a91-85c4-d0e336f93188 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 907.522143] env[61839]: DEBUG oslo_concurrency.lockutils [req-6ebc4c39-6b8d-43d0-89e3-d8e5722742ec req-587b6d0b-9ec7-45ad-9c0a-ac2ae2af74fb service nova] Acquiring lock "bac4c882-a23d-412f-ae98-f4f21d86681a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 907.522368] env[61839]: DEBUG oslo_concurrency.lockutils [req-6ebc4c39-6b8d-43d0-89e3-d8e5722742ec req-587b6d0b-9ec7-45ad-9c0a-ac2ae2af74fb service nova] Lock "bac4c882-a23d-412f-ae98-f4f21d86681a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 907.522539] env[61839]: DEBUG oslo_concurrency.lockutils [req-6ebc4c39-6b8d-43d0-89e3-d8e5722742ec req-587b6d0b-9ec7-45ad-9c0a-ac2ae2af74fb service nova] Lock 
"bac4c882-a23d-412f-ae98-f4f21d86681a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 907.522816] env[61839]: DEBUG nova.compute.manager [req-6ebc4c39-6b8d-43d0-89e3-d8e5722742ec req-587b6d0b-9ec7-45ad-9c0a-ac2ae2af74fb service nova] [instance: bac4c882-a23d-412f-ae98-f4f21d86681a] No waiting events found dispatching network-vif-plugged-cd0c192b-65df-4a91-85c4-d0e336f93188 {{(pid=61839) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 907.522888] env[61839]: WARNING nova.compute.manager [req-6ebc4c39-6b8d-43d0-89e3-d8e5722742ec req-587b6d0b-9ec7-45ad-9c0a-ac2ae2af74fb service nova] [instance: bac4c882-a23d-412f-ae98-f4f21d86681a] Received unexpected event network-vif-plugged-cd0c192b-65df-4a91-85c4-d0e336f93188 for instance with vm_state building and task_state spawning. [ 907.523190] env[61839]: DEBUG nova.compute.manager [req-6ebc4c39-6b8d-43d0-89e3-d8e5722742ec req-587b6d0b-9ec7-45ad-9c0a-ac2ae2af74fb service nova] [instance: bac4c882-a23d-412f-ae98-f4f21d86681a] Received event network-changed-cd0c192b-65df-4a91-85c4-d0e336f93188 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 907.523407] env[61839]: DEBUG nova.compute.manager [req-6ebc4c39-6b8d-43d0-89e3-d8e5722742ec req-587b6d0b-9ec7-45ad-9c0a-ac2ae2af74fb service nova] [instance: bac4c882-a23d-412f-ae98-f4f21d86681a] Refreshing instance network info cache due to event network-changed-cd0c192b-65df-4a91-85c4-d0e336f93188. {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 907.523644] env[61839]: DEBUG oslo_concurrency.lockutils [req-6ebc4c39-6b8d-43d0-89e3-d8e5722742ec req-587b6d0b-9ec7-45ad-9c0a-ac2ae2af74fb service nova] Acquiring lock "refresh_cache-bac4c882-a23d-412f-ae98-f4f21d86681a" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 907.524610] env[61839]: DEBUG oslo_concurrency.lockutils [req-6ebc4c39-6b8d-43d0-89e3-d8e5722742ec req-587b6d0b-9ec7-45ad-9c0a-ac2ae2af74fb service nova] Acquired lock "refresh_cache-bac4c882-a23d-412f-ae98-f4f21d86681a" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 907.524805] env[61839]: DEBUG nova.network.neutron [req-6ebc4c39-6b8d-43d0-89e3-d8e5722742ec req-587b6d0b-9ec7-45ad-9c0a-ac2ae2af74fb service nova] [instance: bac4c882-a23d-412f-ae98-f4f21d86681a] Refreshing network info cache for port cd0c192b-65df-4a91-85c4-d0e336f93188 {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 907.541879] env[61839]: DEBUG oslo_vmware.api [None req-ffc025d6-10c2-4e09-b5f6-377b5ab1692a tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Task: {'id': task-1314665, 'name': PowerOnVM_Task, 'duration_secs': 0.57299} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 907.544855] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-ffc025d6-10c2-4e09-b5f6-377b5ab1692a tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: fa8a2265-291e-4424-bea1-72574e495a72] Powered on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 907.545525] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-ffc025d6-10c2-4e09-b5f6-377b5ab1692a tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: fa8a2265-291e-4424-bea1-72574e495a72] Updating instance 'fa8a2265-291e-4424-bea1-72574e495a72' progress to 100 {{(pid=61839) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 907.587348] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-6791ea0c-d18a-4d55-a906-7cd1e895b298 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4] Creating linked-clone VM from snapshot {{(pid=61839) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 907.591649] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-8d49c908-814a-4479-94c6-1f9b07fb1e99 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.598310] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-f6ddf676-45e1-46d7-86c0-b27d63a4d858 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] [instance: 3f86a0d5-30fd-42cc-bd40-14bce9d0e56f] Unregistered the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 907.598536] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-f6ddf676-45e1-46d7-86c0-b27d63a4d858 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] [instance: 3f86a0d5-30fd-42cc-bd40-14bce9d0e56f] Deleting contents of the VM from datastore datastore2 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 907.599901] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-f6ddf676-45e1-46d7-86c0-b27d63a4d858 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Deleting the datastore file [datastore2] 3f86a0d5-30fd-42cc-bd40-14bce9d0e56f {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 907.600092] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9ae88a1b-e66a-4075-9fd5-3b9741c5a9fd {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.608743] env[61839]: DEBUG oslo_vmware.api [None req-f6ddf676-45e1-46d7-86c0-b27d63a4d858 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Waiting for the task: (returnval){ [ 907.608743] env[61839]: value = "task-1314674" [ 907.608743] env[61839]: _type = "Task" [ 907.608743] env[61839]: } to complete. 
{{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 907.611243] env[61839]: DEBUG oslo_vmware.api [None req-6791ea0c-d18a-4d55-a906-7cd1e895b298 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Waiting for the task: (returnval){ [ 907.611243] env[61839]: value = "task-1314675" [ 907.611243] env[61839]: _type = "Task" [ 907.611243] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 907.637214] env[61839]: DEBUG oslo_vmware.api [None req-f6ddf676-45e1-46d7-86c0-b27d63a4d858 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Task: {'id': task-1314674, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.647935] env[61839]: DEBUG oslo_vmware.api [None req-6791ea0c-d18a-4d55-a906-7cd1e895b298 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1314675, 'name': CloneVM_Task} progress is 10%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.706374] env[61839]: DEBUG nova.compute.manager [req-9a8bc790-f82a-4c6f-94da-d1c113985ba5 req-f30209c4-ee3b-4164-ab1a-ca98d9df01e4 service nova] [instance: 40c54d84-8e50-483a-b4e0-5f1cc72b0880] Received event network-vif-deleted-4373753c-2ab4-4f61-8117-89f623225621 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 907.706583] env[61839]: INFO nova.compute.manager [req-9a8bc790-f82a-4c6f-94da-d1c113985ba5 req-f30209c4-ee3b-4164-ab1a-ca98d9df01e4 service nova] [instance: 40c54d84-8e50-483a-b4e0-5f1cc72b0880] Neutron deleted interface 4373753c-2ab4-4f61-8117-89f623225621; detaching it from the instance and deleting it from the info cache [ 907.706849] env[61839]: DEBUG nova.network.neutron [req-9a8bc790-f82a-4c6f-94da-d1c113985ba5 req-f30209c4-ee3b-4164-ab1a-ca98d9df01e4 service nova] [instance: 40c54d84-8e50-483a-b4e0-5f1cc72b0880] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 907.760215] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0cc41c8-7523-417d-8b12-a2e9ea022f65 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.767425] env[61839]: DEBUG oslo_vmware.api [None req-56232971-235f-4baa-8514-44b4e05db378 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': task-1314671, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.190783} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 907.768170] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-56232971-235f-4baa-8514-44b4e05db378 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 0d42326a-9958-463a-90ae-34fb55e99c5b] Extended root virtual disk {{(pid=61839) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 907.769052] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1916fa62-f08a-485f-8ab4-cec4b44437d3 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.775402] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf95d44b-469c-4ffb-b57c-e91338caa9fe {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.797627] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-56232971-235f-4baa-8514-44b4e05db378 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 0d42326a-9958-463a-90ae-34fb55e99c5b] Reconfiguring VM instance instance-00000053 to attach disk [datastore1] 0d42326a-9958-463a-90ae-34fb55e99c5b/0d42326a-9958-463a-90ae-34fb55e99c5b.vmdk or device None with type sparse {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 907.798623] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5c1ea515-3edd-43c9-b106-da8e27ecc592 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.843956] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99421ee2-882f-4546-bec4-df3918fb8144 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.852186] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-848b089a-5726-4ae0-afe5-11ed97fae602 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.856729] env[61839]: DEBUG oslo_vmware.api [None req-56232971-235f-4baa-8514-44b4e05db378 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Waiting for the task: (returnval){ [ 907.856729] env[61839]: value = "task-1314676" [ 907.856729] env[61839]: _type = "Task" [ 907.856729] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 907.869111] env[61839]: DEBUG nova.compute.provider_tree [None req-d2f1d7dc-78e2-44fa-94f8-ee4c41a57b55 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 907.878835] env[61839]: DEBUG oslo_vmware.api [None req-56232971-235f-4baa-8514-44b4e05db378 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': task-1314676, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.915191] env[61839]: DEBUG oslo_vmware.api [None req-4887ddbd-b554-48d0-94f5-d697450e63c9 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Task: {'id': task-1314672, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.933906] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314669, 'name': CreateVM_Task, 'duration_secs': 0.655999} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 907.937407] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bac4c882-a23d-412f-ae98-f4f21d86681a] Created VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 907.938170] env[61839]: DEBUG oslo_concurrency.lockutils [None req-40823cea-b50f-4b8d-809e-3a11293f7329 tempest-ServerMetadataTestJSON-214230339 tempest-ServerMetadataTestJSON-214230339-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 907.938346] env[61839]: DEBUG oslo_concurrency.lockutils [None req-40823cea-b50f-4b8d-809e-3a11293f7329 tempest-ServerMetadataTestJSON-214230339 tempest-ServerMetadataTestJSON-214230339-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 907.938691] env[61839]: DEBUG oslo_concurrency.lockutils [None req-40823cea-b50f-4b8d-809e-3a11293f7329 tempest-ServerMetadataTestJSON-214230339 tempest-ServerMetadataTestJSON-214230339-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 907.939409] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7429c3f3-61ac-413f-b587-500a1feb2842 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.945197] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314654, 'name': CreateVM_Task, 'duration_secs': 5.101713} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 907.946977] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 65f34f9e-353a-4f94-8f79-9bda89451885] Created VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 907.947555] env[61839]: DEBUG oslo_vmware.api [None req-40823cea-b50f-4b8d-809e-3a11293f7329 tempest-ServerMetadataTestJSON-214230339 tempest-ServerMetadataTestJSON-214230339-project-member] Waiting for the task: (returnval){ [ 907.947555] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]5289f825-88f4-5a4b-9ee0-69909f56eae8" [ 907.947555] env[61839]: _type = "Task" [ 907.947555] env[61839]: } to complete. 
{{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 907.948487] env[61839]: DEBUG oslo_concurrency.lockutils [None req-095248fd-e632-43a1-88be-4cc8540a5623 tempest-ServerGroupTestJSON-1627784352 tempest-ServerGroupTestJSON-1627784352-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 907.959354] env[61839]: DEBUG oslo_vmware.api [None req-40823cea-b50f-4b8d-809e-3a11293f7329 tempest-ServerMetadataTestJSON-214230339 tempest-ServerMetadataTestJSON-214230339-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]5289f825-88f4-5a4b-9ee0-69909f56eae8, 'name': SearchDatastore_Task, 'duration_secs': 0.010885} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 907.959730] env[61839]: DEBUG oslo_concurrency.lockutils [None req-40823cea-b50f-4b8d-809e-3a11293f7329 tempest-ServerMetadataTestJSON-214230339 tempest-ServerMetadataTestJSON-214230339-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 907.960347] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-40823cea-b50f-4b8d-809e-3a11293f7329 tempest-ServerMetadataTestJSON-214230339 tempest-ServerMetadataTestJSON-214230339-project-member] [instance: bac4c882-a23d-412f-ae98-f4f21d86681a] Processing image e497cc62-282a-4a70-9770-22d80d8a1013 {{(pid=61839) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 907.960347] env[61839]: DEBUG oslo_concurrency.lockutils [None req-40823cea-b50f-4b8d-809e-3a11293f7329 tempest-ServerMetadataTestJSON-214230339 tempest-ServerMetadataTestJSON-214230339-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 907.960347] env[61839]: DEBUG oslo_concurrency.lockutils [None req-40823cea-b50f-4b8d-809e-3a11293f7329 tempest-ServerMetadataTestJSON-214230339 tempest-ServerMetadataTestJSON-214230339-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 907.960569] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-40823cea-b50f-4b8d-809e-3a11293f7329 tempest-ServerMetadataTestJSON-214230339 tempest-ServerMetadataTestJSON-214230339-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 907.964020] env[61839]: DEBUG oslo_concurrency.lockutils [None req-095248fd-e632-43a1-88be-4cc8540a5623 tempest-ServerGroupTestJSON-1627784352 tempest-ServerGroupTestJSON-1627784352-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 907.964020] env[61839]: DEBUG oslo_concurrency.lockutils [None req-095248fd-e632-43a1-88be-4cc8540a5623 tempest-ServerGroupTestJSON-1627784352 
tempest-ServerGroupTestJSON-1627784352-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 907.964020] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-baa20ae9-b8a5-4aed-b968-0b56cbadc765 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.964020] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-881796b4-689b-4a65-a6d9-646e3ffe83b0 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.974025] env[61839]: DEBUG oslo_vmware.api [None req-095248fd-e632-43a1-88be-4cc8540a5623 tempest-ServerGroupTestJSON-1627784352 tempest-ServerGroupTestJSON-1627784352-project-member] Waiting for the task: (returnval){ [ 907.974025] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52666012-709c-6dc5-1de8-6dc2f4c19004" [ 907.974025] env[61839]: _type = "Task" [ 907.974025] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 907.975806] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-40823cea-b50f-4b8d-809e-3a11293f7329 tempest-ServerMetadataTestJSON-214230339 tempest-ServerMetadataTestJSON-214230339-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 907.976038] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-40823cea-b50f-4b8d-809e-3a11293f7329 tempest-ServerMetadataTestJSON-214230339 tempest-ServerMetadataTestJSON-214230339-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61839) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 907.977206] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dcb6dd76-703d-4cb1-969e-9963577297a3 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.983194] env[61839]: DEBUG oslo_vmware.api [None req-095248fd-e632-43a1-88be-4cc8540a5623 tempest-ServerGroupTestJSON-1627784352 tempest-ServerGroupTestJSON-1627784352-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52666012-709c-6dc5-1de8-6dc2f4c19004, 'name': SearchDatastore_Task, 'duration_secs': 0.010272} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 907.983985] env[61839]: DEBUG oslo_concurrency.lockutils [None req-095248fd-e632-43a1-88be-4cc8540a5623 tempest-ServerGroupTestJSON-1627784352 tempest-ServerGroupTestJSON-1627784352-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 907.984317] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-095248fd-e632-43a1-88be-4cc8540a5623 tempest-ServerGroupTestJSON-1627784352 tempest-ServerGroupTestJSON-1627784352-project-member] [instance: 65f34f9e-353a-4f94-8f79-9bda89451885] Processing image e497cc62-282a-4a70-9770-22d80d8a1013 {{(pid=61839) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 907.984568] env[61839]: DEBUG oslo_concurrency.lockutils [None req-095248fd-e632-43a1-88be-4cc8540a5623 tempest-ServerGroupTestJSON-1627784352 tempest-ServerGroupTestJSON-1627784352-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 907.987856] env[61839]: DEBUG oslo_vmware.api [None req-40823cea-b50f-4b8d-809e-3a11293f7329 tempest-ServerMetadataTestJSON-214230339 tempest-ServerMetadataTestJSON-214230339-project-member] Waiting for the task: (returnval){ [ 907.987856] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]527529ec-f694-9f72-2cf0-9f9f72975374" [ 907.987856] env[61839]: _type = "Task" [ 907.987856] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 907.998775] env[61839]: DEBUG oslo_vmware.api [None req-40823cea-b50f-4b8d-809e-3a11293f7329 tempest-ServerMetadataTestJSON-214230339 tempest-ServerMetadataTestJSON-214230339-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]527529ec-f694-9f72-2cf0-9f9f72975374, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.037387] env[61839]: DEBUG nova.network.neutron [-] [instance: 40c54d84-8e50-483a-b4e0-5f1cc72b0880] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 908.091600] env[61839]: INFO nova.compute.manager [None req-9dd4913a-7b16-4970-942a-02a177c420e2 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 56369316-a445-4a2a-a0a6-967074104e19] Rescuing [ 908.091870] env[61839]: DEBUG oslo_concurrency.lockutils [None req-9dd4913a-7b16-4970-942a-02a177c420e2 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Acquiring lock "refresh_cache-56369316-a445-4a2a-a0a6-967074104e19" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 908.092038] env[61839]: DEBUG oslo_concurrency.lockutils [None req-9dd4913a-7b16-4970-942a-02a177c420e2 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Acquired lock "refresh_cache-56369316-a445-4a2a-a0a6-967074104e19" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 908.092227] env[61839]: DEBUG nova.network.neutron [None req-9dd4913a-7b16-4970-942a-02a177c420e2 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 56369316-a445-4a2a-a0a6-967074104e19] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 908.094152] env[61839]: DEBUG nova.network.neutron [None req-c8b7ef29-926a-40c9-9023-eb5013fb90e1 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 7f7b3f51-3e96-49f1-a84a-81ae649e6938] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 908.125502] env[61839]: DEBUG oslo_vmware.api [None req-f6ddf676-45e1-46d7-86c0-b27d63a4d858 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Task: {'id': task-1314674, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.385849} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 908.129565] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-f6ddf676-45e1-46d7-86c0-b27d63a4d858 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Deleted the datastore file {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 908.130111] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-f6ddf676-45e1-46d7-86c0-b27d63a4d858 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] [instance: 3f86a0d5-30fd-42cc-bd40-14bce9d0e56f] Deleted contents of the VM from datastore datastore2 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 908.130415] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-f6ddf676-45e1-46d7-86c0-b27d63a4d858 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] [instance: 3f86a0d5-30fd-42cc-bd40-14bce9d0e56f] Instance destroyed {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 908.130744] env[61839]: INFO nova.compute.manager [None req-f6ddf676-45e1-46d7-86c0-b27d63a4d858 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] [instance: 3f86a0d5-30fd-42cc-bd40-14bce9d0e56f] Took 1.20 seconds to destroy the instance on the hypervisor. [ 908.131130] env[61839]: DEBUG oslo.service.loopingcall [None req-f6ddf676-45e1-46d7-86c0-b27d63a4d858 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 908.131623] env[61839]: DEBUG nova.compute.manager [-] [instance: 3f86a0d5-30fd-42cc-bd40-14bce9d0e56f] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 908.131815] env[61839]: DEBUG nova.network.neutron [-] [instance: 3f86a0d5-30fd-42cc-bd40-14bce9d0e56f] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 908.147550] env[61839]: DEBUG oslo_vmware.api [None req-6791ea0c-d18a-4d55-a906-7cd1e895b298 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1314675, 'name': CloneVM_Task} progress is 94%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.213618] env[61839]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-97c1681b-3a4e-4fb4-85c4-b614a63b93e0 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.227495] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a3a764e-1eac-4acc-9fe5-d69e7104e77f {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.263431] env[61839]: DEBUG nova.compute.manager [req-9a8bc790-f82a-4c6f-94da-d1c113985ba5 req-f30209c4-ee3b-4164-ab1a-ca98d9df01e4 service nova] [instance: 40c54d84-8e50-483a-b4e0-5f1cc72b0880] Detach interface failed, port_id=4373753c-2ab4-4f61-8117-89f623225621, reason: Instance 40c54d84-8e50-483a-b4e0-5f1cc72b0880 could not be found. {{(pid=61839) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 908.302785] env[61839]: DEBUG nova.network.neutron [req-6ebc4c39-6b8d-43d0-89e3-d8e5722742ec req-587b6d0b-9ec7-45ad-9c0a-ac2ae2af74fb service nova] [instance: bac4c882-a23d-412f-ae98-f4f21d86681a] Updated VIF entry in instance network info cache for port cd0c192b-65df-4a91-85c4-d0e336f93188. {{(pid=61839) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 908.303119] env[61839]: DEBUG nova.network.neutron [req-6ebc4c39-6b8d-43d0-89e3-d8e5722742ec req-587b6d0b-9ec7-45ad-9c0a-ac2ae2af74fb service nova] [instance: bac4c882-a23d-412f-ae98-f4f21d86681a] Updating instance_info_cache with network_info: [{"id": "cd0c192b-65df-4a91-85c4-d0e336f93188", "address": "fa:16:3e:d5:df:4f", "network": {"id": "cfc99418-98a0-48e0-a326-310d5c2613b9", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-652379007-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ead4a6a3419348d9b5472d8f6747b9cb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78e1ebb0-0130-446b-bf73-a0e59bbb95cc", "external-id": "nsx-vlan-transportzone-414", "segmentation_id": 414, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcd0c192b-65", "ovs_interfaceid": "cd0c192b-65df-4a91-85c4-d0e336f93188", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 908.374769] env[61839]: DEBUG nova.scheduler.client.report [None req-d2f1d7dc-78e2-44fa-94f8-ee4c41a57b55 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 
400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 908.382338] env[61839]: DEBUG oslo_vmware.api [None req-56232971-235f-4baa-8514-44b4e05db378 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': task-1314676, 'name': ReconfigVM_Task, 'duration_secs': 0.352471} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 908.384903] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-56232971-235f-4baa-8514-44b4e05db378 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 0d42326a-9958-463a-90ae-34fb55e99c5b] Reconfigured VM instance instance-00000053 to attach disk [datastore1] 0d42326a-9958-463a-90ae-34fb55e99c5b/0d42326a-9958-463a-90ae-34fb55e99c5b.vmdk or device None with type sparse {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 908.386248] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-bdf416a3-87fe-4175-b1de-e1690ef83572 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.397017] env[61839]: DEBUG oslo_vmware.api [None req-56232971-235f-4baa-8514-44b4e05db378 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Waiting for the task: (returnval){ [ 908.397017] env[61839]: value = "task-1314677" [ 908.397017] env[61839]: _type = "Task" [ 908.397017] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 908.415032] env[61839]: DEBUG oslo_vmware.api [None req-56232971-235f-4baa-8514-44b4e05db378 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': task-1314677, 'name': Rename_Task} progress is 5%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.417989] env[61839]: DEBUG oslo_vmware.api [None req-4887ddbd-b554-48d0-94f5-d697450e63c9 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Task: {'id': task-1314672, 'name': PowerOnVM_Task} progress is 100%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.499388] env[61839]: DEBUG oslo_vmware.api [None req-40823cea-b50f-4b8d-809e-3a11293f7329 tempest-ServerMetadataTestJSON-214230339 tempest-ServerMetadataTestJSON-214230339-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]527529ec-f694-9f72-2cf0-9f9f72975374, 'name': SearchDatastore_Task, 'duration_secs': 0.009972} completed successfully. 
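
The alternating api.py:434 / api.py:444 records above are oslo.vmware's task poller at work: wait_for_task re-reads the task state on a fixed interval, logging a "progress is N%" line on every poll and a "completed successfully" line once the task reaches its terminal state. A minimal self-contained sketch of that polling pattern, with a hypothetical get_task_info() callable standing in for the PropertyCollector read the real code performs (the interval value is illustrative, not oslo.vmware's exact default):

    import time

    POLL_INTERVAL = 0.5  # seconds between polls; illustrative only

    class TaskFailed(Exception):
        pass

    def wait_for_task(get_task_info, poll_interval=POLL_INTERVAL):
        # get_task_info is a hypothetical stand-in returning an object with
        # .state ('running' | 'success' | 'error'), .progress and .result.
        while True:
            info = get_task_info()
            if info.state == "running":
                print("Task progress is %d%%." % info.progress)  # cf. api.py:434
                time.sleep(poll_interval)
            elif info.state == "success":
                print("Task completed successfully.")            # cf. api.py:444
                return info.result
            else:
                raise TaskFailed(getattr(info, "error", "unknown error"))

The production poller also handles the queued state and drives the loop with oslo.service's looping-call machinery rather than a bare while loop.
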
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 908.500271] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f34970dd-f928-4c12-9135-d9089ab26066 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.506886] env[61839]: DEBUG oslo_vmware.api [None req-40823cea-b50f-4b8d-809e-3a11293f7329 tempest-ServerMetadataTestJSON-214230339 tempest-ServerMetadataTestJSON-214230339-project-member] Waiting for the task: (returnval){ [ 908.506886] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52f7de5c-045c-cd1d-7988-c992b2bdfffb" [ 908.506886] env[61839]: _type = "Task" [ 908.506886] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 908.515497] env[61839]: DEBUG oslo_vmware.api [None req-40823cea-b50f-4b8d-809e-3a11293f7329 tempest-ServerMetadataTestJSON-214230339 tempest-ServerMetadataTestJSON-214230339-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52f7de5c-045c-cd1d-7988-c992b2bdfffb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.539683] env[61839]: INFO nova.compute.manager [-] [instance: 40c54d84-8e50-483a-b4e0-5f1cc72b0880] Took 1.95 seconds to deallocate network for instance. [ 908.556663] env[61839]: DEBUG nova.network.neutron [None req-c8b7ef29-926a-40c9-9023-eb5013fb90e1 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 7f7b3f51-3e96-49f1-a84a-81ae649e6938] Updating instance_info_cache with network_info: [{"id": "6b6f5c89-f3fe-4e29-82b8-e9e8f2658bfd", "address": "fa:16:3e:17:50:4a", "network": {"id": "90aa5d36-d3ac-4a62-9f9d-7578cd9ac466", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-2104030849-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "48d8c406ff504d71bba5fb74caf11c14", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eed34ae1-5f7f-4deb-9db8-85eaa1e60c29", "external-id": "nsx-vlan-transportzone-780", "segmentation_id": 780, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6b6f5c89-f3", "ovs_interfaceid": "6b6f5c89-f3fe-4e29-82b8-e9e8f2658bfd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 908.636320] env[61839]: DEBUG oslo_vmware.api [None req-6791ea0c-d18a-4d55-a906-7cd1e895b298 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1314675, 'name': CloneVM_Task} progress is 94%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.810679] env[61839]: DEBUG oslo_concurrency.lockutils [req-6ebc4c39-6b8d-43d0-89e3-d8e5722742ec req-587b6d0b-9ec7-45ad-9c0a-ac2ae2af74fb service nova] Releasing lock "refresh_cache-bac4c882-a23d-412f-ae98-f4f21d86681a" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 908.810995] env[61839]: DEBUG nova.compute.manager [req-6ebc4c39-6b8d-43d0-89e3-d8e5722742ec req-587b6d0b-9ec7-45ad-9c0a-ac2ae2af74fb service nova] [instance: 7f7b3f51-3e96-49f1-a84a-81ae649e6938] Received event network-vif-plugged-6b6f5c89-f3fe-4e29-82b8-e9e8f2658bfd {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 908.811254] env[61839]: DEBUG oslo_concurrency.lockutils [req-6ebc4c39-6b8d-43d0-89e3-d8e5722742ec req-587b6d0b-9ec7-45ad-9c0a-ac2ae2af74fb service nova] Acquiring lock "7f7b3f51-3e96-49f1-a84a-81ae649e6938-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 908.811515] env[61839]: DEBUG oslo_concurrency.lockutils [req-6ebc4c39-6b8d-43d0-89e3-d8e5722742ec req-587b6d0b-9ec7-45ad-9c0a-ac2ae2af74fb service nova] Lock "7f7b3f51-3e96-49f1-a84a-81ae649e6938-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 908.811691] env[61839]: DEBUG oslo_concurrency.lockutils [req-6ebc4c39-6b8d-43d0-89e3-d8e5722742ec req-587b6d0b-9ec7-45ad-9c0a-ac2ae2af74fb service nova] Lock "7f7b3f51-3e96-49f1-a84a-81ae649e6938-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 908.811865] env[61839]: DEBUG nova.compute.manager [req-6ebc4c39-6b8d-43d0-89e3-d8e5722742ec req-587b6d0b-9ec7-45ad-9c0a-ac2ae2af74fb service nova] [instance: 7f7b3f51-3e96-49f1-a84a-81ae649e6938] No waiting events found dispatching network-vif-plugged-6b6f5c89-f3fe-4e29-82b8-e9e8f2658bfd {{(pid=61839) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 908.812083] env[61839]: WARNING nova.compute.manager [req-6ebc4c39-6b8d-43d0-89e3-d8e5722742ec req-587b6d0b-9ec7-45ad-9c0a-ac2ae2af74fb service nova] [instance: 7f7b3f51-3e96-49f1-a84a-81ae649e6938] Received unexpected event network-vif-plugged-6b6f5c89-f3fe-4e29-82b8-e9e8f2658bfd for instance with vm_state building and task_state spawning. [ 908.812273] env[61839]: DEBUG nova.compute.manager [req-6ebc4c39-6b8d-43d0-89e3-d8e5722742ec req-587b6d0b-9ec7-45ad-9c0a-ac2ae2af74fb service nova] [instance: 7f7b3f51-3e96-49f1-a84a-81ae649e6938] Received event network-changed-6b6f5c89-f3fe-4e29-82b8-e9e8f2658bfd {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 908.812452] env[61839]: DEBUG nova.compute.manager [req-6ebc4c39-6b8d-43d0-89e3-d8e5722742ec req-587b6d0b-9ec7-45ad-9c0a-ac2ae2af74fb service nova] [instance: 7f7b3f51-3e96-49f1-a84a-81ae649e6938] Refreshing instance network info cache due to event network-changed-6b6f5c89-f3fe-4e29-82b8-e9e8f2658bfd. 
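
The network-vif-plugged handling just above shows Nova's external-event handshake from the receiving side: a spawning thread may register a waiter for an event tag, and when Neutron delivers the event the compute manager pops that waiter under the per-instance "-events" lock; when nothing is registered (as here, while the VM is still building), the event is logged as unexpected and dropped. A simplified model of that registry, assuming threading.Event waiters (the real nova.compute.manager.InstanceEvents also synchronizes registration and supports timeouts):

    import threading
    from collections import defaultdict

    class InstanceEvents:
        # Simplified sketch; keyed by instance UUID and an event tag such
        # as 'network-vif-plugged-<port_id>'.
        def __init__(self):
            self._events = defaultdict(dict)

        def prepare_for_event(self, instance_uuid, tag):
            event = threading.Event()
            self._events[instance_uuid][tag] = event
            return event

        def pop_instance_event(self, instance_uuid, tag):
            # None corresponds to "No waiting events found dispatching ..."
            return self._events[instance_uuid].pop(tag, None)

    def external_instance_event(events, instance_uuid, tag):
        waiter = events.pop_instance_event(instance_uuid, tag)
        if waiter is None:
            print("Received unexpected event %s for instance %s" % (tag, instance_uuid))
        else:
            waiter.set()  # unblocks the thread waiting in the spawn path
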
{{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 908.812632] env[61839]: DEBUG oslo_concurrency.lockutils [req-6ebc4c39-6b8d-43d0-89e3-d8e5722742ec req-587b6d0b-9ec7-45ad-9c0a-ac2ae2af74fb service nova] Acquiring lock "refresh_cache-7f7b3f51-3e96-49f1-a84a-81ae649e6938" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 908.882515] env[61839]: DEBUG oslo_concurrency.lockutils [None req-d2f1d7dc-78e2-44fa-94f8-ee4c41a57b55 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.543s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 908.889690] env[61839]: DEBUG oslo_concurrency.lockutils [None req-8237778e-4e32-46ea-924d-34926c83603d tempest-ServerAddressesNegativeTestJSON-1352160384 tempest-ServerAddressesNegativeTestJSON-1352160384-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.129s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 908.894540] env[61839]: INFO nova.compute.claims [None req-8237778e-4e32-46ea-924d-34926c83603d tempest-ServerAddressesNegativeTestJSON-1352160384 tempest-ServerAddressesNegativeTestJSON-1352160384-project-member] [instance: 12087baa-e700-4977-b2df-3aa2c56cc2f6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 908.908124] env[61839]: DEBUG oslo_vmware.api [None req-56232971-235f-4baa-8514-44b4e05db378 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': task-1314677, 'name': Rename_Task, 'duration_secs': 0.175063} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 908.912688] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-56232971-235f-4baa-8514-44b4e05db378 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 0d42326a-9958-463a-90ae-34fb55e99c5b] Powering on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 908.913454] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-72a4ed96-b2a7-409c-8d8a-4c185efa189b {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.920279] env[61839]: DEBUG oslo_vmware.api [None req-4887ddbd-b554-48d0-94f5-d697450e63c9 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Task: {'id': task-1314672, 'name': PowerOnVM_Task, 'duration_secs': 1.2009} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 908.921436] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-4887ddbd-b554-48d0-94f5-d697450e63c9 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: 042183e2-d203-4d07-a668-df24ba50e242] Powered on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 908.921642] env[61839]: INFO nova.compute.manager [None req-4887ddbd-b554-48d0-94f5-d697450e63c9 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: 042183e2-d203-4d07-a668-df24ba50e242] Took 17.12 seconds to spawn the instance on the hypervisor. [ 908.921893] env[61839]: DEBUG nova.compute.manager [None req-4887ddbd-b554-48d0-94f5-d697450e63c9 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: 042183e2-d203-4d07-a668-df24ba50e242] Checking state {{(pid=61839) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 908.922258] env[61839]: DEBUG oslo_vmware.api [None req-56232971-235f-4baa-8514-44b4e05db378 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Waiting for the task: (returnval){ [ 908.922258] env[61839]: value = "task-1314678" [ 908.922258] env[61839]: _type = "Task" [ 908.922258] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 908.925020] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e774714a-a6df-4390-bd0e-69690dd3f675 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.937516] env[61839]: DEBUG oslo_vmware.api [None req-56232971-235f-4baa-8514-44b4e05db378 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': task-1314678, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.020235] env[61839]: DEBUG oslo_vmware.api [None req-40823cea-b50f-4b8d-809e-3a11293f7329 tempest-ServerMetadataTestJSON-214230339 tempest-ServerMetadataTestJSON-214230339-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52f7de5c-045c-cd1d-7988-c992b2bdfffb, 'name': SearchDatastore_Task, 'duration_secs': 0.013644} completed successfully. 
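
The Acquiring/acquired/released triplets threading through this section, with their waited/held timings, all come from one place: oslo.concurrency's lockutils, whose synchronized decorator wraps the callee and logs from its inner wrapper (the lockutils.py:402/407/421 references above). A minimal usage sketch, assuming oslo.concurrency is installed; the function body is invented, but the lock name matches the resource-tracker lock in the log:

    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def instance_claim():
        # Critical section: in Nova this guards ResourceTracker.instance_claim
        # and update_usage. The decorator emits the Acquiring / acquired /
        # released DEBUG lines seen above, including wait and hold times.
        pass

The multi-second waits visible here (for example, 2.129s to take the claim) are simply contention on this one host-wide semaphore across all instances being built or torn down.
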
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 909.020562] env[61839]: DEBUG oslo_concurrency.lockutils [None req-40823cea-b50f-4b8d-809e-3a11293f7329 tempest-ServerMetadataTestJSON-214230339 tempest-ServerMetadataTestJSON-214230339-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 909.020869] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-40823cea-b50f-4b8d-809e-3a11293f7329 tempest-ServerMetadataTestJSON-214230339 tempest-ServerMetadataTestJSON-214230339-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk to [datastore1] bac4c882-a23d-412f-ae98-f4f21d86681a/bac4c882-a23d-412f-ae98-f4f21d86681a.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 909.021186] env[61839]: DEBUG oslo_concurrency.lockutils [None req-095248fd-e632-43a1-88be-4cc8540a5623 tempest-ServerGroupTestJSON-1627784352 tempest-ServerGroupTestJSON-1627784352-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 909.021382] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-095248fd-e632-43a1-88be-4cc8540a5623 tempest-ServerGroupTestJSON-1627784352 tempest-ServerGroupTestJSON-1627784352-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 909.021686] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e85858ea-08b3-4e15-8e87-e792938b55bd {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.023703] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-085e49bd-3782-4d25-8d0e-ccd31eae836d {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.031928] env[61839]: DEBUG oslo_vmware.api [None req-40823cea-b50f-4b8d-809e-3a11293f7329 tempest-ServerMetadataTestJSON-214230339 tempest-ServerMetadataTestJSON-214230339-project-member] Waiting for the task: (returnval){ [ 909.031928] env[61839]: value = "task-1314679" [ 909.031928] env[61839]: _type = "Task" [ 909.031928] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 909.041053] env[61839]: DEBUG oslo_vmware.api [None req-40823cea-b50f-4b8d-809e-3a11293f7329 tempest-ServerMetadataTestJSON-214230339 tempest-ServerMetadataTestJSON-214230339-project-member] Task: {'id': task-1314679, 'name': CopyVirtualDisk_Task} progress is 0%. 
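
The CopyVirtualDisk_Task just started is the tail end of Nova's VMware image-cache pattern: the base image is kept once per datastore under devstack-image-cache_base and copied into the instance's own directory at spawn. Datastore paths take the form '[datastore] dir/file'; a small helper in the spirit of nova.virt.vmwareapi.ds_util's path handling (the helper itself is hypothetical) reproduces the two paths from the log:

    def ds_path(datastore, *parts):
        # Render a vSphere datastore path such as '[datastore1] dir/file.vmdk'.
        return "[%s] %s" % (datastore, "/".join(parts))

    image_id = "e497cc62-282a-4a70-9770-22d80d8a1013"
    instance_uuid = "bac4c882-a23d-412f-ae98-f4f21d86681a"

    src = ds_path("datastore1", "devstack-image-cache_base", image_id, image_id + ".vmdk")
    dst = ds_path("datastore1", instance_uuid, instance_uuid + ".vmdk")
    assert src == ("[datastore1] devstack-image-cache_base/"
                   "e497cc62-282a-4a70-9770-22d80d8a1013/"
                   "e497cc62-282a-4a70-9770-22d80d8a1013.vmdk")
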
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.046267] env[61839]: DEBUG oslo_concurrency.lockutils [None req-471940ce-9b23-4cd3-8f29-5ca6a8a98f90 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 909.063539] env[61839]: DEBUG oslo_concurrency.lockutils [None req-c8b7ef29-926a-40c9-9023-eb5013fb90e1 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Releasing lock "refresh_cache-7f7b3f51-3e96-49f1-a84a-81ae649e6938" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 909.063889] env[61839]: DEBUG nova.compute.manager [None req-c8b7ef29-926a-40c9-9023-eb5013fb90e1 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 7f7b3f51-3e96-49f1-a84a-81ae649e6938] Instance network_info: |[{"id": "6b6f5c89-f3fe-4e29-82b8-e9e8f2658bfd", "address": "fa:16:3e:17:50:4a", "network": {"id": "90aa5d36-d3ac-4a62-9f9d-7578cd9ac466", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-2104030849-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "48d8c406ff504d71bba5fb74caf11c14", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eed34ae1-5f7f-4deb-9db8-85eaa1e60c29", "external-id": "nsx-vlan-transportzone-780", "segmentation_id": 780, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6b6f5c89-f3", "ovs_interfaceid": "6b6f5c89-f3fe-4e29-82b8-e9e8f2658bfd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 909.064225] env[61839]: DEBUG oslo_concurrency.lockutils [req-6ebc4c39-6b8d-43d0-89e3-d8e5722742ec req-587b6d0b-9ec7-45ad-9c0a-ac2ae2af74fb service nova] Acquired lock "refresh_cache-7f7b3f51-3e96-49f1-a84a-81ae649e6938" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 909.064469] env[61839]: DEBUG nova.network.neutron [req-6ebc4c39-6b8d-43d0-89e3-d8e5722742ec req-587b6d0b-9ec7-45ad-9c0a-ac2ae2af74fb service nova] [instance: 7f7b3f51-3e96-49f1-a84a-81ae649e6938] Refreshing network info cache for port 6b6f5c89-f3fe-4e29-82b8-e9e8f2658bfd {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 909.065721] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-c8b7ef29-926a-40c9-9023-eb5013fb90e1 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 7f7b3f51-3e96-49f1-a84a-81ae649e6938] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:17:50:4a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'eed34ae1-5f7f-4deb-9db8-85eaa1e60c29', 
'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6b6f5c89-f3fe-4e29-82b8-e9e8f2658bfd', 'vif_model': 'vmxnet3'}] {{(pid=61839) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 909.075866] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8b7ef29-926a-40c9-9023-eb5013fb90e1 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Creating folder: Project (48d8c406ff504d71bba5fb74caf11c14). Parent ref: group-v281288. {{(pid=61839) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 909.083717] env[61839]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d800622f-cdcf-4bd5-89a1-6b879ece869f {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.099621] env[61839]: INFO nova.virt.vmwareapi.vm_util [None req-c8b7ef29-926a-40c9-9023-eb5013fb90e1 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Created folder: Project (48d8c406ff504d71bba5fb74caf11c14) in parent group-v281288. [ 909.100153] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8b7ef29-926a-40c9-9023-eb5013fb90e1 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Creating folder: Instances. Parent ref: group-v281405. {{(pid=61839) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 909.100462] env[61839]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1868246a-9bd2-4d8a-88eb-7b4f125cd0a5 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.115920] env[61839]: INFO nova.virt.vmwareapi.vm_util [None req-c8b7ef29-926a-40c9-9023-eb5013fb90e1 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Created folder: Instances in parent group-v281405. [ 909.116201] env[61839]: DEBUG oslo.service.loopingcall [None req-c8b7ef29-926a-40c9-9023-eb5013fb90e1 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 909.116408] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7f7b3f51-3e96-49f1-a84a-81ae649e6938] Creating VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 909.116644] env[61839]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6820e420-c10c-4e46-9460-53acfd1f3f62 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.142909] env[61839]: DEBUG oslo_vmware.api [None req-6791ea0c-d18a-4d55-a906-7cd1e895b298 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1314675, 'name': CloneVM_Task} progress is 95%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.144401] env[61839]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 909.144401] env[61839]: value = "task-1314682" [ 909.144401] env[61839]: _type = "Task" [ 909.144401] env[61839]: } to complete. 
{{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 909.215092] env[61839]: DEBUG nova.network.neutron [None req-9dd4913a-7b16-4970-942a-02a177c420e2 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 56369316-a445-4a2a-a0a6-967074104e19] Updating instance_info_cache with network_info: [{"id": "ef2288b6-c4de-43f8-95c9-22511e164c36", "address": "fa:16:3e:c7:71:6a", "network": {"id": "6f1d4bda-c333-4556-9530-df719c058a5b", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1248001983-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.162", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "223d94c193814f649b5d1f35e3756071", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2180b40f-2bb0-47da-ba80-c2fbe7f98af0", "external-id": "nsx-vlan-transportzone-970", "segmentation_id": 970, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapef2288b6-c4", "ovs_interfaceid": "ef2288b6-c4de-43f8-95c9-22511e164c36", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 909.252542] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-095248fd-e632-43a1-88be-4cc8540a5623 tempest-ServerGroupTestJSON-1627784352 tempest-ServerGroupTestJSON-1627784352-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 909.252859] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-095248fd-e632-43a1-88be-4cc8540a5623 tempest-ServerGroupTestJSON-1627784352 tempest-ServerGroupTestJSON-1627784352-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61839) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 909.253814] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ea103dbe-8291-4dac-910e-6633fc5ac7ee {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.276899] env[61839]: DEBUG oslo_vmware.api [None req-095248fd-e632-43a1-88be-4cc8540a5623 tempest-ServerGroupTestJSON-1627784352 tempest-ServerGroupTestJSON-1627784352-project-member] Waiting for the task: (returnval){ [ 909.276899] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52aa561b-50dc-a29d-8cd2-f3ea51bda451" [ 909.276899] env[61839]: _type = "Task" [ 909.276899] env[61839]: } to complete. 
{{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 909.290021] env[61839]: DEBUG oslo_vmware.api [None req-095248fd-e632-43a1-88be-4cc8540a5623 tempest-ServerGroupTestJSON-1627784352 tempest-ServerGroupTestJSON-1627784352-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52aa561b-50dc-a29d-8cd2-f3ea51bda451, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.408287] env[61839]: DEBUG oslo_concurrency.lockutils [None req-d2f1d7dc-78e2-44fa-94f8-ee4c41a57b55 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Lock "694a5d4b-3673-406b-a24a-d37fad33e549" "released" by "nova.compute.manager.ComputeManager.shelve_instance.<locals>.do_shelve_instance" :: held 29.263s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 909.409343] env[61839]: DEBUG oslo_concurrency.lockutils [None req-bf3bcffb-6c34-4e16-a353-b3f853008852 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Lock "694a5d4b-3673-406b-a24a-d37fad33e549" acquired by "nova.compute.manager.ComputeManager.unshelve_instance.<locals>.do_unshelve_instance" :: waited 5.205s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 909.409602] env[61839]: INFO nova.compute.manager [None req-bf3bcffb-6c34-4e16-a353-b3f853008852 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: 694a5d4b-3673-406b-a24a-d37fad33e549] Unshelving [ 909.438737] env[61839]: DEBUG oslo_vmware.api [None req-56232971-235f-4baa-8514-44b4e05db378 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': task-1314678, 'name': PowerOnVM_Task, 'duration_secs': 0.472226} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 909.439057] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-56232971-235f-4baa-8514-44b4e05db378 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 0d42326a-9958-463a-90ae-34fb55e99c5b] Powered on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 909.439272] env[61839]: INFO nova.compute.manager [None req-56232971-235f-4baa-8514-44b4e05db378 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 0d42326a-9958-463a-90ae-34fb55e99c5b] Took 8.14 seconds to spawn the instance on the hypervisor. 
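
The instance_info_cache blobs repeated through this section are Nova's serialized network model: a list of VIFs, each with a network, its subnets, and per-IP floating-IP attachments. Pulling addresses out of one is plain dict traversal; a sketch, using the cache entry for instance 56369316-... above as input:

    def extract_ips(network_info):
        # Collect fixed and floating IPs from a cached network_info list.
        fixed, floating = [], []
        for vif in network_info:
            for subnet in vif["network"]["subnets"]:
                for ip in subnet["ips"]:
                    fixed.append(ip["address"])
                    floating.extend(f["address"] for f in ip.get("floating_ips", []))
        return fixed, floating

    # For the 56369316-... entry above this yields
    # (['192.168.128.10'], ['10.180.180.162']).
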
[ 909.439605] env[61839]: DEBUG nova.compute.manager [None req-56232971-235f-4baa-8514-44b4e05db378 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 0d42326a-9958-463a-90ae-34fb55e99c5b] Checking state {{(pid=61839) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 909.443153] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51cc6afe-9f28-4d05-854d-af45a85dfcae {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.457284] env[61839]: INFO nova.compute.manager [None req-4887ddbd-b554-48d0-94f5-d697450e63c9 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: 042183e2-d203-4d07-a668-df24ba50e242] Took 25.85 seconds to build instance. [ 909.507889] env[61839]: DEBUG oslo_concurrency.lockutils [None req-66af4a22-0818-42a5-aa42-24ba00cd1977 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Acquiring lock "e3126a08-c3c6-4d9e-9a50-659085fbfec9" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 909.508703] env[61839]: DEBUG oslo_concurrency.lockutils [None req-66af4a22-0818-42a5-aa42-24ba00cd1977 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Lock "e3126a08-c3c6-4d9e-9a50-659085fbfec9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 909.545578] env[61839]: DEBUG oslo_vmware.api [None req-40823cea-b50f-4b8d-809e-3a11293f7329 tempest-ServerMetadataTestJSON-214230339 tempest-ServerMetadataTestJSON-214230339-project-member] Task: {'id': task-1314679, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.645023] env[61839]: DEBUG oslo_vmware.api [None req-6791ea0c-d18a-4d55-a906-7cd1e895b298 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1314675, 'name': CloneVM_Task, 'duration_secs': 1.758701} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 909.645023] env[61839]: INFO nova.virt.vmwareapi.vmops [None req-6791ea0c-d18a-4d55-a906-7cd1e895b298 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4] Created linked-clone VM from snapshot [ 909.645023] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78b4cd10-c7cd-4725-9545-8570a56906ff {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.655407] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314682, 'name': CreateVM_Task} progress is 15%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.659393] env[61839]: DEBUG nova.virt.vmwareapi.images [None req-6791ea0c-d18a-4d55-a906-7cd1e895b298 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4] Uploading image c3165308-1f67-4465-ae42-29b3390c2326 {{(pid=61839) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 909.690627] env[61839]: DEBUG oslo_vmware.rw_handles [None req-6791ea0c-d18a-4d55-a906-7cd1e895b298 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 909.690627] env[61839]: value = "vm-281404" [ 909.690627] env[61839]: _type = "VirtualMachine" [ 909.690627] env[61839]: }. {{(pid=61839) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 909.691335] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-77676560-ecad-4a60-a1be-e4d90cc19305 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.700088] env[61839]: DEBUG oslo_vmware.rw_handles [None req-6791ea0c-d18a-4d55-a906-7cd1e895b298 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Lease: (returnval){ [ 909.700088] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]528084e5-bb48-fbdf-c892-61177cd397d4" [ 909.700088] env[61839]: _type = "HttpNfcLease" [ 909.700088] env[61839]: } obtained for exporting VM: (result){ [ 909.700088] env[61839]: value = "vm-281404" [ 909.700088] env[61839]: _type = "VirtualMachine" [ 909.700088] env[61839]: }. {{(pid=61839) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 909.700920] env[61839]: DEBUG oslo_vmware.api [None req-6791ea0c-d18a-4d55-a906-7cd1e895b298 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Waiting for the lease: (returnval){ [ 909.700920] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]528084e5-bb48-fbdf-c892-61177cd397d4" [ 909.700920] env[61839]: _type = "HttpNfcLease" [ 909.700920] env[61839]: } to be ready. {{(pid=61839) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 909.709201] env[61839]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 909.709201] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]528084e5-bb48-fbdf-c892-61177cd397d4" [ 909.709201] env[61839]: _type = "HttpNfcLease" [ 909.709201] env[61839]: } is initializing. 
{{(pid=61839) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 909.720325] env[61839]: DEBUG oslo_concurrency.lockutils [None req-9dd4913a-7b16-4970-942a-02a177c420e2 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Releasing lock "refresh_cache-56369316-a445-4a2a-a0a6-967074104e19" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 909.790293] env[61839]: DEBUG oslo_vmware.api [None req-095248fd-e632-43a1-88be-4cc8540a5623 tempest-ServerGroupTestJSON-1627784352 tempest-ServerGroupTestJSON-1627784352-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52aa561b-50dc-a29d-8cd2-f3ea51bda451, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.963462] env[61839]: DEBUG oslo_concurrency.lockutils [None req-4887ddbd-b554-48d0-94f5-d697450e63c9 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Lock "042183e2-d203-4d07-a668-df24ba50e242" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 28.113s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 909.967152] env[61839]: INFO nova.compute.manager [None req-56232971-235f-4baa-8514-44b4e05db378 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 0d42326a-9958-463a-90ae-34fb55e99c5b] Took 22.22 seconds to build instance. [ 910.001673] env[61839]: DEBUG nova.network.neutron [req-6ebc4c39-6b8d-43d0-89e3-d8e5722742ec req-587b6d0b-9ec7-45ad-9c0a-ac2ae2af74fb service nova] [instance: 7f7b3f51-3e96-49f1-a84a-81ae649e6938] Updated VIF entry in instance network info cache for port 6b6f5c89-f3fe-4e29-82b8-e9e8f2658bfd. 
{{(pid=61839) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 910.001673] env[61839]: DEBUG nova.network.neutron [req-6ebc4c39-6b8d-43d0-89e3-d8e5722742ec req-587b6d0b-9ec7-45ad-9c0a-ac2ae2af74fb service nova] [instance: 7f7b3f51-3e96-49f1-a84a-81ae649e6938] Updating instance_info_cache with network_info: [{"id": "6b6f5c89-f3fe-4e29-82b8-e9e8f2658bfd", "address": "fa:16:3e:17:50:4a", "network": {"id": "90aa5d36-d3ac-4a62-9f9d-7578cd9ac466", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-2104030849-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "48d8c406ff504d71bba5fb74caf11c14", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eed34ae1-5f7f-4deb-9db8-85eaa1e60c29", "external-id": "nsx-vlan-transportzone-780", "segmentation_id": 780, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6b6f5c89-f3", "ovs_interfaceid": "6b6f5c89-f3fe-4e29-82b8-e9e8f2658bfd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 910.012073] env[61839]: DEBUG nova.compute.manager [None req-66af4a22-0818-42a5-aa42-24ba00cd1977 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: e3126a08-c3c6-4d9e-9a50-659085fbfec9] Starting instance... {{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 910.039831] env[61839]: DEBUG nova.network.neutron [-] [instance: 3f86a0d5-30fd-42cc-bd40-14bce9d0e56f] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 910.056951] env[61839]: DEBUG oslo_vmware.api [None req-40823cea-b50f-4b8d-809e-3a11293f7329 tempest-ServerMetadataTestJSON-214230339 tempest-ServerMetadataTestJSON-214230339-project-member] Task: {'id': task-1314679, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.160982] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314682, 'name': CreateVM_Task} progress is 15%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.164536] env[61839]: DEBUG nova.compute.manager [req-427bd6d0-025a-4c5e-b648-1d4a59685266 req-feb652c6-cfc8-474b-9081-95f9ef39f38e service nova] [instance: 3f86a0d5-30fd-42cc-bd40-14bce9d0e56f] Received event network-vif-deleted-6ae0317a-8af8-438c-864f-d31b4d288dab {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 910.210296] env[61839]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 910.210296] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]528084e5-bb48-fbdf-c892-61177cd397d4" [ 910.210296] env[61839]: _type = "HttpNfcLease" [ 910.210296] env[61839]: } is ready. 
{{(pid=61839) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 910.210617] env[61839]: DEBUG oslo_vmware.rw_handles [None req-6791ea0c-d18a-4d55-a906-7cd1e895b298 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 910.210617] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]528084e5-bb48-fbdf-c892-61177cd397d4" [ 910.210617] env[61839]: _type = "HttpNfcLease" [ 910.210617] env[61839]: }. {{(pid=61839) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 910.211441] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c6b812e-98da-47f1-90fa-b77a722d0e9a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.215521] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6ce21b2-7180-46bc-9463-3590f921b9eb {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.223305] env[61839]: DEBUG oslo_vmware.rw_handles [None req-6791ea0c-d18a-4d55-a906-7cd1e895b298 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/521eb3a2-7eab-4c0b-968b-a13546c441c1/disk-0.vmdk from lease info. {{(pid=61839) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 910.223497] env[61839]: DEBUG oslo_vmware.rw_handles [None req-6791ea0c-d18a-4d55-a906-7cd1e895b298 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/521eb3a2-7eab-4c0b-968b-a13546c441c1/disk-0.vmdk for reading. 
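
The lease dance above is the standard vSphere export flow: ExportVm returns an HttpNfcLease, the caller polls it out of the initializing state, reads the disk's HTTPS URL from the lease info, then streams the VMDK while periodically reporting HttpNfcLeaseProgress so vCenter does not expire the lease mid-transfer (oslo_vmware.rw_handles does this for real). A structural sketch, with a hypothetical session wrapper exposing those operations by name:

    import time

    def export_vmdk(session, vm_ref, total_size, chunk_size=1 << 20):
        # session is a hypothetical wrapper; the production path is
        # oslo_vmware.rw_handles.VmdkReadHandle.
        lease = session.export_vm(vm_ref)                 # VirtualMachine.ExportVm
        while session.lease_state(lease) == "initializing":
            time.sleep(1)                                 # "Lease ... is initializing"
        url = session.lease_vmdk_url(lease)               # "Found VMDK URL: https://esx..."
        done = 0
        with session.http_open(url) as src:               # "Opening URL ... for reading"
            while chunk := src.read(chunk_size):
                done += len(chunk)
                # keep the lease alive so vCenter doesn't revoke it mid-copy
                session.lease_progress(lease, min(done * 100 // total_size, 99))
                yield chunk
        session.lease_complete(lease)                     # HttpNfcLeaseComplete
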
{{(pid=61839) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 910.228190] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-142f9ebc-4453-4b07-add5-07995c1cd52b {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.292303] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-9dd4913a-7b16-4970-942a-02a177c420e2 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 56369316-a445-4a2a-a0a6-967074104e19] Powering off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 910.292900] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ad5ec38b-9951-4bbb-b782-b4e37e791795 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.319194] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1e4fa38-19f3-436b-9483-ea0d6f4b69d7 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.324594] env[61839]: DEBUG oslo_vmware.api [None req-095248fd-e632-43a1-88be-4cc8540a5623 tempest-ServerGroupTestJSON-1627784352 tempest-ServerGroupTestJSON-1627784352-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52aa561b-50dc-a29d-8cd2-f3ea51bda451, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.326219] env[61839]: DEBUG oslo_vmware.api [None req-9dd4913a-7b16-4970-942a-02a177c420e2 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Waiting for the task: (returnval){ [ 910.326219] env[61839]: value = "task-1314684" [ 910.326219] env[61839]: _type = "Task" [ 910.326219] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 910.333692] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3524cbfd-e4cf-44b8-9543-36acef5daf3b {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.340527] env[61839]: DEBUG oslo_vmware.api [None req-9dd4913a-7b16-4970-942a-02a177c420e2 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Task: {'id': task-1314684, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.354995] env[61839]: DEBUG nova.compute.provider_tree [None req-8237778e-4e32-46ea-924d-34926c83603d tempest-ServerAddressesNegativeTestJSON-1352160384 tempest-ServerAddressesNegativeTestJSON-1352160384-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 910.359893] env[61839]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-5952279a-67a3-4046-aae6-5da11eb0d4f5 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.422790] env[61839]: DEBUG nova.compute.utils [None req-bf3bcffb-6c34-4e16-a353-b3f853008852 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Using /dev/sd instead of None {{(pid=61839) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 910.478044] env[61839]: DEBUG oslo_concurrency.lockutils [None req-56232971-235f-4baa-8514-44b4e05db378 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Lock "0d42326a-9958-463a-90ae-34fb55e99c5b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 23.743s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 910.504650] env[61839]: DEBUG oslo_concurrency.lockutils [req-6ebc4c39-6b8d-43d0-89e3-d8e5722742ec req-587b6d0b-9ec7-45ad-9c0a-ac2ae2af74fb service nova] Releasing lock "refresh_cache-7f7b3f51-3e96-49f1-a84a-81ae649e6938" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 910.541744] env[61839]: INFO nova.compute.manager [-] [instance: 3f86a0d5-30fd-42cc-bd40-14bce9d0e56f] Took 2.41 seconds to deallocate network for instance. [ 910.550553] env[61839]: DEBUG oslo_vmware.api [None req-40823cea-b50f-4b8d-809e-3a11293f7329 tempest-ServerMetadataTestJSON-214230339 tempest-ServerMetadataTestJSON-214230339-project-member] Task: {'id': task-1314679, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.571340] env[61839]: DEBUG oslo_concurrency.lockutils [None req-66af4a22-0818-42a5-aa42-24ba00cd1977 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 910.660249] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314682, 'name': CreateVM_Task} progress is 15%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.797379] env[61839]: DEBUG oslo_vmware.api [None req-095248fd-e632-43a1-88be-4cc8540a5623 tempest-ServerGroupTestJSON-1627784352 tempest-ServerGroupTestJSON-1627784352-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52aa561b-50dc-a29d-8cd2-f3ea51bda451, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.839661] env[61839]: DEBUG oslo_vmware.api [None req-9dd4913a-7b16-4970-942a-02a177c420e2 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Task: {'id': task-1314684, 'name': PowerOffVM_Task, 'duration_secs': 0.339008} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 910.840606] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-9dd4913a-7b16-4970-942a-02a177c420e2 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 56369316-a445-4a2a-a0a6-967074104e19] Powered off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 910.841656] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-204b9afa-d40d-4ae5-8933-13a395b63770 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.874378] env[61839]: DEBUG nova.scheduler.client.report [None req-8237778e-4e32-46ea-924d-34926c83603d tempest-ServerAddressesNegativeTestJSON-1352160384 tempest-ServerAddressesNegativeTestJSON-1352160384-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 910.877019] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a4e9d78c-507d-4734-8203-a95de4d6e9b0 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Acquiring lock "fa8a2265-291e-4424-bea1-72574e495a72" by "nova.compute.manager.ComputeManager.confirm_resize.<locals>.do_confirm_resize" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 910.877488] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a4e9d78c-507d-4734-8203-a95de4d6e9b0 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Lock "fa8a2265-291e-4424-bea1-72574e495a72" acquired by "nova.compute.manager.ComputeManager.confirm_resize.<locals>.do_confirm_resize" :: waited 0.001s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 910.877835] env[61839]: DEBUG nova.compute.manager [None req-a4e9d78c-507d-4734-8203-a95de4d6e9b0 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: fa8a2265-291e-4424-bea1-72574e495a72] Going to confirm migration 1 {{(pid=61839) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:4783}} [ 910.881258] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-006f29c3-0123-448c-b902-b0ef909ab7ba {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.925369] env[61839]: INFO nova.virt.block_device [None 
req-bf3bcffb-6c34-4e16-a353-b3f853008852 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: 694a5d4b-3673-406b-a24a-d37fad33e549] Booting with volume 6d6015d8-0b91-403e-ae0f-19b4e71ccee2 at /dev/sdb [ 910.937886] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-9dd4913a-7b16-4970-942a-02a177c420e2 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 56369316-a445-4a2a-a0a6-967074104e19] Powering off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 910.938935] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-24f29499-2918-4136-8e5f-6b6b27cac481 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.948847] env[61839]: DEBUG oslo_vmware.api [None req-9dd4913a-7b16-4970-942a-02a177c420e2 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Waiting for the task: (returnval){ [ 910.948847] env[61839]: value = "task-1314685" [ 910.948847] env[61839]: _type = "Task" [ 910.948847] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 910.962594] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-9dd4913a-7b16-4970-942a-02a177c420e2 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 56369316-a445-4a2a-a0a6-967074104e19] VM already powered off {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 910.962940] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-9dd4913a-7b16-4970-942a-02a177c420e2 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 56369316-a445-4a2a-a0a6-967074104e19] Processing image e497cc62-282a-4a70-9770-22d80d8a1013 {{(pid=61839) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 910.963483] env[61839]: DEBUG oslo_concurrency.lockutils [None req-9dd4913a-7b16-4970-942a-02a177c420e2 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 910.973583] env[61839]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a3cf2cd9-559e-41a4-a9ec-15aeefb5a7a8 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.983868] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-925ae35f-c350-4210-b814-06ffe238ad99 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.021218] env[61839]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ab4bdbb9-1195-4b84-bcce-77f472966889 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.030920] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-1a0ced3d-d8f8-405f-a560-58593a516678 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.054915] env[61839]: DEBUG oslo_concurrency.lockutils [None req-f6ddf676-45e1-46d7-86c0-b27d63a4d858 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 911.055315] env[61839]: DEBUG oslo_vmware.api [None req-40823cea-b50f-4b8d-809e-3a11293f7329 tempest-ServerMetadataTestJSON-214230339 tempest-ServerMetadataTestJSON-214230339-project-member] Task: {'id': task-1314679, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 911.069625] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef462efa-b381-48ad-af0c-1bf656563905 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.078184] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d54b8741-33f7-42e3-a67a-5899d1a568b3 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.095760] env[61839]: DEBUG nova.virt.block_device [None req-bf3bcffb-6c34-4e16-a353-b3f853008852 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: 694a5d4b-3673-406b-a24a-d37fad33e549] Updating existing volume attachment record: be33680c-46ec-4df7-b2e6-47f44b8600ce {{(pid=61839) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 911.161503] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314682, 'name': CreateVM_Task} progress is 25%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 911.222818] env[61839]: DEBUG oslo_concurrency.lockutils [None req-5f3d16f0-0f77-47fa-b4e0-12a7b5b00f45 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Acquiring lock "042183e2-d203-4d07-a668-df24ba50e242" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 911.223364] env[61839]: DEBUG oslo_concurrency.lockutils [None req-5f3d16f0-0f77-47fa-b4e0-12a7b5b00f45 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Lock "042183e2-d203-4d07-a668-df24ba50e242" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 911.223674] env[61839]: DEBUG oslo_concurrency.lockutils [None req-5f3d16f0-0f77-47fa-b4e0-12a7b5b00f45 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Acquiring lock "042183e2-d203-4d07-a668-df24ba50e242-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 911.223901] env[61839]: DEBUG oslo_concurrency.lockutils [None req-5f3d16f0-0f77-47fa-b4e0-12a7b5b00f45 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Lock "042183e2-d203-4d07-a668-df24ba50e242-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 911.224208] env[61839]: DEBUG oslo_concurrency.lockutils [None req-5f3d16f0-0f77-47fa-b4e0-12a7b5b00f45 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Lock "042183e2-d203-4d07-a668-df24ba50e242-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 911.226979] env[61839]: INFO nova.compute.manager [None req-5f3d16f0-0f77-47fa-b4e0-12a7b5b00f45 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: 042183e2-d203-4d07-a668-df24ba50e242] Terminating instance [ 911.230120] env[61839]: DEBUG nova.compute.manager [None req-5f3d16f0-0f77-47fa-b4e0-12a7b5b00f45 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: 042183e2-d203-4d07-a668-df24ba50e242] Start destroying the instance on the hypervisor. 
{{(pid=61839) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 911.230247] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-5f3d16f0-0f77-47fa-b4e0-12a7b5b00f45 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: 042183e2-d203-4d07-a668-df24ba50e242] Destroying instance {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 911.237246] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d6d7ff2-1846-44f7-8f80-1dfcf42dae95 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.245103] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-5f3d16f0-0f77-47fa-b4e0-12a7b5b00f45 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: 042183e2-d203-4d07-a668-df24ba50e242] Powering off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 911.245103] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0295448e-af28-45bf-8fd2-7bfc33ebcd4d {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.253868] env[61839]: DEBUG oslo_vmware.api [None req-5f3d16f0-0f77-47fa-b4e0-12a7b5b00f45 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Waiting for the task: (returnval){ [ 911.253868] env[61839]: value = "task-1314687" [ 911.253868] env[61839]: _type = "Task" [ 911.253868] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 911.279644] env[61839]: DEBUG oslo_vmware.api [None req-5f3d16f0-0f77-47fa-b4e0-12a7b5b00f45 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Task: {'id': task-1314687, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 911.304286] env[61839]: DEBUG oslo_vmware.api [None req-095248fd-e632-43a1-88be-4cc8540a5623 tempest-ServerGroupTestJSON-1627784352 tempest-ServerGroupTestJSON-1627784352-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52aa561b-50dc-a29d-8cd2-f3ea51bda451, 'name': SearchDatastore_Task, 'duration_secs': 1.868906} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 911.305452] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e78fc3f2-94f2-40d9-8740-98a744a4ea6b {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.313966] env[61839]: DEBUG oslo_vmware.api [None req-095248fd-e632-43a1-88be-4cc8540a5623 tempest-ServerGroupTestJSON-1627784352 tempest-ServerGroupTestJSON-1627784352-project-member] Waiting for the task: (returnval){ [ 911.313966] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]521690c4-9c29-3d2c-0938-d9e5a7707633" [ 911.313966] env[61839]: _type = "Task" [ 911.313966] env[61839]: } to complete. 
{{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 911.323680] env[61839]: DEBUG oslo_vmware.api [None req-095248fd-e632-43a1-88be-4cc8540a5623 tempest-ServerGroupTestJSON-1627784352 tempest-ServerGroupTestJSON-1627784352-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]521690c4-9c29-3d2c-0938-d9e5a7707633, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 911.388308] env[61839]: DEBUG oslo_concurrency.lockutils [None req-8237778e-4e32-46ea-924d-34926c83603d tempest-ServerAddressesNegativeTestJSON-1352160384 tempest-ServerAddressesNegativeTestJSON-1352160384-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.495s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 911.388308] env[61839]: DEBUG nova.compute.manager [None req-8237778e-4e32-46ea-924d-34926c83603d tempest-ServerAddressesNegativeTestJSON-1352160384 tempest-ServerAddressesNegativeTestJSON-1352160384-project-member] [instance: 12087baa-e700-4977-b2df-3aa2c56cc2f6] Start building networks asynchronously for instance. {{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 911.392843] env[61839]: DEBUG oslo_concurrency.lockutils [None req-471940ce-9b23-4cd3-8f29-5ca6a8a98f90 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.347s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 911.393486] env[61839]: DEBUG nova.objects.instance [None req-471940ce-9b23-4cd3-8f29-5ca6a8a98f90 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Lazy-loading 'resources' on Instance uuid 40c54d84-8e50-483a-b4e0-5f1cc72b0880 {{(pid=61839) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 911.502929] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a4e9d78c-507d-4734-8203-a95de4d6e9b0 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Acquiring lock "refresh_cache-fa8a2265-291e-4424-bea1-72574e495a72" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 911.503077] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a4e9d78c-507d-4734-8203-a95de4d6e9b0 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Acquired lock "refresh_cache-fa8a2265-291e-4424-bea1-72574e495a72" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 911.503392] env[61839]: DEBUG nova.network.neutron [None req-a4e9d78c-507d-4734-8203-a95de4d6e9b0 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: fa8a2265-291e-4424-bea1-72574e495a72] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 911.503510] env[61839]: DEBUG nova.objects.instance [None req-a4e9d78c-507d-4734-8203-a95de4d6e9b0 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Lazy-loading 'info_cache' on Instance uuid 
fa8a2265-291e-4424-bea1-72574e495a72 {{(pid=61839) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 911.554963] env[61839]: DEBUG oslo_vmware.api [None req-40823cea-b50f-4b8d-809e-3a11293f7329 tempest-ServerMetadataTestJSON-214230339 tempest-ServerMetadataTestJSON-214230339-project-member] Task: {'id': task-1314679, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 911.664477] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314682, 'name': CreateVM_Task} progress is 25%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 911.746584] env[61839]: DEBUG oslo_concurrency.lockutils [None req-23057198-cea0-4ec8-8098-1a162c29f89f tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Acquiring lock "e265dcd3-6ab5-44b1-85be-bad934ebdb79" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 911.747281] env[61839]: DEBUG oslo_concurrency.lockutils [None req-23057198-cea0-4ec8-8098-1a162c29f89f tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Lock "e265dcd3-6ab5-44b1-85be-bad934ebdb79" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 911.769477] env[61839]: DEBUG oslo_vmware.api [None req-5f3d16f0-0f77-47fa-b4e0-12a7b5b00f45 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Task: {'id': task-1314687, 'name': PowerOffVM_Task, 'duration_secs': 0.317559} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 911.769913] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-5f3d16f0-0f77-47fa-b4e0-12a7b5b00f45 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: 042183e2-d203-4d07-a668-df24ba50e242] Powered off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 911.770168] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-5f3d16f0-0f77-47fa-b4e0-12a7b5b00f45 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: 042183e2-d203-4d07-a668-df24ba50e242] Unregistering the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 911.770476] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e9d419d0-5e8a-497e-89ea-cb0b9d0458e6 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.825572] env[61839]: DEBUG oslo_vmware.api [None req-095248fd-e632-43a1-88be-4cc8540a5623 tempest-ServerGroupTestJSON-1627784352 tempest-ServerGroupTestJSON-1627784352-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]521690c4-9c29-3d2c-0938-d9e5a7707633, 'name': SearchDatastore_Task, 'duration_secs': 0.080443} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 911.826039] env[61839]: DEBUG oslo_concurrency.lockutils [None req-095248fd-e632-43a1-88be-4cc8540a5623 tempest-ServerGroupTestJSON-1627784352 tempest-ServerGroupTestJSON-1627784352-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 911.826401] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-095248fd-e632-43a1-88be-4cc8540a5623 tempest-ServerGroupTestJSON-1627784352 tempest-ServerGroupTestJSON-1627784352-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk to [datastore1] 65f34f9e-353a-4f94-8f79-9bda89451885/65f34f9e-353a-4f94-8f79-9bda89451885.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 911.826763] env[61839]: DEBUG oslo_concurrency.lockutils [None req-9dd4913a-7b16-4970-942a-02a177c420e2 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 911.827047] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-9dd4913a-7b16-4970-942a-02a177c420e2 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 911.827285] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-830ba434-ec21-423f-be81-94dd5bb4ec28 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.829431] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c7b2f17b-4ede-4a35-9b94-421770fb7d74 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.840174] env[61839]: DEBUG oslo_vmware.api [None req-095248fd-e632-43a1-88be-4cc8540a5623 tempest-ServerGroupTestJSON-1627784352 tempest-ServerGroupTestJSON-1627784352-project-member] Waiting for the task: (returnval){ [ 911.840174] env[61839]: value = "task-1314691" [ 911.840174] env[61839]: _type = "Task" [ 911.840174] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 911.845611] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-9dd4913a-7b16-4970-942a-02a177c420e2 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 911.845883] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-9dd4913a-7b16-4970-942a-02a177c420e2 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61839) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 911.847421] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3c3b9cb2-7a0b-49d6-bba8-e24f896fa546 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.852614] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-5f3d16f0-0f77-47fa-b4e0-12a7b5b00f45 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: 042183e2-d203-4d07-a668-df24ba50e242] Unregistered the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 911.853081] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-5f3d16f0-0f77-47fa-b4e0-12a7b5b00f45 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: 042183e2-d203-4d07-a668-df24ba50e242] Deleting contents of the VM from datastore datastore1 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 911.853327] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-5f3d16f0-0f77-47fa-b4e0-12a7b5b00f45 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Deleting the datastore file [datastore1] 042183e2-d203-4d07-a668-df24ba50e242 {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 911.858220] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-756ff13d-7379-491e-bd07-6240732877ba {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.860703] env[61839]: DEBUG oslo_vmware.api [None req-095248fd-e632-43a1-88be-4cc8540a5623 tempest-ServerGroupTestJSON-1627784352 tempest-ServerGroupTestJSON-1627784352-project-member] Task: {'id': task-1314691, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 911.862533] env[61839]: DEBUG oslo_vmware.api [None req-9dd4913a-7b16-4970-942a-02a177c420e2 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Waiting for the task: (returnval){ [ 911.862533] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52b8c3ff-1971-e3f0-f33b-1416d6cd409e" [ 911.862533] env[61839]: _type = "Task" [ 911.862533] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 911.870036] env[61839]: DEBUG oslo_vmware.api [None req-5f3d16f0-0f77-47fa-b4e0-12a7b5b00f45 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Waiting for the task: (returnval){ [ 911.870036] env[61839]: value = "task-1314692" [ 911.870036] env[61839]: _type = "Task" [ 911.870036] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 911.876990] env[61839]: DEBUG oslo_vmware.api [None req-9dd4913a-7b16-4970-942a-02a177c420e2 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52b8c3ff-1971-e3f0-f33b-1416d6cd409e, 'name': SearchDatastore_Task} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 911.881409] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0ac6deac-b70a-48c4-b904-eff122384549 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.889815] env[61839]: DEBUG oslo_vmware.api [None req-9dd4913a-7b16-4970-942a-02a177c420e2 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Waiting for the task: (returnval){ [ 911.889815] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]528fc972-9f0e-a3cf-d340-143cd0059b99" [ 911.889815] env[61839]: _type = "Task" [ 911.889815] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 911.894741] env[61839]: DEBUG nova.compute.utils [None req-8237778e-4e32-46ea-924d-34926c83603d tempest-ServerAddressesNegativeTestJSON-1352160384 tempest-ServerAddressesNegativeTestJSON-1352160384-project-member] Using /dev/sd instead of None {{(pid=61839) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 911.896255] env[61839]: DEBUG oslo_vmware.api [None req-5f3d16f0-0f77-47fa-b4e0-12a7b5b00f45 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Task: {'id': task-1314692, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 911.899593] env[61839]: DEBUG nova.compute.manager [None req-8237778e-4e32-46ea-924d-34926c83603d tempest-ServerAddressesNegativeTestJSON-1352160384 tempest-ServerAddressesNegativeTestJSON-1352160384-project-member] [instance: 12087baa-e700-4977-b2df-3aa2c56cc2f6] Allocating IP information in the background. {{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 911.899798] env[61839]: DEBUG nova.network.neutron [None req-8237778e-4e32-46ea-924d-34926c83603d tempest-ServerAddressesNegativeTestJSON-1352160384 tempest-ServerAddressesNegativeTestJSON-1352160384-project-member] [instance: 12087baa-e700-4977-b2df-3aa2c56cc2f6] allocate_for_instance() {{(pid=61839) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 911.913185] env[61839]: DEBUG oslo_vmware.api [None req-9dd4913a-7b16-4970-942a-02a177c420e2 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]528fc972-9f0e-a3cf-d340-143cd0059b99, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 912.016179] env[61839]: DEBUG nova.policy [None req-8237778e-4e32-46ea-924d-34926c83603d tempest-ServerAddressesNegativeTestJSON-1352160384 tempest-ServerAddressesNegativeTestJSON-1352160384-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd97bf4b8e0d6436c917cd07c779164e5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd912b13e639149a3a7a7580122aba866', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61839) authorize /opt/stack/nova/nova/policy.py:201}} [ 912.054641] env[61839]: DEBUG oslo_vmware.api [None req-40823cea-b50f-4b8d-809e-3a11293f7329 tempest-ServerMetadataTestJSON-214230339 tempest-ServerMetadataTestJSON-214230339-project-member] Task: {'id': task-1314679, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.642273} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 912.058846] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-40823cea-b50f-4b8d-809e-3a11293f7329 tempest-ServerMetadataTestJSON-214230339 tempest-ServerMetadataTestJSON-214230339-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk to [datastore1] bac4c882-a23d-412f-ae98-f4f21d86681a/bac4c882-a23d-412f-ae98-f4f21d86681a.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 912.059402] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-40823cea-b50f-4b8d-809e-3a11293f7329 tempest-ServerMetadataTestJSON-214230339 tempest-ServerMetadataTestJSON-214230339-project-member] [instance: bac4c882-a23d-412f-ae98-f4f21d86681a] Extending root virtual disk to 1048576 {{(pid=61839) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 912.060352] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9c07d1ba-60ea-4c2b-bcfb-9345fc2e5e8f {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.069202] env[61839]: DEBUG oslo_vmware.api [None req-40823cea-b50f-4b8d-809e-3a11293f7329 tempest-ServerMetadataTestJSON-214230339 tempest-ServerMetadataTestJSON-214230339-project-member] Waiting for the task: (returnval){ [ 912.069202] env[61839]: value = "task-1314693" [ 912.069202] env[61839]: _type = "Task" [ 912.069202] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 912.085640] env[61839]: DEBUG oslo_vmware.api [None req-40823cea-b50f-4b8d-809e-3a11293f7329 tempest-ServerMetadataTestJSON-214230339 tempest-ServerMetadataTestJSON-214230339-project-member] Task: {'id': task-1314693, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 912.158980] env[61839]: DEBUG oslo_concurrency.lockutils [None req-38baf470-d8c9-4f5a-bcbc-11976bf3f84e tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Acquiring lock "0d42326a-9958-463a-90ae-34fb55e99c5b" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 912.159436] env[61839]: DEBUG oslo_concurrency.lockutils [None req-38baf470-d8c9-4f5a-bcbc-11976bf3f84e tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Lock "0d42326a-9958-463a-90ae-34fb55e99c5b" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 912.159678] env[61839]: DEBUG oslo_concurrency.lockutils [None req-38baf470-d8c9-4f5a-bcbc-11976bf3f84e tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Acquiring lock "0d42326a-9958-463a-90ae-34fb55e99c5b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 912.159967] env[61839]: DEBUG oslo_concurrency.lockutils [None req-38baf470-d8c9-4f5a-bcbc-11976bf3f84e tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Lock "0d42326a-9958-463a-90ae-34fb55e99c5b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 912.160164] env[61839]: DEBUG oslo_concurrency.lockutils [None req-38baf470-d8c9-4f5a-bcbc-11976bf3f84e tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Lock "0d42326a-9958-463a-90ae-34fb55e99c5b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 912.166269] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314682, 'name': CreateVM_Task, 'duration_secs': 2.656108} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 912.169971] env[61839]: INFO nova.compute.manager [None req-38baf470-d8c9-4f5a-bcbc-11976bf3f84e tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 0d42326a-9958-463a-90ae-34fb55e99c5b] Terminating instance [ 912.171764] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7f7b3f51-3e96-49f1-a84a-81ae649e6938] Created VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 912.172789] env[61839]: DEBUG oslo_concurrency.lockutils [None req-c8b7ef29-926a-40c9-9023-eb5013fb90e1 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 912.173031] env[61839]: DEBUG oslo_concurrency.lockutils [None req-c8b7ef29-926a-40c9-9023-eb5013fb90e1 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 912.173647] env[61839]: DEBUG oslo_concurrency.lockutils [None req-c8b7ef29-926a-40c9-9023-eb5013fb90e1 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 912.174201] env[61839]: DEBUG nova.compute.manager [None req-38baf470-d8c9-4f5a-bcbc-11976bf3f84e tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 0d42326a-9958-463a-90ae-34fb55e99c5b] Start destroying the instance on the hypervisor. {{(pid=61839) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 912.174306] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-38baf470-d8c9-4f5a-bcbc-11976bf3f84e tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 0d42326a-9958-463a-90ae-34fb55e99c5b] Destroying instance {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 912.174657] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cf60f545-3e9d-42d7-856f-1c9f9be3384f {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.177299] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0983809b-5110-4fa8-866b-ce87ea07a724 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.185907] env[61839]: DEBUG oslo_vmware.api [None req-c8b7ef29-926a-40c9-9023-eb5013fb90e1 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Waiting for the task: (returnval){ [ 912.185907] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]5207c49b-eb6a-6439-120f-8495d41bc8d4" [ 912.185907] env[61839]: _type = "Task" [ 912.185907] env[61839]: } to complete. 
{{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 912.195960] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-38baf470-d8c9-4f5a-bcbc-11976bf3f84e tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 0d42326a-9958-463a-90ae-34fb55e99c5b] Powering off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 912.196519] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-22d72e96-ce28-48da-ad50-b479c3598d17 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.206986] env[61839]: DEBUG oslo_vmware.api [None req-c8b7ef29-926a-40c9-9023-eb5013fb90e1 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]5207c49b-eb6a-6439-120f-8495d41bc8d4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 912.211905] env[61839]: DEBUG oslo_vmware.api [None req-38baf470-d8c9-4f5a-bcbc-11976bf3f84e tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Waiting for the task: (returnval){ [ 912.211905] env[61839]: value = "task-1314694" [ 912.211905] env[61839]: _type = "Task" [ 912.211905] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 912.224325] env[61839]: DEBUG oslo_vmware.api [None req-38baf470-d8c9-4f5a-bcbc-11976bf3f84e tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': task-1314694, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 912.250103] env[61839]: DEBUG nova.compute.manager [None req-23057198-cea0-4ec8-8098-1a162c29f89f tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: e265dcd3-6ab5-44b1-85be-bad934ebdb79] Starting instance... 
{{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 912.288439] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d613719-0ffd-473b-9170-3b5315b96027 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.298843] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f468908b-ff46-4600-b380-24f0bc6dd46d {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.338601] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5adec012-6158-4809-a90b-3f64a4790e08 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.357490] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91a9ad8a-7f08-4ef0-bd85-2113cb434f51 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.362191] env[61839]: DEBUG oslo_vmware.api [None req-095248fd-e632-43a1-88be-4cc8540a5623 tempest-ServerGroupTestJSON-1627784352 tempest-ServerGroupTestJSON-1627784352-project-member] Task: {'id': task-1314691, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 912.376336] env[61839]: DEBUG nova.compute.provider_tree [None req-471940ce-9b23-4cd3-8f29-5ca6a8a98f90 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 912.388952] env[61839]: DEBUG oslo_vmware.api [None req-5f3d16f0-0f77-47fa-b4e0-12a7b5b00f45 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Task: {'id': task-1314692, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.374302} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 912.389301] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-5f3d16f0-0f77-47fa-b4e0-12a7b5b00f45 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Deleted the datastore file {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 912.389574] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-5f3d16f0-0f77-47fa-b4e0-12a7b5b00f45 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: 042183e2-d203-4d07-a668-df24ba50e242] Deleted contents of the VM from datastore datastore1 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 912.389776] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-5f3d16f0-0f77-47fa-b4e0-12a7b5b00f45 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: 042183e2-d203-4d07-a668-df24ba50e242] Instance destroyed {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 912.389977] env[61839]: INFO nova.compute.manager [None req-5f3d16f0-0f77-47fa-b4e0-12a7b5b00f45 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: 042183e2-d203-4d07-a668-df24ba50e242] Took 1.16 seconds to destroy the instance on the hypervisor. [ 912.391205] env[61839]: DEBUG oslo.service.loopingcall [None req-5f3d16f0-0f77-47fa-b4e0-12a7b5b00f45 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 912.391729] env[61839]: DEBUG nova.compute.manager [-] [instance: 042183e2-d203-4d07-a668-df24ba50e242] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 912.391729] env[61839]: DEBUG nova.network.neutron [-] [instance: 042183e2-d203-4d07-a668-df24ba50e242] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 912.407159] env[61839]: DEBUG nova.compute.manager [None req-8237778e-4e32-46ea-924d-34926c83603d tempest-ServerAddressesNegativeTestJSON-1352160384 tempest-ServerAddressesNegativeTestJSON-1352160384-project-member] [instance: 12087baa-e700-4977-b2df-3aa2c56cc2f6] Start building block device mappings for instance. {{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 912.410311] env[61839]: DEBUG oslo_vmware.api [None req-9dd4913a-7b16-4970-942a-02a177c420e2 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]528fc972-9f0e-a3cf-d340-143cd0059b99, 'name': SearchDatastore_Task, 'duration_secs': 0.027863} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 912.410838] env[61839]: DEBUG oslo_concurrency.lockutils [None req-9dd4913a-7b16-4970-942a-02a177c420e2 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 912.411083] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-9dd4913a-7b16-4970-942a-02a177c420e2 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk to [datastore1] 56369316-a445-4a2a-a0a6-967074104e19/e497cc62-282a-4a70-9770-22d80d8a1013-rescue.vmdk. {{(pid=61839) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 912.411402] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f525cf8f-e899-4981-bf9a-a6e1f9b36159 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.420983] env[61839]: DEBUG oslo_vmware.api [None req-9dd4913a-7b16-4970-942a-02a177c420e2 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Waiting for the task: (returnval){ [ 912.420983] env[61839]: value = "task-1314695" [ 912.420983] env[61839]: _type = "Task" [ 912.420983] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 912.431026] env[61839]: DEBUG oslo_vmware.api [None req-9dd4913a-7b16-4970-942a-02a177c420e2 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Task: {'id': task-1314695, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 912.580473] env[61839]: DEBUG oslo_vmware.api [None req-40823cea-b50f-4b8d-809e-3a11293f7329 tempest-ServerMetadataTestJSON-214230339 tempest-ServerMetadataTestJSON-214230339-project-member] Task: {'id': task-1314693, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.092801} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 912.581137] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-40823cea-b50f-4b8d-809e-3a11293f7329 tempest-ServerMetadataTestJSON-214230339 tempest-ServerMetadataTestJSON-214230339-project-member] [instance: bac4c882-a23d-412f-ae98-f4f21d86681a] Extended root virtual disk {{(pid=61839) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 912.581995] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8793f081-ba75-430e-9ff4-9efe793c3ca0 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.608772] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-40823cea-b50f-4b8d-809e-3a11293f7329 tempest-ServerMetadataTestJSON-214230339 tempest-ServerMetadataTestJSON-214230339-project-member] [instance: bac4c882-a23d-412f-ae98-f4f21d86681a] Reconfiguring VM instance instance-00000054 to attach disk [datastore1] bac4c882-a23d-412f-ae98-f4f21d86681a/bac4c882-a23d-412f-ae98-f4f21d86681a.vmdk or device None with type sparse {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 912.612069] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b7bbe98a-1039-482b-b6c5-c6babbb10858 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.635457] env[61839]: DEBUG oslo_vmware.api [None req-40823cea-b50f-4b8d-809e-3a11293f7329 tempest-ServerMetadataTestJSON-214230339 tempest-ServerMetadataTestJSON-214230339-project-member] Waiting for the task: (returnval){ [ 912.635457] env[61839]: value = "task-1314696" [ 912.635457] env[61839]: _type = "Task" [ 912.635457] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 912.646756] env[61839]: DEBUG oslo_vmware.api [None req-40823cea-b50f-4b8d-809e-3a11293f7329 tempest-ServerMetadataTestJSON-214230339 tempest-ServerMetadataTestJSON-214230339-project-member] Task: {'id': task-1314696, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 912.700664] env[61839]: DEBUG oslo_vmware.api [None req-c8b7ef29-926a-40c9-9023-eb5013fb90e1 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]5207c49b-eb6a-6439-120f-8495d41bc8d4, 'name': SearchDatastore_Task, 'duration_secs': 0.059058} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 912.701277] env[61839]: DEBUG oslo_concurrency.lockutils [None req-c8b7ef29-926a-40c9-9023-eb5013fb90e1 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 912.701631] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-c8b7ef29-926a-40c9-9023-eb5013fb90e1 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 7f7b3f51-3e96-49f1-a84a-81ae649e6938] Processing image e497cc62-282a-4a70-9770-22d80d8a1013 {{(pid=61839) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 912.701982] env[61839]: DEBUG oslo_concurrency.lockutils [None req-c8b7ef29-926a-40c9-9023-eb5013fb90e1 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 912.702553] env[61839]: DEBUG oslo_concurrency.lockutils [None req-c8b7ef29-926a-40c9-9023-eb5013fb90e1 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 912.702805] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-c8b7ef29-926a-40c9-9023-eb5013fb90e1 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 912.703202] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-31264de2-0840-45e8-98ef-f1403567ff47 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.719982] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-c8b7ef29-926a-40c9-9023-eb5013fb90e1 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 912.720283] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-c8b7ef29-926a-40c9-9023-eb5013fb90e1 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61839) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 912.721440] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cc183e5c-38e5-4202-90dc-68c848258c74 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.728198] env[61839]: DEBUG oslo_vmware.api [None req-38baf470-d8c9-4f5a-bcbc-11976bf3f84e tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': task-1314694, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 912.729804] env[61839]: DEBUG oslo_vmware.api [None req-c8b7ef29-926a-40c9-9023-eb5013fb90e1 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Waiting for the task: (returnval){ [ 912.729804] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52de7dd7-f76b-4a97-fe94-d8ec98485499" [ 912.729804] env[61839]: _type = "Task" [ 912.729804] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 912.739151] env[61839]: DEBUG oslo_vmware.api [None req-c8b7ef29-926a-40c9-9023-eb5013fb90e1 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52de7dd7-f76b-4a97-fe94-d8ec98485499, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 912.762296] env[61839]: DEBUG nova.network.neutron [None req-8237778e-4e32-46ea-924d-34926c83603d tempest-ServerAddressesNegativeTestJSON-1352160384 tempest-ServerAddressesNegativeTestJSON-1352160384-project-member] [instance: 12087baa-e700-4977-b2df-3aa2c56cc2f6] Successfully created port: 1cf79153-ee72-44b5-817d-61edb00b0a4e {{(pid=61839) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 912.775242] env[61839]: DEBUG oslo_concurrency.lockutils [None req-23057198-cea0-4ec8-8098-1a162c29f89f tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 912.795698] env[61839]: DEBUG nova.network.neutron [None req-a4e9d78c-507d-4734-8203-a95de4d6e9b0 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: fa8a2265-291e-4424-bea1-72574e495a72] Updating instance_info_cache with network_info: [{"id": "471fcd5e-1ea3-4791-9a4d-b68197f8068e", "address": "fa:16:3e:48:06:f8", "network": {"id": "2334f355-8fd7-4df2-ba1c-a28b5fde97f2", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-202913707-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8935bcc7ee644cb7a2a33557a708189c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", 
"port_filter": true, "nsx-logical-switch-id": "37434b93-dfdc-4a3f-bf5a-9f2cbe25a754", "external-id": "nsx-vlan-transportzone-676", "segmentation_id": 676, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap471fcd5e-1e", "ovs_interfaceid": "471fcd5e-1ea3-4791-9a4d-b68197f8068e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 912.853927] env[61839]: DEBUG oslo_vmware.api [None req-095248fd-e632-43a1-88be-4cc8540a5623 tempest-ServerGroupTestJSON-1627784352 tempest-ServerGroupTestJSON-1627784352-project-member] Task: {'id': task-1314691, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.634177} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 912.854660] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-095248fd-e632-43a1-88be-4cc8540a5623 tempest-ServerGroupTestJSON-1627784352 tempest-ServerGroupTestJSON-1627784352-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk to [datastore1] 65f34f9e-353a-4f94-8f79-9bda89451885/65f34f9e-353a-4f94-8f79-9bda89451885.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 912.854980] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-095248fd-e632-43a1-88be-4cc8540a5623 tempest-ServerGroupTestJSON-1627784352 tempest-ServerGroupTestJSON-1627784352-project-member] [instance: 65f34f9e-353a-4f94-8f79-9bda89451885] Extending root virtual disk to 1048576 {{(pid=61839) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 912.855540] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8aebb7db-f0fc-4267-9435-1715622e2719 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.866452] env[61839]: DEBUG oslo_vmware.api [None req-095248fd-e632-43a1-88be-4cc8540a5623 tempest-ServerGroupTestJSON-1627784352 tempest-ServerGroupTestJSON-1627784352-project-member] Waiting for the task: (returnval){ [ 912.866452] env[61839]: value = "task-1314697" [ 912.866452] env[61839]: _type = "Task" [ 912.866452] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 912.877079] env[61839]: DEBUG oslo_vmware.api [None req-095248fd-e632-43a1-88be-4cc8540a5623 tempest-ServerGroupTestJSON-1627784352 tempest-ServerGroupTestJSON-1627784352-project-member] Task: {'id': task-1314697, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 912.883626] env[61839]: DEBUG nova.scheduler.client.report [None req-471940ce-9b23-4cd3-8f29-5ca6a8a98f90 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 912.936626] env[61839]: DEBUG oslo_vmware.api [None req-9dd4913a-7b16-4970-942a-02a177c420e2 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Task: {'id': task-1314695, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.147364] env[61839]: DEBUG oslo_vmware.api [None req-40823cea-b50f-4b8d-809e-3a11293f7329 tempest-ServerMetadataTestJSON-214230339 tempest-ServerMetadataTestJSON-214230339-project-member] Task: {'id': task-1314696, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.227051] env[61839]: DEBUG oslo_vmware.api [None req-38baf470-d8c9-4f5a-bcbc-11976bf3f84e tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': task-1314694, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.248450] env[61839]: DEBUG oslo_vmware.api [None req-c8b7ef29-926a-40c9-9023-eb5013fb90e1 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52de7dd7-f76b-4a97-fe94-d8ec98485499, 'name': SearchDatastore_Task, 'duration_secs': 0.01723} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 913.251815] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7926a81c-fba4-4e53-89b7-d91fd7de7995 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.263034] env[61839]: DEBUG oslo_vmware.api [None req-c8b7ef29-926a-40c9-9023-eb5013fb90e1 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Waiting for the task: (returnval){ [ 913.263034] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52c61650-cffa-1d71-d636-776e4e4d9061" [ 913.263034] env[61839]: _type = "Task" [ 913.263034] env[61839]: } to complete. 
{{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 913.277646] env[61839]: DEBUG oslo_vmware.api [None req-c8b7ef29-926a-40c9-9023-eb5013fb90e1 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52c61650-cffa-1d71-d636-776e4e4d9061, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.298672] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a4e9d78c-507d-4734-8203-a95de4d6e9b0 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Releasing lock "refresh_cache-fa8a2265-291e-4424-bea1-72574e495a72" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 913.299012] env[61839]: DEBUG nova.objects.instance [None req-a4e9d78c-507d-4734-8203-a95de4d6e9b0 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Lazy-loading 'migration_context' on Instance uuid fa8a2265-291e-4424-bea1-72574e495a72 {{(pid=61839) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 913.382328] env[61839]: DEBUG oslo_vmware.api [None req-095248fd-e632-43a1-88be-4cc8540a5623 tempest-ServerGroupTestJSON-1627784352 tempest-ServerGroupTestJSON-1627784352-project-member] Task: {'id': task-1314697, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.400307} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 913.383096] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-095248fd-e632-43a1-88be-4cc8540a5623 tempest-ServerGroupTestJSON-1627784352 tempest-ServerGroupTestJSON-1627784352-project-member] [instance: 65f34f9e-353a-4f94-8f79-9bda89451885] Extended root virtual disk {{(pid=61839) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 913.384377] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0914b611-e079-45ab-991b-c308bcb421af {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.390812] env[61839]: DEBUG oslo_concurrency.lockutils [None req-471940ce-9b23-4cd3-8f29-5ca6a8a98f90 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.997s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 913.394300] env[61839]: DEBUG oslo_concurrency.lockutils [None req-66af4a22-0818-42a5-aa42-24ba00cd1977 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.823s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 913.399462] env[61839]: INFO nova.compute.claims [None req-66af4a22-0818-42a5-aa42-24ba00cd1977 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: e3126a08-c3c6-4d9e-9a50-659085fbfec9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 913.431986] env[61839]: INFO nova.scheduler.client.report 
[None req-471940ce-9b23-4cd3-8f29-5ca6a8a98f90 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Deleted allocations for instance 40c54d84-8e50-483a-b4e0-5f1cc72b0880 [ 913.437245] env[61839]: DEBUG nova.compute.manager [None req-8237778e-4e32-46ea-924d-34926c83603d tempest-ServerAddressesNegativeTestJSON-1352160384 tempest-ServerAddressesNegativeTestJSON-1352160384-project-member] [instance: 12087baa-e700-4977-b2df-3aa2c56cc2f6] Start spawning the instance on the hypervisor. {{(pid=61839) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 913.456641] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-095248fd-e632-43a1-88be-4cc8540a5623 tempest-ServerGroupTestJSON-1627784352 tempest-ServerGroupTestJSON-1627784352-project-member] [instance: 65f34f9e-353a-4f94-8f79-9bda89451885] Reconfiguring VM instance instance-00000052 to attach disk [datastore1] 65f34f9e-353a-4f94-8f79-9bda89451885/65f34f9e-353a-4f94-8f79-9bda89451885.vmdk or device None with type sparse {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 913.464139] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-23971129-b632-44d3-9664-2f029f941f02 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.492041] env[61839]: DEBUG oslo_vmware.api [None req-9dd4913a-7b16-4970-942a-02a177c420e2 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Task: {'id': task-1314695, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.696132} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 913.494952] env[61839]: DEBUG nova.virt.hardware [None req-8237778e-4e32-46ea-924d-34926c83603d tempest-ServerAddressesNegativeTestJSON-1352160384 tempest-ServerAddressesNegativeTestJSON-1352160384-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-18T16:51:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-18T16:50:57Z,direct_url=,disk_format='vmdk',id=e497cc62-282a-4a70-9770-22d80d8a1013,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a98e034276e44534a9feace637762da7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-18T16:50:58Z,virtual_size=,visibility=), allow threads: False {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 913.495273] env[61839]: DEBUG nova.virt.hardware [None req-8237778e-4e32-46ea-924d-34926c83603d tempest-ServerAddressesNegativeTestJSON-1352160384 tempest-ServerAddressesNegativeTestJSON-1352160384-project-member] Flavor limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 913.495476] env[61839]: DEBUG nova.virt.hardware [None req-8237778e-4e32-46ea-924d-34926c83603d tempest-ServerAddressesNegativeTestJSON-1352160384 tempest-ServerAddressesNegativeTestJSON-1352160384-project-member] Image limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:352}} [ 913.495696] env[61839]: DEBUG nova.virt.hardware [None req-8237778e-4e32-46ea-924d-34926c83603d tempest-ServerAddressesNegativeTestJSON-1352160384 tempest-ServerAddressesNegativeTestJSON-1352160384-project-member] Flavor pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 913.495879] env[61839]: DEBUG nova.virt.hardware [None req-8237778e-4e32-46ea-924d-34926c83603d tempest-ServerAddressesNegativeTestJSON-1352160384 tempest-ServerAddressesNegativeTestJSON-1352160384-project-member] Image pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 913.496601] env[61839]: DEBUG nova.virt.hardware [None req-8237778e-4e32-46ea-924d-34926c83603d tempest-ServerAddressesNegativeTestJSON-1352160384 tempest-ServerAddressesNegativeTestJSON-1352160384-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 913.496601] env[61839]: DEBUG nova.virt.hardware [None req-8237778e-4e32-46ea-924d-34926c83603d tempest-ServerAddressesNegativeTestJSON-1352160384 tempest-ServerAddressesNegativeTestJSON-1352160384-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 913.496601] env[61839]: DEBUG nova.virt.hardware [None req-8237778e-4e32-46ea-924d-34926c83603d tempest-ServerAddressesNegativeTestJSON-1352160384 tempest-ServerAddressesNegativeTestJSON-1352160384-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 913.496936] env[61839]: DEBUG nova.virt.hardware [None req-8237778e-4e32-46ea-924d-34926c83603d tempest-ServerAddressesNegativeTestJSON-1352160384 tempest-ServerAddressesNegativeTestJSON-1352160384-project-member] Got 1 possible topologies {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 913.497222] env[61839]: DEBUG nova.virt.hardware [None req-8237778e-4e32-46ea-924d-34926c83603d tempest-ServerAddressesNegativeTestJSON-1352160384 tempest-ServerAddressesNegativeTestJSON-1352160384-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 913.497463] env[61839]: DEBUG nova.virt.hardware [None req-8237778e-4e32-46ea-924d-34926c83603d tempest-ServerAddressesNegativeTestJSON-1352160384 tempest-ServerAddressesNegativeTestJSON-1352160384-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 913.499413] env[61839]: INFO nova.virt.vmwareapi.ds_util [None req-9dd4913a-7b16-4970-942a-02a177c420e2 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk to [datastore1] 56369316-a445-4a2a-a0a6-967074104e19/e497cc62-282a-4a70-9770-22d80d8a1013-rescue.vmdk. 
[ 913.500246] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8ef03c0-c34d-4f87-80ac-8c4ae25f2ba8 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.503202] env[61839]: DEBUG oslo_vmware.api [None req-095248fd-e632-43a1-88be-4cc8540a5623 tempest-ServerGroupTestJSON-1627784352 tempest-ServerGroupTestJSON-1627784352-project-member] Waiting for the task: (returnval){ [ 913.503202] env[61839]: value = "task-1314698" [ 913.503202] env[61839]: _type = "Task" [ 913.503202] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 913.504298] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0c3f1c8-6850-40e4-8d58-dddcb05944eb {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.515509] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58571e4f-305f-44a0-99c6-30873388602f {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.539889] env[61839]: DEBUG oslo_vmware.api [None req-095248fd-e632-43a1-88be-4cc8540a5623 tempest-ServerGroupTestJSON-1627784352 tempest-ServerGroupTestJSON-1627784352-project-member] Task: {'id': task-1314698, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.547871] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-9dd4913a-7b16-4970-942a-02a177c420e2 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 56369316-a445-4a2a-a0a6-967074104e19] Reconfiguring VM instance instance-0000004c to attach disk [datastore1] 56369316-a445-4a2a-a0a6-967074104e19/e497cc62-282a-4a70-9770-22d80d8a1013-rescue.vmdk or device None with type thin {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 913.549299] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d880c09b-1fad-4c9a-afca-35e66e6a3ed1 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.578768] env[61839]: DEBUG oslo_vmware.api [None req-9dd4913a-7b16-4970-942a-02a177c420e2 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Waiting for the task: (returnval){ [ 913.578768] env[61839]: value = "task-1314699" [ 913.578768] env[61839]: _type = "Task" [ 913.578768] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 913.587904] env[61839]: DEBUG oslo_vmware.api [None req-9dd4913a-7b16-4970-942a-02a177c420e2 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Task: {'id': task-1314699, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.590290] env[61839]: DEBUG nova.compute.manager [req-6413f7a1-59c1-400a-8b27-e9807c7a67d9 req-e6d44e42-4218-4d56-84f9-3500580e2886 service nova] [instance: 042183e2-d203-4d07-a668-df24ba50e242] Received event network-vif-deleted-046a1230-64fc-4271-9dbb-bd9cab4951bd {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 913.590498] env[61839]: INFO nova.compute.manager [req-6413f7a1-59c1-400a-8b27-e9807c7a67d9 req-e6d44e42-4218-4d56-84f9-3500580e2886 service nova] [instance: 042183e2-d203-4d07-a668-df24ba50e242] Neutron deleted interface 046a1230-64fc-4271-9dbb-bd9cab4951bd; detaching it from the instance and deleting it from the info cache [ 913.590684] env[61839]: DEBUG nova.network.neutron [req-6413f7a1-59c1-400a-8b27-e9807c7a67d9 req-e6d44e42-4218-4d56-84f9-3500580e2886 service nova] [instance: 042183e2-d203-4d07-a668-df24ba50e242] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 913.623583] env[61839]: DEBUG nova.network.neutron [-] [instance: 042183e2-d203-4d07-a668-df24ba50e242] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 913.649780] env[61839]: DEBUG oslo_vmware.api [None req-40823cea-b50f-4b8d-809e-3a11293f7329 tempest-ServerMetadataTestJSON-214230339 tempest-ServerMetadataTestJSON-214230339-project-member] Task: {'id': task-1314696, 'name': ReconfigVM_Task, 'duration_secs': 0.790114} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 913.650237] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-40823cea-b50f-4b8d-809e-3a11293f7329 tempest-ServerMetadataTestJSON-214230339 tempest-ServerMetadataTestJSON-214230339-project-member] [instance: bac4c882-a23d-412f-ae98-f4f21d86681a] Reconfigured VM instance instance-00000054 to attach disk [datastore1] bac4c882-a23d-412f-ae98-f4f21d86681a/bac4c882-a23d-412f-ae98-f4f21d86681a.vmdk or device None with type sparse {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 913.650842] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-96c60412-3093-424b-a4b3-27eb54289a50 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.658810] env[61839]: DEBUG oslo_vmware.api [None req-40823cea-b50f-4b8d-809e-3a11293f7329 tempest-ServerMetadataTestJSON-214230339 tempest-ServerMetadataTestJSON-214230339-project-member] Waiting for the task: (returnval){ [ 913.658810] env[61839]: value = "task-1314701" [ 913.658810] env[61839]: _type = "Task" [ 913.658810] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 913.670543] env[61839]: DEBUG oslo_vmware.api [None req-40823cea-b50f-4b8d-809e-3a11293f7329 tempest-ServerMetadataTestJSON-214230339 tempest-ServerMetadataTestJSON-214230339-project-member] Task: {'id': task-1314701, 'name': Rename_Task} progress is 5%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.726138] env[61839]: DEBUG oslo_vmware.api [None req-38baf470-d8c9-4f5a-bcbc-11976bf3f84e tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': task-1314694, 'name': PowerOffVM_Task, 'duration_secs': 1.160438} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 913.726496] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-38baf470-d8c9-4f5a-bcbc-11976bf3f84e tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 0d42326a-9958-463a-90ae-34fb55e99c5b] Powered off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 913.726689] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-38baf470-d8c9-4f5a-bcbc-11976bf3f84e tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 0d42326a-9958-463a-90ae-34fb55e99c5b] Unregistering the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 913.726965] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f0fa6639-a0c4-422d-91ca-c80f2b6e3de2 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.773278] env[61839]: DEBUG oslo_vmware.api [None req-c8b7ef29-926a-40c9-9023-eb5013fb90e1 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52c61650-cffa-1d71-d636-776e4e4d9061, 'name': SearchDatastore_Task, 'duration_secs': 0.017236} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 913.773593] env[61839]: DEBUG oslo_concurrency.lockutils [None req-c8b7ef29-926a-40c9-9023-eb5013fb90e1 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 913.773859] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8b7ef29-926a-40c9-9023-eb5013fb90e1 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk to [datastore1] 7f7b3f51-3e96-49f1-a84a-81ae649e6938/7f7b3f51-3e96-49f1-a84a-81ae649e6938.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 913.774161] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0bc6a515-1d26-4280-9323-28691b10d480 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.782737] env[61839]: DEBUG oslo_vmware.api [None req-c8b7ef29-926a-40c9-9023-eb5013fb90e1 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Waiting for the task: (returnval){ [ 913.782737] env[61839]: value = "task-1314703" [ 913.782737] env[61839]: _type = "Task" [ 913.782737] env[61839]: } to complete. 
{{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 913.792748] env[61839]: DEBUG oslo_vmware.api [None req-c8b7ef29-926a-40c9-9023-eb5013fb90e1 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1314703, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.796515] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-38baf470-d8c9-4f5a-bcbc-11976bf3f84e tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 0d42326a-9958-463a-90ae-34fb55e99c5b] Unregistered the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 913.796754] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-38baf470-d8c9-4f5a-bcbc-11976bf3f84e tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 0d42326a-9958-463a-90ae-34fb55e99c5b] Deleting contents of the VM from datastore datastore1 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 913.796939] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-38baf470-d8c9-4f5a-bcbc-11976bf3f84e tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Deleting the datastore file [datastore1] 0d42326a-9958-463a-90ae-34fb55e99c5b {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 913.797257] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-eb3a541f-d329-4fa9-bf41-1ac7c60fd44d {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.802661] env[61839]: DEBUG nova.objects.base [None req-a4e9d78c-507d-4734-8203-a95de4d6e9b0 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Object Instance lazy-loaded attributes: info_cache,migration_context {{(pid=61839) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 913.803586] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-394cf4b8-2926-4107-a988-8747ad4e459a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.807547] env[61839]: DEBUG oslo_vmware.api [None req-38baf470-d8c9-4f5a-bcbc-11976bf3f84e tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Waiting for the task: (returnval){ [ 913.807547] env[61839]: value = "task-1314704" [ 913.807547] env[61839]: _type = "Task" [ 913.807547] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 913.826201] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0b04b259-0a68-4ab8-8e99-84aea633b673 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.832305] env[61839]: DEBUG oslo_vmware.api [None req-38baf470-d8c9-4f5a-bcbc-11976bf3f84e tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': task-1314704, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.837018] env[61839]: DEBUG oslo_vmware.api [None req-a4e9d78c-507d-4734-8203-a95de4d6e9b0 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Waiting for the task: (returnval){ [ 913.837018] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52f8c7f8-3819-f4cf-e10e-ffc28eb147b0" [ 913.837018] env[61839]: _type = "Task" [ 913.837018] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 913.846532] env[61839]: DEBUG oslo_vmware.api [None req-a4e9d78c-507d-4734-8203-a95de4d6e9b0 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52f8c7f8-3819-f4cf-e10e-ffc28eb147b0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.935160] env[61839]: DEBUG oslo_concurrency.lockutils [None req-66af4a22-0818-42a5-aa42-24ba00cd1977 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.540s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 913.936071] env[61839]: DEBUG nova.compute.utils [None req-66af4a22-0818-42a5-aa42-24ba00cd1977 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: e3126a08-c3c6-4d9e-9a50-659085fbfec9] Instance e3126a08-c3c6-4d9e-9a50-659085fbfec9 could not be found. {{(pid=61839) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 913.937896] env[61839]: DEBUG oslo_concurrency.lockutils [None req-f6ddf676-45e1-46d7-86c0-b27d63a4d858 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.883s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 913.938289] env[61839]: DEBUG nova.objects.instance [None req-f6ddf676-45e1-46d7-86c0-b27d63a4d858 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Lazy-loading 'resources' on Instance uuid 3f86a0d5-30fd-42cc-bd40-14bce9d0e56f {{(pid=61839) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 913.940414] env[61839]: DEBUG nova.compute.manager [None req-66af4a22-0818-42a5-aa42-24ba00cd1977 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: e3126a08-c3c6-4d9e-9a50-659085fbfec9] Instance disappeared during build. 
{{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2513}} [ 913.940678] env[61839]: DEBUG nova.compute.manager [None req-66af4a22-0818-42a5-aa42-24ba00cd1977 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: e3126a08-c3c6-4d9e-9a50-659085fbfec9] Unplugging VIFs for instance {{(pid=61839) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 913.941656] env[61839]: DEBUG oslo_concurrency.lockutils [None req-66af4a22-0818-42a5-aa42-24ba00cd1977 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Acquiring lock "refresh_cache-e3126a08-c3c6-4d9e-9a50-659085fbfec9" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 913.941656] env[61839]: DEBUG oslo_concurrency.lockutils [None req-66af4a22-0818-42a5-aa42-24ba00cd1977 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Acquired lock "refresh_cache-e3126a08-c3c6-4d9e-9a50-659085fbfec9" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 913.941656] env[61839]: DEBUG nova.network.neutron [None req-66af4a22-0818-42a5-aa42-24ba00cd1977 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: e3126a08-c3c6-4d9e-9a50-659085fbfec9] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 913.986236] env[61839]: DEBUG oslo_concurrency.lockutils [None req-471940ce-9b23-4cd3-8f29-5ca6a8a98f90 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Lock "40c54d84-8e50-483a-b4e0-5f1cc72b0880" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.104s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 914.018065] env[61839]: DEBUG oslo_vmware.api [None req-095248fd-e632-43a1-88be-4cc8540a5623 tempest-ServerGroupTestJSON-1627784352 tempest-ServerGroupTestJSON-1627784352-project-member] Task: {'id': task-1314698, 'name': ReconfigVM_Task, 'duration_secs': 0.480562} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 914.018395] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-095248fd-e632-43a1-88be-4cc8540a5623 tempest-ServerGroupTestJSON-1627784352 tempest-ServerGroupTestJSON-1627784352-project-member] [instance: 65f34f9e-353a-4f94-8f79-9bda89451885] Reconfigured VM instance instance-00000052 to attach disk [datastore1] 65f34f9e-353a-4f94-8f79-9bda89451885/65f34f9e-353a-4f94-8f79-9bda89451885.vmdk or device None with type sparse {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 914.019773] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b2b11d32-1062-4632-9670-76f0433eb817 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.031095] env[61839]: DEBUG oslo_vmware.api [None req-095248fd-e632-43a1-88be-4cc8540a5623 tempest-ServerGroupTestJSON-1627784352 tempest-ServerGroupTestJSON-1627784352-project-member] Waiting for the task: (returnval){ [ 914.031095] env[61839]: value = "task-1314705" [ 914.031095] env[61839]: _type = "Task" [ 914.031095] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 914.047334] env[61839]: DEBUG oslo_vmware.api [None req-095248fd-e632-43a1-88be-4cc8540a5623 tempest-ServerGroupTestJSON-1627784352 tempest-ServerGroupTestJSON-1627784352-project-member] Task: {'id': task-1314705, 'name': Rename_Task} progress is 5%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.094357] env[61839]: DEBUG oslo_vmware.api [None req-9dd4913a-7b16-4970-942a-02a177c420e2 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Task: {'id': task-1314699, 'name': ReconfigVM_Task, 'duration_secs': 0.337426} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 914.095100] env[61839]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-596e5c22-741c-4322-a759-0815d2c86b1a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.100662] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-9dd4913a-7b16-4970-942a-02a177c420e2 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 56369316-a445-4a2a-a0a6-967074104e19] Reconfigured VM instance instance-0000004c to attach disk [datastore1] 56369316-a445-4a2a-a0a6-967074104e19/e497cc62-282a-4a70-9770-22d80d8a1013-rescue.vmdk or device None with type thin {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 914.103624] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a53fbd06-2ee6-428e-9b50-abc4f1573cbc {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.139692] env[61839]: INFO nova.compute.manager [-] [instance: 042183e2-d203-4d07-a668-df24ba50e242] Took 1.75 seconds to deallocate network for instance. 
[ 914.149664] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-93d72a82-d225-425e-a589-9f50858a2c80 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.168773] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5470f34-050f-4811-945b-aab2f749b17f {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.200659] env[61839]: DEBUG oslo_vmware.api [None req-40823cea-b50f-4b8d-809e-3a11293f7329 tempest-ServerMetadataTestJSON-214230339 tempest-ServerMetadataTestJSON-214230339-project-member] Task: {'id': task-1314701, 'name': Rename_Task, 'duration_secs': 0.225527} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 914.200659] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-40823cea-b50f-4b8d-809e-3a11293f7329 tempest-ServerMetadataTestJSON-214230339 tempest-ServerMetadataTestJSON-214230339-project-member] [instance: bac4c882-a23d-412f-ae98-f4f21d86681a] Powering on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 914.200659] env[61839]: DEBUG oslo_vmware.api [None req-9dd4913a-7b16-4970-942a-02a177c420e2 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Waiting for the task: (returnval){ [ 914.200659] env[61839]: value = "task-1314706" [ 914.200659] env[61839]: _type = "Task" [ 914.200659] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 914.200659] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4ac52edf-775f-498c-ba2c-799b5a9ca621 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.222622] env[61839]: DEBUG nova.compute.manager [req-6413f7a1-59c1-400a-8b27-e9807c7a67d9 req-e6d44e42-4218-4d56-84f9-3500580e2886 service nova] [instance: 042183e2-d203-4d07-a668-df24ba50e242] Detach interface failed, port_id=046a1230-64fc-4271-9dbb-bd9cab4951bd, reason: Instance 042183e2-d203-4d07-a668-df24ba50e242 could not be found. {{(pid=61839) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 914.225654] env[61839]: DEBUG oslo_vmware.api [None req-40823cea-b50f-4b8d-809e-3a11293f7329 tempest-ServerMetadataTestJSON-214230339 tempest-ServerMetadataTestJSON-214230339-project-member] Waiting for the task: (returnval){ [ 914.225654] env[61839]: value = "task-1314707" [ 914.225654] env[61839]: _type = "Task" [ 914.225654] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 914.231492] env[61839]: DEBUG oslo_vmware.api [None req-9dd4913a-7b16-4970-942a-02a177c420e2 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Task: {'id': task-1314706, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.243876] env[61839]: DEBUG oslo_vmware.api [None req-40823cea-b50f-4b8d-809e-3a11293f7329 tempest-ServerMetadataTestJSON-214230339 tempest-ServerMetadataTestJSON-214230339-project-member] Task: {'id': task-1314707, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.296631] env[61839]: DEBUG oslo_vmware.api [None req-c8b7ef29-926a-40c9-9023-eb5013fb90e1 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1314703, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.322461] env[61839]: DEBUG oslo_vmware.api [None req-38baf470-d8c9-4f5a-bcbc-11976bf3f84e tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': task-1314704, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.255919} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 914.322912] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-38baf470-d8c9-4f5a-bcbc-11976bf3f84e tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Deleted the datastore file {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 914.323240] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-38baf470-d8c9-4f5a-bcbc-11976bf3f84e tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 0d42326a-9958-463a-90ae-34fb55e99c5b] Deleted contents of the VM from datastore datastore1 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 914.323515] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-38baf470-d8c9-4f5a-bcbc-11976bf3f84e tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 0d42326a-9958-463a-90ae-34fb55e99c5b] Instance destroyed {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 914.323807] env[61839]: INFO nova.compute.manager [None req-38baf470-d8c9-4f5a-bcbc-11976bf3f84e tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 0d42326a-9958-463a-90ae-34fb55e99c5b] Took 2.15 seconds to destroy the instance on the hypervisor. [ 914.324277] env[61839]: DEBUG oslo.service.loopingcall [None req-38baf470-d8c9-4f5a-bcbc-11976bf3f84e tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 914.324573] env[61839]: DEBUG nova.compute.manager [-] [instance: 0d42326a-9958-463a-90ae-34fb55e99c5b] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 914.324704] env[61839]: DEBUG nova.network.neutron [-] [instance: 0d42326a-9958-463a-90ae-34fb55e99c5b] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 914.350528] env[61839]: DEBUG oslo_vmware.api [None req-a4e9d78c-507d-4734-8203-a95de4d6e9b0 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52f8c7f8-3819-f4cf-e10e-ffc28eb147b0, 'name': SearchDatastore_Task, 'duration_secs': 0.018434} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 914.350528] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a4e9d78c-507d-4734-8203-a95de4d6e9b0 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 914.446321] env[61839]: DEBUG nova.compute.utils [None req-66af4a22-0818-42a5-aa42-24ba00cd1977 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: e3126a08-c3c6-4d9e-9a50-659085fbfec9] Can not refresh info_cache because instance was not found {{(pid=61839) refresh_info_cache_for_instance /opt/stack/nova/nova/compute/utils.py:1024}} [ 914.465782] env[61839]: DEBUG nova.network.neutron [None req-66af4a22-0818-42a5-aa42-24ba00cd1977 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: e3126a08-c3c6-4d9e-9a50-659085fbfec9] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 914.550031] env[61839]: DEBUG oslo_vmware.api [None req-095248fd-e632-43a1-88be-4cc8540a5623 tempest-ServerGroupTestJSON-1627784352 tempest-ServerGroupTestJSON-1627784352-project-member] Task: {'id': task-1314705, 'name': Rename_Task, 'duration_secs': 0.426503} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 914.550031] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-095248fd-e632-43a1-88be-4cc8540a5623 tempest-ServerGroupTestJSON-1627784352 tempest-ServerGroupTestJSON-1627784352-project-member] [instance: 65f34f9e-353a-4f94-8f79-9bda89451885] Powering on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 914.550031] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-66062a90-f708-42da-a48f-f28050e652f6 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.561230] env[61839]: DEBUG oslo_vmware.api [None req-095248fd-e632-43a1-88be-4cc8540a5623 tempest-ServerGroupTestJSON-1627784352 tempest-ServerGroupTestJSON-1627784352-project-member] Waiting for the task: (returnval){ [ 914.561230] env[61839]: value = "task-1314708" [ 914.561230] env[61839]: _type = "Task" [ 914.561230] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 914.579719] env[61839]: DEBUG oslo_vmware.api [None req-095248fd-e632-43a1-88be-4cc8540a5623 tempest-ServerGroupTestJSON-1627784352 tempest-ServerGroupTestJSON-1627784352-project-member] Task: {'id': task-1314708, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.608310] env[61839]: DEBUG nova.network.neutron [None req-66af4a22-0818-42a5-aa42-24ba00cd1977 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: e3126a08-c3c6-4d9e-9a50-659085fbfec9] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 914.692472] env[61839]: DEBUG oslo_concurrency.lockutils [None req-5f3d16f0-0f77-47fa-b4e0-12a7b5b00f45 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 914.717526] env[61839]: DEBUG oslo_vmware.api [None req-9dd4913a-7b16-4970-942a-02a177c420e2 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Task: {'id': task-1314706, 'name': ReconfigVM_Task, 'duration_secs': 0.25712} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 914.717831] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-9dd4913a-7b16-4970-942a-02a177c420e2 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 56369316-a445-4a2a-a0a6-967074104e19] Powering on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 914.718147] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4f28c659-34c5-4497-a8d6-4f9b3042a90a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.727352] env[61839]: DEBUG oslo_vmware.api [None req-9dd4913a-7b16-4970-942a-02a177c420e2 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Waiting for the task: (returnval){ [ 914.727352] env[61839]: value = "task-1314709" [ 914.727352] env[61839]: _type = "Task" [ 914.727352] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 914.747091] env[61839]: DEBUG oslo_vmware.api [None req-9dd4913a-7b16-4970-942a-02a177c420e2 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Task: {'id': task-1314709, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.756236] env[61839]: DEBUG oslo_vmware.api [None req-40823cea-b50f-4b8d-809e-3a11293f7329 tempest-ServerMetadataTestJSON-214230339 tempest-ServerMetadataTestJSON-214230339-project-member] Task: {'id': task-1314707, 'name': PowerOnVM_Task} progress is 89%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.782958] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5833758e-0d7f-4565-a6a4-1ec6a6de0df3 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.803160] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4bb51ba-c2b7-4eae-af25-96f602ab110c {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.809639] env[61839]: DEBUG oslo_vmware.api [None req-c8b7ef29-926a-40c9-9023-eb5013fb90e1 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1314703, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.590232} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 914.810139] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8b7ef29-926a-40c9-9023-eb5013fb90e1 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk to [datastore1] 7f7b3f51-3e96-49f1-a84a-81ae649e6938/7f7b3f51-3e96-49f1-a84a-81ae649e6938.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 914.810494] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-c8b7ef29-926a-40c9-9023-eb5013fb90e1 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 7f7b3f51-3e96-49f1-a84a-81ae649e6938] Extending root virtual disk to 1048576 {{(pid=61839) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 914.812202] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2883ba2a-e344-4842-bc49-c785e7df9407 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.855951] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ddb51e8-a2fe-40a1-8f17-b0fc6ecf6c72 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.862105] env[61839]: DEBUG oslo_vmware.api [None req-c8b7ef29-926a-40c9-9023-eb5013fb90e1 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Waiting for the task: (returnval){ [ 914.862105] env[61839]: value = "task-1314710" [ 914.862105] env[61839]: _type = "Task" [ 914.862105] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 914.872489] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-752645e2-cc3d-4b55-9c0f-d749c4699714 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.882574] env[61839]: DEBUG oslo_vmware.api [None req-c8b7ef29-926a-40c9-9023-eb5013fb90e1 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1314710, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.897477] env[61839]: DEBUG nova.compute.provider_tree [None req-f6ddf676-45e1-46d7-86c0-b27d63a4d858 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 915.076405] env[61839]: DEBUG oslo_vmware.api [None req-095248fd-e632-43a1-88be-4cc8540a5623 tempest-ServerGroupTestJSON-1627784352 tempest-ServerGroupTestJSON-1627784352-project-member] Task: {'id': task-1314708, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.113954] env[61839]: DEBUG oslo_concurrency.lockutils [None req-66af4a22-0818-42a5-aa42-24ba00cd1977 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Releasing lock "refresh_cache-e3126a08-c3c6-4d9e-9a50-659085fbfec9" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 915.113954] env[61839]: DEBUG nova.compute.manager [None req-66af4a22-0818-42a5-aa42-24ba00cd1977 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61839) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 915.113954] env[61839]: DEBUG nova.compute.manager [None req-66af4a22-0818-42a5-aa42-24ba00cd1977 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: e3126a08-c3c6-4d9e-9a50-659085fbfec9] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 915.113954] env[61839]: DEBUG nova.network.neutron [None req-66af4a22-0818-42a5-aa42-24ba00cd1977 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: e3126a08-c3c6-4d9e-9a50-659085fbfec9] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 915.140823] env[61839]: DEBUG nova.network.neutron [None req-66af4a22-0818-42a5-aa42-24ba00cd1977 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: e3126a08-c3c6-4d9e-9a50-659085fbfec9] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 915.243187] env[61839]: DEBUG oslo_vmware.api [None req-9dd4913a-7b16-4970-942a-02a177c420e2 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Task: {'id': task-1314709, 'name': PowerOnVM_Task, 'duration_secs': 0.469091} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 915.244074] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-9dd4913a-7b16-4970-942a-02a177c420e2 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 56369316-a445-4a2a-a0a6-967074104e19] Powered on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 915.250059] env[61839]: DEBUG oslo_vmware.api [None req-40823cea-b50f-4b8d-809e-3a11293f7329 tempest-ServerMetadataTestJSON-214230339 tempest-ServerMetadataTestJSON-214230339-project-member] Task: {'id': task-1314707, 'name': PowerOnVM_Task, 'duration_secs': 0.665276} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 915.250773] env[61839]: DEBUG nova.compute.manager [None req-9dd4913a-7b16-4970-942a-02a177c420e2 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 56369316-a445-4a2a-a0a6-967074104e19] Checking state {{(pid=61839) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 915.251140] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-40823cea-b50f-4b8d-809e-3a11293f7329 tempest-ServerMetadataTestJSON-214230339 tempest-ServerMetadataTestJSON-214230339-project-member] [instance: bac4c882-a23d-412f-ae98-f4f21d86681a] Powered on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 915.251404] env[61839]: INFO nova.compute.manager [None req-40823cea-b50f-4b8d-809e-3a11293f7329 tempest-ServerMetadataTestJSON-214230339 tempest-ServerMetadataTestJSON-214230339-project-member] [instance: bac4c882-a23d-412f-ae98-f4f21d86681a] Took 11.47 seconds to spawn the instance on the hypervisor. [ 915.251631] env[61839]: DEBUG nova.compute.manager [None req-40823cea-b50f-4b8d-809e-3a11293f7329 tempest-ServerMetadataTestJSON-214230339 tempest-ServerMetadataTestJSON-214230339-project-member] [instance: bac4c882-a23d-412f-ae98-f4f21d86681a] Checking state {{(pid=61839) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 915.252796] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02ba1f9a-2a7e-43b0-b231-0aeb9fcae710 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.256098] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00da6c19-a938-4d6e-ab23-4a95205a236b {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.373055] env[61839]: DEBUG oslo_vmware.api [None req-c8b7ef29-926a-40c9-9023-eb5013fb90e1 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1314710, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.085345} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 915.373299] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-c8b7ef29-926a-40c9-9023-eb5013fb90e1 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 7f7b3f51-3e96-49f1-a84a-81ae649e6938] Extended root virtual disk {{(pid=61839) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 915.374081] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0036ea9-921d-4605-8d10-148aae3c7388 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.397196] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-c8b7ef29-926a-40c9-9023-eb5013fb90e1 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 7f7b3f51-3e96-49f1-a84a-81ae649e6938] Reconfiguring VM instance instance-00000055 to attach disk [datastore1] 7f7b3f51-3e96-49f1-a84a-81ae649e6938/7f7b3f51-3e96-49f1-a84a-81ae649e6938.vmdk or device None with type sparse {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 915.397545] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7e8e62f9-cdad-4a59-bdaf-15bf00d00a16 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.414073] env[61839]: DEBUG nova.scheduler.client.report [None req-f6ddf676-45e1-46d7-86c0-b27d63a4d858 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 915.424395] env[61839]: DEBUG oslo_vmware.api [None req-c8b7ef29-926a-40c9-9023-eb5013fb90e1 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Waiting for the task: (returnval){ [ 915.424395] env[61839]: value = "task-1314711" [ 915.424395] env[61839]: _type = "Task" [ 915.424395] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 915.433965] env[61839]: DEBUG oslo_vmware.api [None req-c8b7ef29-926a-40c9-9023-eb5013fb90e1 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1314711, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.570235] env[61839]: DEBUG nova.compute.manager [req-6a516ef9-33be-4090-9053-72c3d99894bc req-8987d9ec-ec7f-4deb-b125-8fb83f048924 service nova] [instance: 12087baa-e700-4977-b2df-3aa2c56cc2f6] Received event network-vif-plugged-1cf79153-ee72-44b5-817d-61edb00b0a4e {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 915.570450] env[61839]: DEBUG oslo_concurrency.lockutils [req-6a516ef9-33be-4090-9053-72c3d99894bc req-8987d9ec-ec7f-4deb-b125-8fb83f048924 service nova] Acquiring lock "12087baa-e700-4977-b2df-3aa2c56cc2f6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 915.570746] env[61839]: DEBUG oslo_concurrency.lockutils [req-6a516ef9-33be-4090-9053-72c3d99894bc req-8987d9ec-ec7f-4deb-b125-8fb83f048924 service nova] Lock "12087baa-e700-4977-b2df-3aa2c56cc2f6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 915.570901] env[61839]: DEBUG oslo_concurrency.lockutils [req-6a516ef9-33be-4090-9053-72c3d99894bc req-8987d9ec-ec7f-4deb-b125-8fb83f048924 service nova] Lock "12087baa-e700-4977-b2df-3aa2c56cc2f6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 915.571136] env[61839]: DEBUG nova.compute.manager [req-6a516ef9-33be-4090-9053-72c3d99894bc req-8987d9ec-ec7f-4deb-b125-8fb83f048924 service nova] [instance: 12087baa-e700-4977-b2df-3aa2c56cc2f6] No waiting events found dispatching network-vif-plugged-1cf79153-ee72-44b5-817d-61edb00b0a4e {{(pid=61839) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 915.571349] env[61839]: WARNING nova.compute.manager [req-6a516ef9-33be-4090-9053-72c3d99894bc req-8987d9ec-ec7f-4deb-b125-8fb83f048924 service nova] [instance: 12087baa-e700-4977-b2df-3aa2c56cc2f6] Received unexpected event network-vif-plugged-1cf79153-ee72-44b5-817d-61edb00b0a4e for instance with vm_state building and task_state spawning. [ 915.577959] env[61839]: DEBUG oslo_vmware.api [None req-095248fd-e632-43a1-88be-4cc8540a5623 tempest-ServerGroupTestJSON-1627784352 tempest-ServerGroupTestJSON-1627784352-project-member] Task: {'id': task-1314708, 'name': PowerOnVM_Task, 'duration_secs': 0.62917} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 915.578304] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-095248fd-e632-43a1-88be-4cc8540a5623 tempest-ServerGroupTestJSON-1627784352 tempest-ServerGroupTestJSON-1627784352-project-member] [instance: 65f34f9e-353a-4f94-8f79-9bda89451885] Powered on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 915.578563] env[61839]: INFO nova.compute.manager [None req-095248fd-e632-43a1-88be-4cc8540a5623 tempest-ServerGroupTestJSON-1627784352 tempest-ServerGroupTestJSON-1627784352-project-member] [instance: 65f34f9e-353a-4f94-8f79-9bda89451885] Took 17.71 seconds to spawn the instance on the hypervisor. 
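The "Acquiring lock … by …", "Lock … acquired … :: waited 0.000s", and "Lock … released … :: held 0.000s" triples above all come from oslo_concurrency. Below is a minimal sketch of the two call patterns the cited lockutils line numbers correspond to; the lock names and function bodies are illustrative only, not Nova's actual code.

```python
from oslo_concurrency import lockutils

# "inner ... lockutils.py:402/407/421" records are emitted by the
# synchronized() decorator wrapper around the whole function:
@lockutils.synchronized("example-events-lock")
def pop_event():
    pass  # critical section: e.g. mutate a per-instance event table

# "lock ... lockutils.py:310/313/331" records are emitted by the
# plain context manager, used for dynamically named locks such as
# the "refresh_cache-<instance_uuid>" locks seen later in this log:
def refresh_cache(instance_uuid):
    with lockutils.lock(f"refresh_cache-{instance_uuid}"):
        pass  # critical section: e.g. rebuild the network info cache
```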
[ 915.578775] env[61839]: DEBUG nova.compute.manager [None req-095248fd-e632-43a1-88be-4cc8540a5623 tempest-ServerGroupTestJSON-1627784352 tempest-ServerGroupTestJSON-1627784352-project-member] [instance: 65f34f9e-353a-4f94-8f79-9bda89451885] Checking state {{(pid=61839) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 915.579896] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c9b56c6-f9b3-40e5-985b-adddf76d731f {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.643701] env[61839]: DEBUG nova.network.neutron [None req-66af4a22-0818-42a5-aa42-24ba00cd1977 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: e3126a08-c3c6-4d9e-9a50-659085fbfec9] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 915.763459] env[61839]: DEBUG nova.network.neutron [-] [instance: 0d42326a-9958-463a-90ae-34fb55e99c5b] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 915.788643] env[61839]: INFO nova.compute.manager [None req-40823cea-b50f-4b8d-809e-3a11293f7329 tempest-ServerMetadataTestJSON-214230339 tempest-ServerMetadataTestJSON-214230339-project-member] [instance: bac4c882-a23d-412f-ae98-f4f21d86681a] Took 24.35 seconds to build instance. [ 915.876420] env[61839]: DEBUG nova.network.neutron [None req-8237778e-4e32-46ea-924d-34926c83603d tempest-ServerAddressesNegativeTestJSON-1352160384 tempest-ServerAddressesNegativeTestJSON-1352160384-project-member] [instance: 12087baa-e700-4977-b2df-3aa2c56cc2f6] Successfully updated port: 1cf79153-ee72-44b5-817d-61edb00b0a4e {{(pid=61839) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 915.922033] env[61839]: DEBUG oslo_concurrency.lockutils [None req-f6ddf676-45e1-46d7-86c0-b27d63a4d858 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.982s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 915.923382] env[61839]: DEBUG oslo_concurrency.lockutils [None req-23057198-cea0-4ec8-8098-1a162c29f89f tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.148s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 915.926676] env[61839]: INFO nova.compute.claims [None req-23057198-cea0-4ec8-8098-1a162c29f89f tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: e265dcd3-6ab5-44b1-85be-bad934ebdb79] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 915.946930] env[61839]: DEBUG oslo_vmware.api [None req-c8b7ef29-926a-40c9-9023-eb5013fb90e1 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1314711, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.951798] env[61839]: INFO nova.scheduler.client.report [None req-f6ddf676-45e1-46d7-86c0-b27d63a4d858 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Deleted allocations for instance 3f86a0d5-30fd-42cc-bd40-14bce9d0e56f [ 915.982227] env[61839]: DEBUG nova.compute.manager [req-4b8908db-a013-4736-87e3-12314944097f req-593e73bd-6ba9-48da-a573-e93204dd9501 service nova] [instance: 0d42326a-9958-463a-90ae-34fb55e99c5b] Received event network-vif-deleted-fdc0f4f9-f380-4153-b4fb-7073fe3ac06e {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 916.101141] env[61839]: INFO nova.compute.manager [None req-095248fd-e632-43a1-88be-4cc8540a5623 tempest-ServerGroupTestJSON-1627784352 tempest-ServerGroupTestJSON-1627784352-project-member] [instance: 65f34f9e-353a-4f94-8f79-9bda89451885] Took 29.95 seconds to build instance. [ 916.149107] env[61839]: INFO nova.compute.manager [None req-66af4a22-0818-42a5-aa42-24ba00cd1977 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: e3126a08-c3c6-4d9e-9a50-659085fbfec9] Took 1.03 seconds to deallocate network for instance. [ 916.265567] env[61839]: INFO nova.compute.manager [-] [instance: 0d42326a-9958-463a-90ae-34fb55e99c5b] Took 1.94 seconds to deallocate network for instance. [ 916.293248] env[61839]: DEBUG oslo_concurrency.lockutils [None req-40823cea-b50f-4b8d-809e-3a11293f7329 tempest-ServerMetadataTestJSON-214230339 tempest-ServerMetadataTestJSON-214230339-project-member] Lock "bac4c882-a23d-412f-ae98-f4f21d86681a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 25.868s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 916.380211] env[61839]: DEBUG oslo_concurrency.lockutils [None req-8237778e-4e32-46ea-924d-34926c83603d tempest-ServerAddressesNegativeTestJSON-1352160384 tempest-ServerAddressesNegativeTestJSON-1352160384-project-member] Acquiring lock "refresh_cache-12087baa-e700-4977-b2df-3aa2c56cc2f6" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 916.380425] env[61839]: DEBUG oslo_concurrency.lockutils [None req-8237778e-4e32-46ea-924d-34926c83603d tempest-ServerAddressesNegativeTestJSON-1352160384 tempest-ServerAddressesNegativeTestJSON-1352160384-project-member] Acquired lock "refresh_cache-12087baa-e700-4977-b2df-3aa2c56cc2f6" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 916.380643] env[61839]: DEBUG nova.network.neutron [None req-8237778e-4e32-46ea-924d-34926c83603d tempest-ServerAddressesNegativeTestJSON-1352160384 tempest-ServerAddressesNegativeTestJSON-1352160384-project-member] [instance: 12087baa-e700-4977-b2df-3aa2c56cc2f6] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 916.443543] env[61839]: DEBUG oslo_vmware.api [None req-c8b7ef29-926a-40c9-9023-eb5013fb90e1 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1314711, 'name': ReconfigVM_Task, 'duration_secs': 0.608495} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 916.447040] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-c8b7ef29-926a-40c9-9023-eb5013fb90e1 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 7f7b3f51-3e96-49f1-a84a-81ae649e6938] Reconfigured VM instance instance-00000055 to attach disk [datastore1] 7f7b3f51-3e96-49f1-a84a-81ae649e6938/7f7b3f51-3e96-49f1-a84a-81ae649e6938.vmdk or device None with type sparse {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 916.447040] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-25f76f3c-98b0-4690-9c55-deedd8106a89 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.455534] env[61839]: DEBUG oslo_vmware.api [None req-c8b7ef29-926a-40c9-9023-eb5013fb90e1 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Waiting for the task: (returnval){ [ 916.455534] env[61839]: value = "task-1314712" [ 916.455534] env[61839]: _type = "Task" [ 916.455534] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 916.464623] env[61839]: DEBUG oslo_concurrency.lockutils [None req-f6ddf676-45e1-46d7-86c0-b27d63a4d858 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Lock "3f86a0d5-30fd-42cc-bd40-14bce9d0e56f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.547s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 916.469319] env[61839]: DEBUG oslo_vmware.api [None req-c8b7ef29-926a-40c9-9023-eb5013fb90e1 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1314712, 'name': Rename_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 916.603204] env[61839]: DEBUG oslo_concurrency.lockutils [None req-095248fd-e632-43a1-88be-4cc8540a5623 tempest-ServerGroupTestJSON-1627784352 tempest-ServerGroupTestJSON-1627784352-project-member] Lock "65f34f9e-353a-4f94-8f79-9bda89451885" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 31.477s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 916.720283] env[61839]: DEBUG oslo_concurrency.lockutils [None req-cbc82b7c-e238-4c12-a25e-cd1b247d627b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Acquiring lock "d718d866-dd6c-4332-b63a-be6850a5a785" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 916.720856] env[61839]: DEBUG oslo_concurrency.lockutils [None req-cbc82b7c-e238-4c12-a25e-cd1b247d627b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Lock "d718d866-dd6c-4332-b63a-be6850a5a785" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.004s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 916.771951] env[61839]: DEBUG oslo_concurrency.lockutils [None req-38baf470-d8c9-4f5a-bcbc-11976bf3f84e tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 916.968152] env[61839]: DEBUG oslo_vmware.api [None req-c8b7ef29-926a-40c9-9023-eb5013fb90e1 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1314712, 'name': Rename_Task, 'duration_secs': 0.244687} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 916.971408] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8b7ef29-926a-40c9-9023-eb5013fb90e1 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 7f7b3f51-3e96-49f1-a84a-81ae649e6938] Powering on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 916.972230] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ee7908d9-cde3-4c8a-a5df-fe6fab178434 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.975714] env[61839]: DEBUG nova.network.neutron [None req-8237778e-4e32-46ea-924d-34926c83603d tempest-ServerAddressesNegativeTestJSON-1352160384 tempest-ServerAddressesNegativeTestJSON-1352160384-project-member] [instance: 12087baa-e700-4977-b2df-3aa2c56cc2f6] Instance cache missing network info. 
{{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 916.981608] env[61839]: DEBUG oslo_vmware.api [None req-c8b7ef29-926a-40c9-9023-eb5013fb90e1 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Waiting for the task: (returnval){ [ 916.981608] env[61839]: value = "task-1314713" [ 916.981608] env[61839]: _type = "Task" [ 916.981608] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 916.995577] env[61839]: DEBUG oslo_vmware.api [None req-c8b7ef29-926a-40c9-9023-eb5013fb90e1 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1314713, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.008166] env[61839]: INFO nova.compute.manager [None req-01a8e288-c797-468d-be39-aa244be63e84 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 56369316-a445-4a2a-a0a6-967074104e19] Unrescuing [ 917.008365] env[61839]: DEBUG oslo_concurrency.lockutils [None req-01a8e288-c797-468d-be39-aa244be63e84 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Acquiring lock "refresh_cache-56369316-a445-4a2a-a0a6-967074104e19" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 917.008678] env[61839]: DEBUG oslo_concurrency.lockutils [None req-01a8e288-c797-468d-be39-aa244be63e84 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Acquired lock "refresh_cache-56369316-a445-4a2a-a0a6-967074104e19" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 917.008984] env[61839]: DEBUG nova.network.neutron [None req-01a8e288-c797-468d-be39-aa244be63e84 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 56369316-a445-4a2a-a0a6-967074104e19] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 917.102795] env[61839]: DEBUG oslo_concurrency.lockutils [None req-bd480c71-e10b-4855-b0e1-479a82aad289 tempest-ServerGroupTestJSON-1627784352 tempest-ServerGroupTestJSON-1627784352-project-member] Acquiring lock "65f34f9e-353a-4f94-8f79-9bda89451885" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 917.103020] env[61839]: DEBUG oslo_concurrency.lockutils [None req-bd480c71-e10b-4855-b0e1-479a82aad289 tempest-ServerGroupTestJSON-1627784352 tempest-ServerGroupTestJSON-1627784352-project-member] Lock "65f34f9e-353a-4f94-8f79-9bda89451885" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 917.103397] env[61839]: DEBUG oslo_concurrency.lockutils [None req-bd480c71-e10b-4855-b0e1-479a82aad289 tempest-ServerGroupTestJSON-1627784352 tempest-ServerGroupTestJSON-1627784352-project-member] Acquiring lock "65f34f9e-353a-4f94-8f79-9bda89451885-events" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 917.104097] env[61839]: DEBUG oslo_concurrency.lockutils [None req-bd480c71-e10b-4855-b0e1-479a82aad289 tempest-ServerGroupTestJSON-1627784352 tempest-ServerGroupTestJSON-1627784352-project-member] Lock "65f34f9e-353a-4f94-8f79-9bda89451885-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 917.104289] env[61839]: DEBUG oslo_concurrency.lockutils [None req-bd480c71-e10b-4855-b0e1-479a82aad289 tempest-ServerGroupTestJSON-1627784352 tempest-ServerGroupTestJSON-1627784352-project-member] Lock "65f34f9e-353a-4f94-8f79-9bda89451885-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 917.107269] env[61839]: INFO nova.compute.manager [None req-bd480c71-e10b-4855-b0e1-479a82aad289 tempest-ServerGroupTestJSON-1627784352 tempest-ServerGroupTestJSON-1627784352-project-member] [instance: 65f34f9e-353a-4f94-8f79-9bda89451885] Terminating instance [ 917.111972] env[61839]: DEBUG nova.compute.manager [None req-bd480c71-e10b-4855-b0e1-479a82aad289 tempest-ServerGroupTestJSON-1627784352 tempest-ServerGroupTestJSON-1627784352-project-member] [instance: 65f34f9e-353a-4f94-8f79-9bda89451885] Start destroying the instance on the hypervisor. {{(pid=61839) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 917.112263] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-bd480c71-e10b-4855-b0e1-479a82aad289 tempest-ServerGroupTestJSON-1627784352 tempest-ServerGroupTestJSON-1627784352-project-member] [instance: 65f34f9e-353a-4f94-8f79-9bda89451885] Destroying instance {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 917.113131] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-631ccaab-04e8-4435-a630-73c37382c768 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.122502] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd480c71-e10b-4855-b0e1-479a82aad289 tempest-ServerGroupTestJSON-1627784352 tempest-ServerGroupTestJSON-1627784352-project-member] [instance: 65f34f9e-353a-4f94-8f79-9bda89451885] Powering off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 917.122839] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f507efc0-3bce-43af-856d-2dbacfb58c12 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.130944] env[61839]: DEBUG oslo_vmware.api [None req-bd480c71-e10b-4855-b0e1-479a82aad289 tempest-ServerGroupTestJSON-1627784352 tempest-ServerGroupTestJSON-1627784352-project-member] Waiting for the task: (returnval){ [ 917.130944] env[61839]: value = "task-1314714" [ 917.130944] env[61839]: _type = "Task" [ 917.130944] env[61839]: } to complete. 
{{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 917.143821] env[61839]: DEBUG oslo_vmware.api [None req-bd480c71-e10b-4855-b0e1-479a82aad289 tempest-ServerGroupTestJSON-1627784352 tempest-ServerGroupTestJSON-1627784352-project-member] Task: {'id': task-1314714, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.176889] env[61839]: DEBUG oslo_concurrency.lockutils [None req-66af4a22-0818-42a5-aa42-24ba00cd1977 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Lock "e3126a08-c3c6-4d9e-9a50-659085fbfec9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 7.668s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 917.224362] env[61839]: DEBUG nova.compute.manager [None req-cbc82b7c-e238-4c12-a25e-cd1b247d627b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: d718d866-dd6c-4332-b63a-be6850a5a785] Starting instance... {{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 917.236199] env[61839]: DEBUG oslo_concurrency.lockutils [None req-bf3bcffb-6c34-4e16-a353-b3f853008852 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 917.278808] env[61839]: DEBUG nova.network.neutron [None req-8237778e-4e32-46ea-924d-34926c83603d tempest-ServerAddressesNegativeTestJSON-1352160384 tempest-ServerAddressesNegativeTestJSON-1352160384-project-member] [instance: 12087baa-e700-4977-b2df-3aa2c56cc2f6] Updating instance_info_cache with network_info: [{"id": "1cf79153-ee72-44b5-817d-61edb00b0a4e", "address": "fa:16:3e:92:fe:b9", "network": {"id": "dfec77b1-56bc-47d2-90e1-4dc90b186955", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-2081362157-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d912b13e639149a3a7a7580122aba866", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa8c2f93-f287-41b3-adb6-4942a7ea2a0b", "external-id": "nsx-vlan-transportzone-363", "segmentation_id": 363, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1cf79153-ee", "ovs_interfaceid": "1cf79153-ee72-44b5-817d-61edb00b0a4e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 917.287142] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6a78bc8-83d1-4920-9987-2f352cbd81c6 {{(pid=61839) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.297723] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16e1c3d7-97e7-43cc-bf8f-c561757be407 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.338642] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a21db36c-dab6-4412-b01f-e03deff8fd12 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.348809] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4edf1814-d6b9-47ce-b426-2aef6ed361e9 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.373239] env[61839]: DEBUG nova.compute.provider_tree [None req-23057198-cea0-4ec8-8098-1a162c29f89f tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 917.495349] env[61839]: DEBUG oslo_vmware.api [None req-c8b7ef29-926a-40c9-9023-eb5013fb90e1 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1314713, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.646765] env[61839]: DEBUG oslo_vmware.api [None req-bd480c71-e10b-4855-b0e1-479a82aad289 tempest-ServerGroupTestJSON-1627784352 tempest-ServerGroupTestJSON-1627784352-project-member] Task: {'id': task-1314714, 'name': PowerOffVM_Task, 'duration_secs': 0.286076} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 917.647183] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd480c71-e10b-4855-b0e1-479a82aad289 tempest-ServerGroupTestJSON-1627784352 tempest-ServerGroupTestJSON-1627784352-project-member] [instance: 65f34f9e-353a-4f94-8f79-9bda89451885] Powered off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 917.647777] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-bd480c71-e10b-4855-b0e1-479a82aad289 tempest-ServerGroupTestJSON-1627784352 tempest-ServerGroupTestJSON-1627784352-project-member] [instance: 65f34f9e-353a-4f94-8f79-9bda89451885] Unregistering the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 917.647777] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-757792e2-bbb5-4b18-8cad-c0c30eac557a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.757477] env[61839]: DEBUG oslo_concurrency.lockutils [None req-cbc82b7c-e238-4c12-a25e-cd1b247d627b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 917.780192] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-bd480c71-e10b-4855-b0e1-479a82aad289 tempest-ServerGroupTestJSON-1627784352 tempest-ServerGroupTestJSON-1627784352-project-member] [instance: 65f34f9e-353a-4f94-8f79-9bda89451885] Unregistered the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 917.780480] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-bd480c71-e10b-4855-b0e1-479a82aad289 tempest-ServerGroupTestJSON-1627784352 tempest-ServerGroupTestJSON-1627784352-project-member] [instance: 65f34f9e-353a-4f94-8f79-9bda89451885] Deleting contents of the VM from datastore datastore1 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 917.781905] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-bd480c71-e10b-4855-b0e1-479a82aad289 tempest-ServerGroupTestJSON-1627784352 tempest-ServerGroupTestJSON-1627784352-project-member] Deleting the datastore file [datastore1] 65f34f9e-353a-4f94-8f79-9bda89451885 {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 917.781905] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f1de2df5-2a5f-42d8-b2bc-80ff64651ee8 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.789030] env[61839]: DEBUG oslo_concurrency.lockutils [None req-8237778e-4e32-46ea-924d-34926c83603d tempest-ServerAddressesNegativeTestJSON-1352160384 tempest-ServerAddressesNegativeTestJSON-1352160384-project-member] Releasing lock "refresh_cache-12087baa-e700-4977-b2df-3aa2c56cc2f6" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 917.789030] env[61839]: DEBUG nova.compute.manager [None req-8237778e-4e32-46ea-924d-34926c83603d tempest-ServerAddressesNegativeTestJSON-1352160384 tempest-ServerAddressesNegativeTestJSON-1352160384-project-member] [instance: 12087baa-e700-4977-b2df-3aa2c56cc2f6] Instance network_info: |[{"id": 
"1cf79153-ee72-44b5-817d-61edb00b0a4e", "address": "fa:16:3e:92:fe:b9", "network": {"id": "dfec77b1-56bc-47d2-90e1-4dc90b186955", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-2081362157-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d912b13e639149a3a7a7580122aba866", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa8c2f93-f287-41b3-adb6-4942a7ea2a0b", "external-id": "nsx-vlan-transportzone-363", "segmentation_id": 363, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1cf79153-ee", "ovs_interfaceid": "1cf79153-ee72-44b5-817d-61edb00b0a4e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 917.789030] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-8237778e-4e32-46ea-924d-34926c83603d tempest-ServerAddressesNegativeTestJSON-1352160384 tempest-ServerAddressesNegativeTestJSON-1352160384-project-member] [instance: 12087baa-e700-4977-b2df-3aa2c56cc2f6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:92:fe:b9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'aa8c2f93-f287-41b3-adb6-4942a7ea2a0b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1cf79153-ee72-44b5-817d-61edb00b0a4e', 'vif_model': 'vmxnet3'}] {{(pid=61839) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 917.797764] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-8237778e-4e32-46ea-924d-34926c83603d tempest-ServerAddressesNegativeTestJSON-1352160384 tempest-ServerAddressesNegativeTestJSON-1352160384-project-member] Creating folder: Project (d912b13e639149a3a7a7580122aba866). Parent ref: group-v281288. {{(pid=61839) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 917.799770] env[61839]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-691a11b4-2eb9-497c-9c55-e7f45e9f3a3a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.802070] env[61839]: DEBUG oslo_vmware.api [None req-bd480c71-e10b-4855-b0e1-479a82aad289 tempest-ServerGroupTestJSON-1627784352 tempest-ServerGroupTestJSON-1627784352-project-member] Waiting for the task: (returnval){ [ 917.802070] env[61839]: value = "task-1314716" [ 917.802070] env[61839]: _type = "Task" [ 917.802070] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 917.817051] env[61839]: INFO nova.virt.vmwareapi.vm_util [None req-8237778e-4e32-46ea-924d-34926c83603d tempest-ServerAddressesNegativeTestJSON-1352160384 tempest-ServerAddressesNegativeTestJSON-1352160384-project-member] Created folder: Project (d912b13e639149a3a7a7580122aba866) in parent group-v281288. 
[ 917.817051] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-8237778e-4e32-46ea-924d-34926c83603d tempest-ServerAddressesNegativeTestJSON-1352160384 tempest-ServerAddressesNegativeTestJSON-1352160384-project-member] Creating folder: Instances. Parent ref: group-v281410. {{(pid=61839) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 917.817051] env[61839]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fccff610-5c92-42f0-9c76-1f2dd413a0b5 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.820959] env[61839]: DEBUG oslo_vmware.api [None req-bd480c71-e10b-4855-b0e1-479a82aad289 tempest-ServerGroupTestJSON-1627784352 tempest-ServerGroupTestJSON-1627784352-project-member] Task: {'id': task-1314716, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.833266] env[61839]: INFO nova.virt.vmwareapi.vm_util [None req-8237778e-4e32-46ea-924d-34926c83603d tempest-ServerAddressesNegativeTestJSON-1352160384 tempest-ServerAddressesNegativeTestJSON-1352160384-project-member] Created folder: Instances in parent group-v281410. [ 917.834024] env[61839]: DEBUG oslo.service.loopingcall [None req-8237778e-4e32-46ea-924d-34926c83603d tempest-ServerAddressesNegativeTestJSON-1352160384 tempest-ServerAddressesNegativeTestJSON-1352160384-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 917.834501] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 12087baa-e700-4977-b2df-3aa2c56cc2f6] Creating VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 917.834938] env[61839]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-99bb3f6a-5ff6-4eaa-b00e-1baa0be0d443 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.861808] env[61839]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 917.861808] env[61839]: value = "task-1314719" [ 917.861808] env[61839]: _type = "Task" [ 917.861808] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 917.874669] env[61839]: DEBUG nova.scheduler.client.report [None req-23057198-cea0-4ec8-8098-1a162c29f89f tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 917.878633] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314719, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.979978] env[61839]: DEBUG nova.network.neutron [None req-01a8e288-c797-468d-be39-aa244be63e84 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 56369316-a445-4a2a-a0a6-967074104e19] Updating instance_info_cache with network_info: [{"id": "ef2288b6-c4de-43f8-95c9-22511e164c36", "address": "fa:16:3e:c7:71:6a", "network": {"id": "6f1d4bda-c333-4556-9530-df719c058a5b", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1248001983-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.162", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "223d94c193814f649b5d1f35e3756071", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2180b40f-2bb0-47da-ba80-c2fbe7f98af0", "external-id": "nsx-vlan-transportzone-970", "segmentation_id": 970, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapef2288b6-c4", "ovs_interfaceid": "ef2288b6-c4de-43f8-95c9-22511e164c36", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 917.993588] env[61839]: DEBUG oslo_vmware.api [None req-c8b7ef29-926a-40c9-9023-eb5013fb90e1 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1314713, 'name': PowerOnVM_Task, 'duration_secs': 0.687408} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 917.993902] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8b7ef29-926a-40c9-9023-eb5013fb90e1 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 7f7b3f51-3e96-49f1-a84a-81ae649e6938] Powered on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 917.994236] env[61839]: INFO nova.compute.manager [None req-c8b7ef29-926a-40c9-9023-eb5013fb90e1 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 7f7b3f51-3e96-49f1-a84a-81ae649e6938] Took 11.68 seconds to spawn the instance on the hypervisor. 
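The instance_info_cache payloads above are plain JSON-style structures. Here is a small self-contained example of extracting addresses from one entry; the VIF data is trimmed from the record logged just above, and the helper function is a hypothetical illustration, not a Nova API.

```python
# Trimmed from the network_info logged above for instance
# 56369316-a445-4a2a-a0a6-967074104e19.
vif = {
    "id": "ef2288b6-c4de-43f8-95c9-22511e164c36",
    "address": "fa:16:3e:c7:71:6a",
    "network": {
        "subnets": [{
            "cidr": "192.168.128.0/28",
            "ips": [{
                "address": "192.168.128.10",
                "type": "fixed",
                "floating_ips": [{"address": "10.180.180.162",
                                  "type": "floating"}],
            }],
        }],
    },
}

def addresses(vif):
    """Yield (kind, address) for every IP attached to a VIF entry."""
    for subnet in vif["network"]["subnets"]:
        for ip in subnet["ips"]:
            yield ip["type"], ip["address"]
            for fip in ip.get("floating_ips", []):
                yield fip["type"], fip["address"]

print(list(addresses(vif)))
# [('fixed', '192.168.128.10'), ('floating', '10.180.180.162')]
```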
[ 917.994451] env[61839]: DEBUG nova.compute.manager [None req-c8b7ef29-926a-40c9-9023-eb5013fb90e1 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 7f7b3f51-3e96-49f1-a84a-81ae649e6938] Checking state {{(pid=61839) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 917.995307] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c65d316a-9535-439c-ac09-2022f2bd1047 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.187993] env[61839]: DEBUG nova.compute.manager [req-2d6b3940-c085-4788-b9d1-f573f0266256 req-16d4adef-7ebe-4ca5-9c30-30167f39ec11 service nova] [instance: 12087baa-e700-4977-b2df-3aa2c56cc2f6] Received event network-changed-1cf79153-ee72-44b5-817d-61edb00b0a4e {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 918.188533] env[61839]: DEBUG nova.compute.manager [req-2d6b3940-c085-4788-b9d1-f573f0266256 req-16d4adef-7ebe-4ca5-9c30-30167f39ec11 service nova] [instance: 12087baa-e700-4977-b2df-3aa2c56cc2f6] Refreshing instance network info cache due to event network-changed-1cf79153-ee72-44b5-817d-61edb00b0a4e. {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 918.189409] env[61839]: DEBUG oslo_concurrency.lockutils [req-2d6b3940-c085-4788-b9d1-f573f0266256 req-16d4adef-7ebe-4ca5-9c30-30167f39ec11 service nova] Acquiring lock "refresh_cache-12087baa-e700-4977-b2df-3aa2c56cc2f6" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 918.189650] env[61839]: DEBUG oslo_concurrency.lockutils [req-2d6b3940-c085-4788-b9d1-f573f0266256 req-16d4adef-7ebe-4ca5-9c30-30167f39ec11 service nova] Acquired lock "refresh_cache-12087baa-e700-4977-b2df-3aa2c56cc2f6" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 918.189842] env[61839]: DEBUG nova.network.neutron [req-2d6b3940-c085-4788-b9d1-f573f0266256 req-16d4adef-7ebe-4ca5-9c30-30167f39ec11 service nova] [instance: 12087baa-e700-4977-b2df-3aa2c56cc2f6] Refreshing network info cache for port 1cf79153-ee72-44b5-817d-61edb00b0a4e {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 918.318744] env[61839]: DEBUG oslo_vmware.api [None req-bd480c71-e10b-4855-b0e1-479a82aad289 tempest-ServerGroupTestJSON-1627784352 tempest-ServerGroupTestJSON-1627784352-project-member] Task: {'id': task-1314716, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.344925} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 918.318744] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-bd480c71-e10b-4855-b0e1-479a82aad289 tempest-ServerGroupTestJSON-1627784352 tempest-ServerGroupTestJSON-1627784352-project-member] Deleted the datastore file {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 918.318744] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-bd480c71-e10b-4855-b0e1-479a82aad289 tempest-ServerGroupTestJSON-1627784352 tempest-ServerGroupTestJSON-1627784352-project-member] [instance: 65f34f9e-353a-4f94-8f79-9bda89451885] Deleted contents of the VM from datastore datastore1 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 918.318744] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-bd480c71-e10b-4855-b0e1-479a82aad289 tempest-ServerGroupTestJSON-1627784352 tempest-ServerGroupTestJSON-1627784352-project-member] [instance: 65f34f9e-353a-4f94-8f79-9bda89451885] Instance destroyed {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 918.318744] env[61839]: INFO nova.compute.manager [None req-bd480c71-e10b-4855-b0e1-479a82aad289 tempest-ServerGroupTestJSON-1627784352 tempest-ServerGroupTestJSON-1627784352-project-member] [instance: 65f34f9e-353a-4f94-8f79-9bda89451885] Took 1.21 seconds to destroy the instance on the hypervisor. [ 918.318744] env[61839]: DEBUG oslo.service.loopingcall [None req-bd480c71-e10b-4855-b0e1-479a82aad289 tempest-ServerGroupTestJSON-1627784352 tempest-ServerGroupTestJSON-1627784352-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 918.318744] env[61839]: DEBUG nova.compute.manager [-] [instance: 65f34f9e-353a-4f94-8f79-9bda89451885] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 918.318744] env[61839]: DEBUG nova.network.neutron [-] [instance: 65f34f9e-353a-4f94-8f79-9bda89451885] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 918.375160] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314719, 'name': CreateVM_Task} progress is 99%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.383837] env[61839]: DEBUG oslo_concurrency.lockutils [None req-23057198-cea0-4ec8-8098-1a162c29f89f tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.460s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 918.384927] env[61839]: DEBUG nova.compute.manager [None req-23057198-cea0-4ec8-8098-1a162c29f89f tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: e265dcd3-6ab5-44b1-85be-bad934ebdb79] Start building networks asynchronously for instance. 
{{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 918.390185] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a4e9d78c-507d-4734-8203-a95de4d6e9b0 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 4.039s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 918.482891] env[61839]: DEBUG oslo_concurrency.lockutils [None req-01a8e288-c797-468d-be39-aa244be63e84 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Releasing lock "refresh_cache-56369316-a445-4a2a-a0a6-967074104e19" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 918.483880] env[61839]: DEBUG nova.objects.instance [None req-01a8e288-c797-468d-be39-aa244be63e84 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Lazy-loading 'flavor' on Instance uuid 56369316-a445-4a2a-a0a6-967074104e19 {{(pid=61839) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 918.521029] env[61839]: INFO nova.compute.manager [None req-c8b7ef29-926a-40c9-9023-eb5013fb90e1 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 7f7b3f51-3e96-49f1-a84a-81ae649e6938] Took 24.25 seconds to build instance. [ 918.873883] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314719, 'name': CreateVM_Task, 'duration_secs': 0.584285} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 918.874378] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 12087baa-e700-4977-b2df-3aa2c56cc2f6] Created VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 918.875290] env[61839]: DEBUG oslo_concurrency.lockutils [None req-8237778e-4e32-46ea-924d-34926c83603d tempest-ServerAddressesNegativeTestJSON-1352160384 tempest-ServerAddressesNegativeTestJSON-1352160384-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 918.875591] env[61839]: DEBUG oslo_concurrency.lockutils [None req-8237778e-4e32-46ea-924d-34926c83603d tempest-ServerAddressesNegativeTestJSON-1352160384 tempest-ServerAddressesNegativeTestJSON-1352160384-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 918.875984] env[61839]: DEBUG oslo_concurrency.lockutils [None req-8237778e-4e32-46ea-924d-34926c83603d tempest-ServerAddressesNegativeTestJSON-1352160384 tempest-ServerAddressesNegativeTestJSON-1352160384-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 918.876450] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0061e667-c801-48ad-a78b-a178226a5e2f {{(pid=61839) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.882768] env[61839]: DEBUG oslo_vmware.api [None req-8237778e-4e32-46ea-924d-34926c83603d tempest-ServerAddressesNegativeTestJSON-1352160384 tempest-ServerAddressesNegativeTestJSON-1352160384-project-member] Waiting for the task: (returnval){ [ 918.882768] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]521388ad-28ba-38ae-9e1e-9f681f8da949" [ 918.882768] env[61839]: _type = "Task" [ 918.882768] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 918.894791] env[61839]: DEBUG nova.compute.utils [None req-23057198-cea0-4ec8-8098-1a162c29f89f tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Using /dev/sd instead of None {{(pid=61839) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 918.898687] env[61839]: DEBUG oslo_vmware.api [None req-8237778e-4e32-46ea-924d-34926c83603d tempest-ServerAddressesNegativeTestJSON-1352160384 tempest-ServerAddressesNegativeTestJSON-1352160384-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]521388ad-28ba-38ae-9e1e-9f681f8da949, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.902097] env[61839]: DEBUG nova.compute.manager [None req-23057198-cea0-4ec8-8098-1a162c29f89f tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: e265dcd3-6ab5-44b1-85be-bad934ebdb79] Allocating IP information in the background. {{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 918.902097] env[61839]: DEBUG nova.network.neutron [None req-23057198-cea0-4ec8-8098-1a162c29f89f tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: e265dcd3-6ab5-44b1-85be-bad934ebdb79] allocate_for_instance() {{(pid=61839) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 918.990118] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29a56047-edfc-4467-a0cd-0cd47aacd31e {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.019893] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-01a8e288-c797-468d-be39-aa244be63e84 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 56369316-a445-4a2a-a0a6-967074104e19] Powering off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 919.023627] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a37ab14d-8f42-4f62-be5d-fa2b269aaecf {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.025754] env[61839]: DEBUG oslo_concurrency.lockutils [None req-c8b7ef29-926a-40c9-9023-eb5013fb90e1 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Lock "7f7b3f51-3e96-49f1-a84a-81ae649e6938" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 25.772s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 919.032055] env[61839]: DEBUG 
oslo_vmware.api [None req-01a8e288-c797-468d-be39-aa244be63e84 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Waiting for the task: (returnval){ [ 919.032055] env[61839]: value = "task-1314720" [ 919.032055] env[61839]: _type = "Task" [ 919.032055] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 919.034254] env[61839]: DEBUG nova.policy [None req-23057198-cea0-4ec8-8098-1a162c29f89f tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '430b14eaa0e94ef39fb0f95269448ade', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '25686a503d044467a1d641f14e14c65c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61839) authorize /opt/stack/nova/nova/policy.py:201}} [ 919.048211] env[61839]: DEBUG oslo_vmware.api [None req-01a8e288-c797-468d-be39-aa244be63e84 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Task: {'id': task-1314720, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.198717] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1dfa30ec-de11-4a1a-9406-c3f6d5da64c2 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.207624] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0b63e75-5006-48ca-acf0-24553e20992e {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.252644] env[61839]: DEBUG nova.network.neutron [-] [instance: 65f34f9e-353a-4f94-8f79-9bda89451885] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 919.253779] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d45cb230-087f-48ef-a42b-506ebe5effd9 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.264089] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b069d168-ac11-40e2-90ef-28aa0ee6467f {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.282791] env[61839]: DEBUG nova.compute.provider_tree [None req-a4e9d78c-507d-4734-8203-a95de4d6e9b0 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 919.367595] env[61839]: DEBUG nova.network.neutron [req-2d6b3940-c085-4788-b9d1-f573f0266256 req-16d4adef-7ebe-4ca5-9c30-30167f39ec11 service nova] [instance: 12087baa-e700-4977-b2df-3aa2c56cc2f6] Updated VIF entry in instance network info cache for port 
1cf79153-ee72-44b5-817d-61edb00b0a4e. {{(pid=61839) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 919.368354] env[61839]: DEBUG nova.network.neutron [req-2d6b3940-c085-4788-b9d1-f573f0266256 req-16d4adef-7ebe-4ca5-9c30-30167f39ec11 service nova] [instance: 12087baa-e700-4977-b2df-3aa2c56cc2f6] Updating instance_info_cache with network_info: [{"id": "1cf79153-ee72-44b5-817d-61edb00b0a4e", "address": "fa:16:3e:92:fe:b9", "network": {"id": "dfec77b1-56bc-47d2-90e1-4dc90b186955", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-2081362157-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d912b13e639149a3a7a7580122aba866", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa8c2f93-f287-41b3-adb6-4942a7ea2a0b", "external-id": "nsx-vlan-transportzone-363", "segmentation_id": 363, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1cf79153-ee", "ovs_interfaceid": "1cf79153-ee72-44b5-817d-61edb00b0a4e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 919.395908] env[61839]: DEBUG oslo_vmware.api [None req-8237778e-4e32-46ea-924d-34926c83603d tempest-ServerAddressesNegativeTestJSON-1352160384 tempest-ServerAddressesNegativeTestJSON-1352160384-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]521388ad-28ba-38ae-9e1e-9f681f8da949, 'name': SearchDatastore_Task, 'duration_secs': 0.013592} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 919.396359] env[61839]: DEBUG oslo_concurrency.lockutils [None req-8237778e-4e32-46ea-924d-34926c83603d tempest-ServerAddressesNegativeTestJSON-1352160384 tempest-ServerAddressesNegativeTestJSON-1352160384-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 919.396656] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-8237778e-4e32-46ea-924d-34926c83603d tempest-ServerAddressesNegativeTestJSON-1352160384 tempest-ServerAddressesNegativeTestJSON-1352160384-project-member] [instance: 12087baa-e700-4977-b2df-3aa2c56cc2f6] Processing image e497cc62-282a-4a70-9770-22d80d8a1013 {{(pid=61839) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 919.396909] env[61839]: DEBUG oslo_concurrency.lockutils [None req-8237778e-4e32-46ea-924d-34926c83603d tempest-ServerAddressesNegativeTestJSON-1352160384 tempest-ServerAddressesNegativeTestJSON-1352160384-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 919.397178] env[61839]: DEBUG oslo_concurrency.lockutils [None req-8237778e-4e32-46ea-924d-34926c83603d tempest-ServerAddressesNegativeTestJSON-1352160384 tempest-ServerAddressesNegativeTestJSON-1352160384-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 919.397263] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-8237778e-4e32-46ea-924d-34926c83603d tempest-ServerAddressesNegativeTestJSON-1352160384 tempest-ServerAddressesNegativeTestJSON-1352160384-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 919.397713] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-29c2a56f-d2ce-454c-83b5-7f773ae6100c {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.404059] env[61839]: DEBUG nova.compute.manager [None req-23057198-cea0-4ec8-8098-1a162c29f89f tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: e265dcd3-6ab5-44b1-85be-bad934ebdb79] Start building block device mappings for instance. {{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 919.409753] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-8237778e-4e32-46ea-924d-34926c83603d tempest-ServerAddressesNegativeTestJSON-1352160384 tempest-ServerAddressesNegativeTestJSON-1352160384-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 919.409753] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-8237778e-4e32-46ea-924d-34926c83603d tempest-ServerAddressesNegativeTestJSON-1352160384 tempest-ServerAddressesNegativeTestJSON-1352160384-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61839) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 919.423047] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-24a6a64b-69d8-4c8d-89f1-1a53f33ece41 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.423047] env[61839]: DEBUG oslo_vmware.api [None req-8237778e-4e32-46ea-924d-34926c83603d tempest-ServerAddressesNegativeTestJSON-1352160384 tempest-ServerAddressesNegativeTestJSON-1352160384-project-member] Waiting for the task: (returnval){ [ 919.423047] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]5226fd68-4603-6b96-5393-4984a2b591d0" [ 919.423047] env[61839]: _type = "Task" [ 919.423047] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 919.426026] env[61839]: DEBUG oslo_vmware.api [None req-8237778e-4e32-46ea-924d-34926c83603d tempest-ServerAddressesNegativeTestJSON-1352160384 tempest-ServerAddressesNegativeTestJSON-1352160384-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]5226fd68-4603-6b96-5393-4984a2b591d0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.465625] env[61839]: DEBUG oslo_vmware.rw_handles [None req-6791ea0c-d18a-4d55-a906-7cd1e895b298 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/521eb3a2-7eab-4c0b-968b-a13546c441c1/disk-0.vmdk. {{(pid=61839) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 919.467586] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ed0bcbe-a78c-4ea4-a564-305b2bddea26 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.476042] env[61839]: DEBUG oslo_vmware.rw_handles [None req-6791ea0c-d18a-4d55-a906-7cd1e895b298 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/521eb3a2-7eab-4c0b-968b-a13546c441c1/disk-0.vmdk is in state: ready. {{(pid=61839) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 919.476225] env[61839]: ERROR oslo_vmware.rw_handles [None req-6791ea0c-d18a-4d55-a906-7cd1e895b298 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/521eb3a2-7eab-4c0b-968b-a13546c441c1/disk-0.vmdk due to incomplete transfer. [ 919.476717] env[61839]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-c779825e-cd70-43b3-88a4-42e6b94fdf16 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.485692] env[61839]: DEBUG oslo_vmware.rw_handles [None req-6791ea0c-d18a-4d55-a906-7cd1e895b298 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/521eb3a2-7eab-4c0b-968b-a13546c441c1/disk-0.vmdk. 
{{(pid=61839) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 919.485692] env[61839]: DEBUG nova.virt.vmwareapi.images [None req-6791ea0c-d18a-4d55-a906-7cd1e895b298 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4] Uploaded image c3165308-1f67-4465-ae42-29b3390c2326 to the Glance image server {{(pid=61839) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 919.487745] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-6791ea0c-d18a-4d55-a906-7cd1e895b298 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4] Destroying the VM {{(pid=61839) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 919.488024] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-4d2195f5-95e1-47f0-a641-ee78b7137617 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.495408] env[61839]: DEBUG oslo_vmware.api [None req-6791ea0c-d18a-4d55-a906-7cd1e895b298 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Waiting for the task: (returnval){ [ 919.495408] env[61839]: value = "task-1314721" [ 919.495408] env[61839]: _type = "Task" [ 919.495408] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 919.504646] env[61839]: DEBUG oslo_vmware.api [None req-6791ea0c-d18a-4d55-a906-7cd1e895b298 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1314721, 'name': Destroy_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.545431] env[61839]: DEBUG oslo_vmware.api [None req-01a8e288-c797-468d-be39-aa244be63e84 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Task: {'id': task-1314720, 'name': PowerOffVM_Task, 'duration_secs': 0.306576} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 919.545771] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-01a8e288-c797-468d-be39-aa244be63e84 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 56369316-a445-4a2a-a0a6-967074104e19] Powered off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 919.551377] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-01a8e288-c797-468d-be39-aa244be63e84 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 56369316-a445-4a2a-a0a6-967074104e19] Reconfiguring VM instance instance-0000004c to detach disk 2002 {{(pid=61839) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 919.551689] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1ec870e0-fe1d-4105-9588-ab4f241c4be2 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.573422] env[61839]: DEBUG oslo_vmware.api [None req-01a8e288-c797-468d-be39-aa244be63e84 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Waiting for the task: (returnval){ [ 919.573422] env[61839]: value = "task-1314722" [ 919.573422] env[61839]: _type = "Task" [ 919.573422] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 919.576070] env[61839]: DEBUG nova.compute.manager [req-84579a04-0e01-44ef-951f-9f059cb54c83 req-38893d96-bc18-4b9d-a323-5f91996fde03 service nova] [instance: 7f7b3f51-3e96-49f1-a84a-81ae649e6938] Received event network-changed-6b6f5c89-f3fe-4e29-82b8-e9e8f2658bfd {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 919.576695] env[61839]: DEBUG nova.compute.manager [req-84579a04-0e01-44ef-951f-9f059cb54c83 req-38893d96-bc18-4b9d-a323-5f91996fde03 service nova] [instance: 7f7b3f51-3e96-49f1-a84a-81ae649e6938] Refreshing instance network info cache due to event network-changed-6b6f5c89-f3fe-4e29-82b8-e9e8f2658bfd. 
{{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 919.576695] env[61839]: DEBUG oslo_concurrency.lockutils [req-84579a04-0e01-44ef-951f-9f059cb54c83 req-38893d96-bc18-4b9d-a323-5f91996fde03 service nova] Acquiring lock "refresh_cache-7f7b3f51-3e96-49f1-a84a-81ae649e6938" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 919.576695] env[61839]: DEBUG oslo_concurrency.lockutils [req-84579a04-0e01-44ef-951f-9f059cb54c83 req-38893d96-bc18-4b9d-a323-5f91996fde03 service nova] Acquired lock "refresh_cache-7f7b3f51-3e96-49f1-a84a-81ae649e6938" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 919.577051] env[61839]: DEBUG nova.network.neutron [req-84579a04-0e01-44ef-951f-9f059cb54c83 req-38893d96-bc18-4b9d-a323-5f91996fde03 service nova] [instance: 7f7b3f51-3e96-49f1-a84a-81ae649e6938] Refreshing network info cache for port 6b6f5c89-f3fe-4e29-82b8-e9e8f2658bfd {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 919.590327] env[61839]: DEBUG oslo_vmware.api [None req-01a8e288-c797-468d-be39-aa244be63e84 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Task: {'id': task-1314722, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.734017] env[61839]: DEBUG nova.network.neutron [None req-23057198-cea0-4ec8-8098-1a162c29f89f tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: e265dcd3-6ab5-44b1-85be-bad934ebdb79] Successfully created port: fd557952-f86b-487e-9fb9-8cb7d176384e {{(pid=61839) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 919.758561] env[61839]: INFO nova.compute.manager [-] [instance: 65f34f9e-353a-4f94-8f79-9bda89451885] Took 1.44 seconds to deallocate network for instance. [ 919.788344] env[61839]: DEBUG nova.scheduler.client.report [None req-a4e9d78c-507d-4734-8203-a95de4d6e9b0 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 919.870981] env[61839]: DEBUG oslo_concurrency.lockutils [req-2d6b3940-c085-4788-b9d1-f573f0266256 req-16d4adef-7ebe-4ca5-9c30-30167f39ec11 service nova] Releasing lock "refresh_cache-12087baa-e700-4977-b2df-3aa2c56cc2f6" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 919.928318] env[61839]: DEBUG oslo_vmware.api [None req-8237778e-4e32-46ea-924d-34926c83603d tempest-ServerAddressesNegativeTestJSON-1352160384 tempest-ServerAddressesNegativeTestJSON-1352160384-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]5226fd68-4603-6b96-5393-4984a2b591d0, 'name': SearchDatastore_Task, 'duration_secs': 0.019862} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 919.929162] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d39d2680-9db9-4025-8df1-cd019baf077e {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.935823] env[61839]: DEBUG oslo_vmware.api [None req-8237778e-4e32-46ea-924d-34926c83603d tempest-ServerAddressesNegativeTestJSON-1352160384 tempest-ServerAddressesNegativeTestJSON-1352160384-project-member] Waiting for the task: (returnval){ [ 919.935823] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52abee19-642a-a9aa-78ee-9127ee5abd39" [ 919.935823] env[61839]: _type = "Task" [ 919.935823] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 919.944648] env[61839]: DEBUG oslo_vmware.api [None req-8237778e-4e32-46ea-924d-34926c83603d tempest-ServerAddressesNegativeTestJSON-1352160384 tempest-ServerAddressesNegativeTestJSON-1352160384-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52abee19-642a-a9aa-78ee-9127ee5abd39, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 920.005789] env[61839]: DEBUG oslo_vmware.api [None req-6791ea0c-d18a-4d55-a906-7cd1e895b298 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1314721, 'name': Destroy_Task, 'duration_secs': 0.375181} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 920.006091] env[61839]: INFO nova.virt.vmwareapi.vm_util [None req-6791ea0c-d18a-4d55-a906-7cd1e895b298 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4] Destroyed the VM [ 920.006344] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-6791ea0c-d18a-4d55-a906-7cd1e895b298 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4] Deleting Snapshot of the VM instance {{(pid=61839) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 920.006608] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-8b7482be-8e87-4d86-95d9-a0ce3fa79f6f {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.013604] env[61839]: DEBUG oslo_vmware.api [None req-6791ea0c-d18a-4d55-a906-7cd1e895b298 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Waiting for the task: (returnval){ [ 920.013604] env[61839]: value = "task-1314723" [ 920.013604] env[61839]: _type = "Task" [ 920.013604] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 920.022537] env[61839]: DEBUG oslo_vmware.api [None req-6791ea0c-d18a-4d55-a906-7cd1e895b298 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1314723, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 920.087070] env[61839]: DEBUG oslo_vmware.api [None req-01a8e288-c797-468d-be39-aa244be63e84 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Task: {'id': task-1314722, 'name': ReconfigVM_Task, 'duration_secs': 0.342343} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 920.087411] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-01a8e288-c797-468d-be39-aa244be63e84 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 56369316-a445-4a2a-a0a6-967074104e19] Reconfigured VM instance instance-0000004c to detach disk 2002 {{(pid=61839) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 920.087636] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-01a8e288-c797-468d-be39-aa244be63e84 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 56369316-a445-4a2a-a0a6-967074104e19] Powering on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 920.088250] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b7904491-cf8a-4df7-8952-6b32a92ed72a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.095889] env[61839]: DEBUG oslo_vmware.api [None req-01a8e288-c797-468d-be39-aa244be63e84 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Waiting for the task: (returnval){ [ 920.095889] env[61839]: value = "task-1314724" [ 920.095889] env[61839]: _type = "Task" [ 920.095889] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 920.105642] env[61839]: DEBUG oslo_vmware.api [None req-01a8e288-c797-468d-be39-aa244be63e84 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Task: {'id': task-1314724, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 920.121241] env[61839]: DEBUG oslo_concurrency.lockutils [None req-232c9fff-1e94-49d3-8695-e77d90e281a0 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Acquiring lock "d4a8c153-7585-4c78-8aa4-56077e0a7af6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 920.121488] env[61839]: DEBUG oslo_concurrency.lockutils [None req-232c9fff-1e94-49d3-8695-e77d90e281a0 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Lock "d4a8c153-7585-4c78-8aa4-56077e0a7af6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 920.270893] env[61839]: DEBUG oslo_concurrency.lockutils [None req-bd480c71-e10b-4855-b0e1-479a82aad289 tempest-ServerGroupTestJSON-1627784352 tempest-ServerGroupTestJSON-1627784352-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 920.376921] env[61839]: DEBUG nova.compute.manager [req-9caabb0e-e7ad-45f9-a452-4a0d82160840 req-23a765c8-2dfe-4e7e-891e-a80f8945b256 service nova] [instance: 65f34f9e-353a-4f94-8f79-9bda89451885] Received event network-vif-deleted-06a51f4e-0ef4-4148-b0a8-468d9345dc05 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 920.413104] env[61839]: DEBUG nova.compute.manager [None req-23057198-cea0-4ec8-8098-1a162c29f89f tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: e265dcd3-6ab5-44b1-85be-bad934ebdb79] Start spawning the instance on the hypervisor. 
{{(pid=61839) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 920.441662] env[61839]: DEBUG nova.virt.hardware [None req-23057198-cea0-4ec8-8098-1a162c29f89f tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-18T16:51:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-18T16:50:57Z,direct_url=,disk_format='vmdk',id=e497cc62-282a-4a70-9770-22d80d8a1013,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a98e034276e44534a9feace637762da7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-18T16:50:58Z,virtual_size=,visibility=), allow threads: False {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 920.441923] env[61839]: DEBUG nova.virt.hardware [None req-23057198-cea0-4ec8-8098-1a162c29f89f tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Flavor limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 920.442100] env[61839]: DEBUG nova.virt.hardware [None req-23057198-cea0-4ec8-8098-1a162c29f89f tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Image limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 920.442293] env[61839]: DEBUG nova.virt.hardware [None req-23057198-cea0-4ec8-8098-1a162c29f89f tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Flavor pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 920.442444] env[61839]: DEBUG nova.virt.hardware [None req-23057198-cea0-4ec8-8098-1a162c29f89f tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Image pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 920.442595] env[61839]: DEBUG nova.virt.hardware [None req-23057198-cea0-4ec8-8098-1a162c29f89f tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 920.442813] env[61839]: DEBUG nova.virt.hardware [None req-23057198-cea0-4ec8-8098-1a162c29f89f tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 920.443375] env[61839]: DEBUG nova.virt.hardware [None req-23057198-cea0-4ec8-8098-1a162c29f89f tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 920.443375] env[61839]: DEBUG nova.virt.hardware [None 
req-23057198-cea0-4ec8-8098-1a162c29f89f tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Got 1 possible topologies {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 920.443375] env[61839]: DEBUG nova.virt.hardware [None req-23057198-cea0-4ec8-8098-1a162c29f89f tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 920.443631] env[61839]: DEBUG nova.virt.hardware [None req-23057198-cea0-4ec8-8098-1a162c29f89f tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 920.444394] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa4d58d3-a2dc-4b71-b358-f3725b8ff96d {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.458322] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02b851d4-8715-49b3-9fd1-0281a34e1e21 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.462496] env[61839]: DEBUG oslo_vmware.api [None req-8237778e-4e32-46ea-924d-34926c83603d tempest-ServerAddressesNegativeTestJSON-1352160384 tempest-ServerAddressesNegativeTestJSON-1352160384-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52abee19-642a-a9aa-78ee-9127ee5abd39, 'name': SearchDatastore_Task, 'duration_secs': 0.017281} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 920.462848] env[61839]: DEBUG oslo_concurrency.lockutils [None req-8237778e-4e32-46ea-924d-34926c83603d tempest-ServerAddressesNegativeTestJSON-1352160384 tempest-ServerAddressesNegativeTestJSON-1352160384-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 920.463140] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-8237778e-4e32-46ea-924d-34926c83603d tempest-ServerAddressesNegativeTestJSON-1352160384 tempest-ServerAddressesNegativeTestJSON-1352160384-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk to [datastore2] 12087baa-e700-4977-b2df-3aa2c56cc2f6/12087baa-e700-4977-b2df-3aa2c56cc2f6.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 920.463826] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-bc087642-c9a9-419e-be2d-5080fef3f0da {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.476246] env[61839]: DEBUG oslo_vmware.api [None req-8237778e-4e32-46ea-924d-34926c83603d tempest-ServerAddressesNegativeTestJSON-1352160384 tempest-ServerAddressesNegativeTestJSON-1352160384-project-member] Waiting for the task: (returnval){ [ 920.476246] env[61839]: value = "task-1314725" [ 920.476246] env[61839]: _type = "Task" [ 920.476246] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 920.485904] env[61839]: DEBUG oslo_vmware.api [None req-8237778e-4e32-46ea-924d-34926c83603d tempest-ServerAddressesNegativeTestJSON-1352160384 tempest-ServerAddressesNegativeTestJSON-1352160384-project-member] Task: {'id': task-1314725, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 920.525125] env[61839]: DEBUG oslo_vmware.api [None req-6791ea0c-d18a-4d55-a906-7cd1e895b298 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1314723, 'name': RemoveSnapshot_Task} progress is 65%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 920.579725] env[61839]: DEBUG nova.network.neutron [req-84579a04-0e01-44ef-951f-9f059cb54c83 req-38893d96-bc18-4b9d-a323-5f91996fde03 service nova] [instance: 7f7b3f51-3e96-49f1-a84a-81ae649e6938] Updated VIF entry in instance network info cache for port 6b6f5c89-f3fe-4e29-82b8-e9e8f2658bfd. 
{{(pid=61839) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 920.580201] env[61839]: DEBUG nova.network.neutron [req-84579a04-0e01-44ef-951f-9f059cb54c83 req-38893d96-bc18-4b9d-a323-5f91996fde03 service nova] [instance: 7f7b3f51-3e96-49f1-a84a-81ae649e6938] Updating instance_info_cache with network_info: [{"id": "6b6f5c89-f3fe-4e29-82b8-e9e8f2658bfd", "address": "fa:16:3e:17:50:4a", "network": {"id": "90aa5d36-d3ac-4a62-9f9d-7578cd9ac466", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-2104030849-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.144", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "48d8c406ff504d71bba5fb74caf11c14", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eed34ae1-5f7f-4deb-9db8-85eaa1e60c29", "external-id": "nsx-vlan-transportzone-780", "segmentation_id": 780, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6b6f5c89-f3", "ovs_interfaceid": "6b6f5c89-f3fe-4e29-82b8-e9e8f2658bfd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 920.606739] env[61839]: DEBUG oslo_vmware.api [None req-01a8e288-c797-468d-be39-aa244be63e84 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Task: {'id': task-1314724, 'name': PowerOnVM_Task, 'duration_secs': 0.484425} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 920.607030] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-01a8e288-c797-468d-be39-aa244be63e84 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 56369316-a445-4a2a-a0a6-967074104e19] Powered on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 920.607265] env[61839]: DEBUG nova.compute.manager [None req-01a8e288-c797-468d-be39-aa244be63e84 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 56369316-a445-4a2a-a0a6-967074104e19] Checking state {{(pid=61839) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 920.608093] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c99d817-8632-4cf1-94cf-435dbaa1b244 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.624426] env[61839]: DEBUG nova.compute.manager [None req-232c9fff-1e94-49d3-8695-e77d90e281a0 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] [instance: d4a8c153-7585-4c78-8aa4-56077e0a7af6] Starting instance... 
{{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 920.803814] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a4e9d78c-507d-4734-8203-a95de4d6e9b0 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.414s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 920.806799] env[61839]: DEBUG oslo_concurrency.lockutils [None req-5f3d16f0-0f77-47fa-b4e0-12a7b5b00f45 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.118s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 920.807048] env[61839]: DEBUG nova.objects.instance [None req-5f3d16f0-0f77-47fa-b4e0-12a7b5b00f45 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Lazy-loading 'resources' on Instance uuid 042183e2-d203-4d07-a668-df24ba50e242 {{(pid=61839) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 920.987228] env[61839]: DEBUG oslo_vmware.api [None req-8237778e-4e32-46ea-924d-34926c83603d tempest-ServerAddressesNegativeTestJSON-1352160384 tempest-ServerAddressesNegativeTestJSON-1352160384-project-member] Task: {'id': task-1314725, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.024817] env[61839]: DEBUG oslo_vmware.api [None req-6791ea0c-d18a-4d55-a906-7cd1e895b298 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1314723, 'name': RemoveSnapshot_Task, 'duration_secs': 0.593527} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 921.025179] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-6791ea0c-d18a-4d55-a906-7cd1e895b298 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4] Deleted Snapshot of the VM instance {{(pid=61839) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 921.025419] env[61839]: INFO nova.compute.manager [None req-6791ea0c-d18a-4d55-a906-7cd1e895b298 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4] Took 15.53 seconds to snapshot the instance on the hypervisor. 
[ 921.083346] env[61839]: DEBUG oslo_concurrency.lockutils [req-84579a04-0e01-44ef-951f-9f059cb54c83 req-38893d96-bc18-4b9d-a323-5f91996fde03 service nova] Releasing lock "refresh_cache-7f7b3f51-3e96-49f1-a84a-81ae649e6938" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 921.148285] env[61839]: DEBUG oslo_concurrency.lockutils [None req-232c9fff-1e94-49d3-8695-e77d90e281a0 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 921.193841] env[61839]: DEBUG oslo_concurrency.lockutils [None req-da461224-3a6a-4880-b121-dec82bb4b069 tempest-ServerMetadataTestJSON-214230339 tempest-ServerMetadataTestJSON-214230339-project-member] Acquiring lock "bac4c882-a23d-412f-ae98-f4f21d86681a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 921.193841] env[61839]: DEBUG oslo_concurrency.lockutils [None req-da461224-3a6a-4880-b121-dec82bb4b069 tempest-ServerMetadataTestJSON-214230339 tempest-ServerMetadataTestJSON-214230339-project-member] Lock "bac4c882-a23d-412f-ae98-f4f21d86681a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 921.193841] env[61839]: DEBUG oslo_concurrency.lockutils [None req-da461224-3a6a-4880-b121-dec82bb4b069 tempest-ServerMetadataTestJSON-214230339 tempest-ServerMetadataTestJSON-214230339-project-member] Acquiring lock "bac4c882-a23d-412f-ae98-f4f21d86681a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 921.193841] env[61839]: DEBUG oslo_concurrency.lockutils [None req-da461224-3a6a-4880-b121-dec82bb4b069 tempest-ServerMetadataTestJSON-214230339 tempest-ServerMetadataTestJSON-214230339-project-member] Lock "bac4c882-a23d-412f-ae98-f4f21d86681a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 921.193841] env[61839]: DEBUG oslo_concurrency.lockutils [None req-da461224-3a6a-4880-b121-dec82bb4b069 tempest-ServerMetadataTestJSON-214230339 tempest-ServerMetadataTestJSON-214230339-project-member] Lock "bac4c882-a23d-412f-ae98-f4f21d86681a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 921.195247] env[61839]: INFO nova.compute.manager [None req-da461224-3a6a-4880-b121-dec82bb4b069 tempest-ServerMetadataTestJSON-214230339 tempest-ServerMetadataTestJSON-214230339-project-member] [instance: bac4c882-a23d-412f-ae98-f4f21d86681a] Terminating instance [ 921.197585] env[61839]: DEBUG nova.compute.manager [None req-da461224-3a6a-4880-b121-dec82bb4b069 tempest-ServerMetadataTestJSON-214230339 tempest-ServerMetadataTestJSON-214230339-project-member] [instance: 
bac4c882-a23d-412f-ae98-f4f21d86681a] Start destroying the instance on the hypervisor. {{(pid=61839) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 921.197960] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-da461224-3a6a-4880-b121-dec82bb4b069 tempest-ServerMetadataTestJSON-214230339 tempest-ServerMetadataTestJSON-214230339-project-member] [instance: bac4c882-a23d-412f-ae98-f4f21d86681a] Destroying instance {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 921.198953] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91188184-2492-4367-a6d0-e5c40db620f4 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.207474] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-da461224-3a6a-4880-b121-dec82bb4b069 tempest-ServerMetadataTestJSON-214230339 tempest-ServerMetadataTestJSON-214230339-project-member] [instance: bac4c882-a23d-412f-ae98-f4f21d86681a] Powering off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 921.208360] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6f5917e1-3e0b-46a2-ae6a-f589e274bbb8 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.220025] env[61839]: DEBUG oslo_vmware.api [None req-da461224-3a6a-4880-b121-dec82bb4b069 tempest-ServerMetadataTestJSON-214230339 tempest-ServerMetadataTestJSON-214230339-project-member] Waiting for the task: (returnval){ [ 921.220025] env[61839]: value = "task-1314726" [ 921.220025] env[61839]: _type = "Task" [ 921.220025] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 921.225502] env[61839]: DEBUG oslo_vmware.api [None req-da461224-3a6a-4880-b121-dec82bb4b069 tempest-ServerMetadataTestJSON-214230339 tempest-ServerMetadataTestJSON-214230339-project-member] Task: {'id': task-1314726, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.361436] env[61839]: INFO nova.scheduler.client.report [None req-a4e9d78c-507d-4734-8203-a95de4d6e9b0 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Deleted allocation for migration 833b7c1c-6dd7-46e6-b8d3-07af4e28c4d6 [ 921.491400] env[61839]: DEBUG oslo_vmware.api [None req-8237778e-4e32-46ea-924d-34926c83603d tempest-ServerAddressesNegativeTestJSON-1352160384 tempest-ServerAddressesNegativeTestJSON-1352160384-project-member] Task: {'id': task-1314725, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.720861} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 921.492196] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-8237778e-4e32-46ea-924d-34926c83603d tempest-ServerAddressesNegativeTestJSON-1352160384 tempest-ServerAddressesNegativeTestJSON-1352160384-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk to [datastore2] 12087baa-e700-4977-b2df-3aa2c56cc2f6/12087baa-e700-4977-b2df-3aa2c56cc2f6.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 921.492889] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-8237778e-4e32-46ea-924d-34926c83603d tempest-ServerAddressesNegativeTestJSON-1352160384 tempest-ServerAddressesNegativeTestJSON-1352160384-project-member] [instance: 12087baa-e700-4977-b2df-3aa2c56cc2f6] Extending root virtual disk to 1048576 {{(pid=61839) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 921.493179] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c0d04e7e-f904-4e95-a52c-a17f8d414dde {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.503085] env[61839]: DEBUG oslo_vmware.api [None req-8237778e-4e32-46ea-924d-34926c83603d tempest-ServerAddressesNegativeTestJSON-1352160384 tempest-ServerAddressesNegativeTestJSON-1352160384-project-member] Waiting for the task: (returnval){ [ 921.503085] env[61839]: value = "task-1314727" [ 921.503085] env[61839]: _type = "Task" [ 921.503085] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 921.510658] env[61839]: DEBUG oslo_vmware.api [None req-8237778e-4e32-46ea-924d-34926c83603d tempest-ServerAddressesNegativeTestJSON-1352160384 tempest-ServerAddressesNegativeTestJSON-1352160384-project-member] Task: {'id': task-1314727, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.579687] env[61839]: DEBUG nova.compute.manager [None req-6791ea0c-d18a-4d55-a906-7cd1e895b298 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4] Found 3 images (rotation: 2) {{(pid=61839) _rotate_backups /opt/stack/nova/nova/compute/manager.py:4554}} [ 921.579983] env[61839]: DEBUG nova.compute.manager [None req-6791ea0c-d18a-4d55-a906-7cd1e895b298 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4] Rotating out 1 backups {{(pid=61839) _rotate_backups /opt/stack/nova/nova/compute/manager.py:4562}} [ 921.580240] env[61839]: DEBUG nova.compute.manager [None req-6791ea0c-d18a-4d55-a906-7cd1e895b298 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4] Deleting image f5330b3b-767a-4697-b50e-19123c586f85 {{(pid=61839) _rotate_backups /opt/stack/nova/nova/compute/manager.py:4567}} [ 921.616078] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2b16a37-af19-45e1-b234-33cf021e414f {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.622747] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13a58b52-496f-4e8f-84aa-7d73cfc6fb91 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.657648] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0e926ab-4a70-4b08-8132-f0f3bd821f93 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.667943] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab0851bc-d092-49d5-b31e-35d5f1b3d0b3 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.685174] env[61839]: DEBUG nova.compute.provider_tree [None req-5f3d16f0-0f77-47fa-b4e0-12a7b5b00f45 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 921.729889] env[61839]: DEBUG oslo_vmware.api [None req-da461224-3a6a-4880-b121-dec82bb4b069 tempest-ServerMetadataTestJSON-214230339 tempest-ServerMetadataTestJSON-214230339-project-member] Task: {'id': task-1314726, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.835571] env[61839]: DEBUG nova.compute.manager [req-8fd7a307-0ac0-4ec6-9cc8-7831c21a283f req-45e0d660-9434-4707-a4ef-aa1ed8d26e31 service nova] [instance: e265dcd3-6ab5-44b1-85be-bad934ebdb79] Received event network-vif-plugged-fd557952-f86b-487e-9fb9-8cb7d176384e {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 921.835879] env[61839]: DEBUG oslo_concurrency.lockutils [req-8fd7a307-0ac0-4ec6-9cc8-7831c21a283f req-45e0d660-9434-4707-a4ef-aa1ed8d26e31 service nova] Acquiring lock "e265dcd3-6ab5-44b1-85be-bad934ebdb79-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 921.836166] env[61839]: DEBUG oslo_concurrency.lockutils [req-8fd7a307-0ac0-4ec6-9cc8-7831c21a283f req-45e0d660-9434-4707-a4ef-aa1ed8d26e31 service nova] Lock "e265dcd3-6ab5-44b1-85be-bad934ebdb79-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 921.836299] env[61839]: DEBUG oslo_concurrency.lockutils [req-8fd7a307-0ac0-4ec6-9cc8-7831c21a283f req-45e0d660-9434-4707-a4ef-aa1ed8d26e31 service nova] Lock "e265dcd3-6ab5-44b1-85be-bad934ebdb79-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 921.836477] env[61839]: DEBUG nova.compute.manager [req-8fd7a307-0ac0-4ec6-9cc8-7831c21a283f req-45e0d660-9434-4707-a4ef-aa1ed8d26e31 service nova] [instance: e265dcd3-6ab5-44b1-85be-bad934ebdb79] No waiting events found dispatching network-vif-plugged-fd557952-f86b-487e-9fb9-8cb7d176384e {{(pid=61839) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 921.836650] env[61839]: WARNING nova.compute.manager [req-8fd7a307-0ac0-4ec6-9cc8-7831c21a283f req-45e0d660-9434-4707-a4ef-aa1ed8d26e31 service nova] [instance: e265dcd3-6ab5-44b1-85be-bad934ebdb79] Received unexpected event network-vif-plugged-fd557952-f86b-487e-9fb9-8cb7d176384e for instance with vm_state building and task_state spawning. [ 921.869790] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a4e9d78c-507d-4734-8203-a95de4d6e9b0 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Lock "fa8a2265-291e-4424-bea1-72574e495a72" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 10.992s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 922.013871] env[61839]: DEBUG oslo_vmware.api [None req-8237778e-4e32-46ea-924d-34926c83603d tempest-ServerAddressesNegativeTestJSON-1352160384 tempest-ServerAddressesNegativeTestJSON-1352160384-project-member] Task: {'id': task-1314727, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.291039} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 922.014250] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-8237778e-4e32-46ea-924d-34926c83603d tempest-ServerAddressesNegativeTestJSON-1352160384 tempest-ServerAddressesNegativeTestJSON-1352160384-project-member] [instance: 12087baa-e700-4977-b2df-3aa2c56cc2f6] Extended root virtual disk {{(pid=61839) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 922.015092] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49910e2b-323f-45d4-b099-58eec674a378 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.041261] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-8237778e-4e32-46ea-924d-34926c83603d tempest-ServerAddressesNegativeTestJSON-1352160384 tempest-ServerAddressesNegativeTestJSON-1352160384-project-member] [instance: 12087baa-e700-4977-b2df-3aa2c56cc2f6] Reconfiguring VM instance instance-00000056 to attach disk [datastore2] 12087baa-e700-4977-b2df-3aa2c56cc2f6/12087baa-e700-4977-b2df-3aa2c56cc2f6.vmdk or device None with type sparse {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 922.041627] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-24375a29-f7aa-4c1e-9e59-c068627f4e5a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.064084] env[61839]: DEBUG oslo_vmware.api [None req-8237778e-4e32-46ea-924d-34926c83603d tempest-ServerAddressesNegativeTestJSON-1352160384 tempest-ServerAddressesNegativeTestJSON-1352160384-project-member] Waiting for the task: (returnval){ [ 922.064084] env[61839]: value = "task-1314728" [ 922.064084] env[61839]: _type = "Task" [ 922.064084] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 922.073482] env[61839]: DEBUG oslo_vmware.api [None req-8237778e-4e32-46ea-924d-34926c83603d tempest-ServerAddressesNegativeTestJSON-1352160384 tempest-ServerAddressesNegativeTestJSON-1352160384-project-member] Task: {'id': task-1314728, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 922.189203] env[61839]: DEBUG nova.scheduler.client.report [None req-5f3d16f0-0f77-47fa-b4e0-12a7b5b00f45 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 922.233039] env[61839]: DEBUG oslo_vmware.api [None req-da461224-3a6a-4880-b121-dec82bb4b069 tempest-ServerMetadataTestJSON-214230339 tempest-ServerMetadataTestJSON-214230339-project-member] Task: {'id': task-1314726, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 922.330253] env[61839]: DEBUG nova.network.neutron [None req-23057198-cea0-4ec8-8098-1a162c29f89f tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: e265dcd3-6ab5-44b1-85be-bad934ebdb79] Successfully updated port: fd557952-f86b-487e-9fb9-8cb7d176384e {{(pid=61839) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 922.574818] env[61839]: DEBUG oslo_vmware.api [None req-8237778e-4e32-46ea-924d-34926c83603d tempest-ServerAddressesNegativeTestJSON-1352160384 tempest-ServerAddressesNegativeTestJSON-1352160384-project-member] Task: {'id': task-1314728, 'name': ReconfigVM_Task} progress is 99%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 922.616337] env[61839]: DEBUG nova.compute.manager [req-db2052bd-715a-4066-aef2-6200836ac98c req-4449a20b-959b-449a-92b9-66bc8a14ca8c service nova] [instance: e265dcd3-6ab5-44b1-85be-bad934ebdb79] Received event network-changed-fd557952-f86b-487e-9fb9-8cb7d176384e {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 922.617414] env[61839]: DEBUG nova.compute.manager [req-db2052bd-715a-4066-aef2-6200836ac98c req-4449a20b-959b-449a-92b9-66bc8a14ca8c service nova] [instance: e265dcd3-6ab5-44b1-85be-bad934ebdb79] Refreshing instance network info cache due to event network-changed-fd557952-f86b-487e-9fb9-8cb7d176384e. {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 922.617414] env[61839]: DEBUG oslo_concurrency.lockutils [req-db2052bd-715a-4066-aef2-6200836ac98c req-4449a20b-959b-449a-92b9-66bc8a14ca8c service nova] Acquiring lock "refresh_cache-e265dcd3-6ab5-44b1-85be-bad934ebdb79" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 922.617414] env[61839]: DEBUG oslo_concurrency.lockutils [req-db2052bd-715a-4066-aef2-6200836ac98c req-4449a20b-959b-449a-92b9-66bc8a14ca8c service nova] Acquired lock "refresh_cache-e265dcd3-6ab5-44b1-85be-bad934ebdb79" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 922.617414] env[61839]: DEBUG nova.network.neutron [req-db2052bd-715a-4066-aef2-6200836ac98c req-4449a20b-959b-449a-92b9-66bc8a14ca8c service nova] [instance: e265dcd3-6ab5-44b1-85be-bad934ebdb79] Refreshing network info cache for port fd557952-f86b-487e-9fb9-8cb7d176384e {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 922.697284] env[61839]: DEBUG oslo_concurrency.lockutils [None req-5f3d16f0-0f77-47fa-b4e0-12a7b5b00f45 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.888s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 922.699845] env[61839]: DEBUG oslo_concurrency.lockutils [None req-38baf470-d8c9-4f5a-bcbc-11976bf3f84e tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.928s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 922.700200] env[61839]: DEBUG nova.objects.instance [None req-38baf470-d8c9-4f5a-bcbc-11976bf3f84e 
tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Lazy-loading 'resources' on Instance uuid 0d42326a-9958-463a-90ae-34fb55e99c5b {{(pid=61839) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 922.724625] env[61839]: INFO nova.scheduler.client.report [None req-5f3d16f0-0f77-47fa-b4e0-12a7b5b00f45 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Deleted allocations for instance 042183e2-d203-4d07-a668-df24ba50e242 [ 922.732604] env[61839]: DEBUG oslo_vmware.api [None req-da461224-3a6a-4880-b121-dec82bb4b069 tempest-ServerMetadataTestJSON-214230339 tempest-ServerMetadataTestJSON-214230339-project-member] Task: {'id': task-1314726, 'name': PowerOffVM_Task, 'duration_secs': 1.252415} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 922.733102] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-da461224-3a6a-4880-b121-dec82bb4b069 tempest-ServerMetadataTestJSON-214230339 tempest-ServerMetadataTestJSON-214230339-project-member] [instance: bac4c882-a23d-412f-ae98-f4f21d86681a] Powered off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 922.733298] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-da461224-3a6a-4880-b121-dec82bb4b069 tempest-ServerMetadataTestJSON-214230339 tempest-ServerMetadataTestJSON-214230339-project-member] [instance: bac4c882-a23d-412f-ae98-f4f21d86681a] Unregistering the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 922.733545] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-59cceed4-65a6-48d0-a8ec-4d2abaf4dc14 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.818127] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-da461224-3a6a-4880-b121-dec82bb4b069 tempest-ServerMetadataTestJSON-214230339 tempest-ServerMetadataTestJSON-214230339-project-member] [instance: bac4c882-a23d-412f-ae98-f4f21d86681a] Unregistered the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 922.818399] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-da461224-3a6a-4880-b121-dec82bb4b069 tempest-ServerMetadataTestJSON-214230339 tempest-ServerMetadataTestJSON-214230339-project-member] [instance: bac4c882-a23d-412f-ae98-f4f21d86681a] Deleting contents of the VM from datastore datastore1 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 922.818673] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-da461224-3a6a-4880-b121-dec82bb4b069 tempest-ServerMetadataTestJSON-214230339 tempest-ServerMetadataTestJSON-214230339-project-member] Deleting the datastore file [datastore1] bac4c882-a23d-412f-ae98-f4f21d86681a {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 922.819077] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-84a3eda1-dbf0-4beb-b781-3d2b07f3eb6e {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.827885] env[61839]: DEBUG oslo_vmware.api [None req-da461224-3a6a-4880-b121-dec82bb4b069 tempest-ServerMetadataTestJSON-214230339 tempest-ServerMetadataTestJSON-214230339-project-member] Waiting for the task: (returnval){ [ 922.827885] env[61839]: value = "task-1314730" [ 922.827885] env[61839]: 
_type = "Task" [ 922.827885] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 922.839098] env[61839]: DEBUG oslo_concurrency.lockutils [None req-23057198-cea0-4ec8-8098-1a162c29f89f tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Acquiring lock "refresh_cache-e265dcd3-6ab5-44b1-85be-bad934ebdb79" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 922.839398] env[61839]: DEBUG oslo_vmware.api [None req-da461224-3a6a-4880-b121-dec82bb4b069 tempest-ServerMetadataTestJSON-214230339 tempest-ServerMetadataTestJSON-214230339-project-member] Task: {'id': task-1314730, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 923.076533] env[61839]: DEBUG oslo_vmware.api [None req-8237778e-4e32-46ea-924d-34926c83603d tempest-ServerAddressesNegativeTestJSON-1352160384 tempest-ServerAddressesNegativeTestJSON-1352160384-project-member] Task: {'id': task-1314728, 'name': ReconfigVM_Task} progress is 99%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 923.103224] env[61839]: DEBUG oslo_concurrency.lockutils [None req-1e94be6d-33e1-4c83-9b5e-75a1fbfe5edf tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Acquiring lock "86525ea7-af75-4b10-85a1-c0fbab73ea5f" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 923.103537] env[61839]: DEBUG oslo_concurrency.lockutils [None req-1e94be6d-33e1-4c83-9b5e-75a1fbfe5edf tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Lock "86525ea7-af75-4b10-85a1-c0fbab73ea5f" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 923.103758] env[61839]: INFO nova.compute.manager [None req-1e94be6d-33e1-4c83-9b5e-75a1fbfe5edf tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] [instance: 86525ea7-af75-4b10-85a1-c0fbab73ea5f] Shelving [ 923.147934] env[61839]: DEBUG nova.network.neutron [req-db2052bd-715a-4066-aef2-6200836ac98c req-4449a20b-959b-449a-92b9-66bc8a14ca8c service nova] [instance: e265dcd3-6ab5-44b1-85be-bad934ebdb79] Instance cache missing network info. 
{{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 923.237508] env[61839]: DEBUG oslo_concurrency.lockutils [None req-5f3d16f0-0f77-47fa-b4e0-12a7b5b00f45 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Lock "042183e2-d203-4d07-a668-df24ba50e242" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 12.013s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 923.296150] env[61839]: DEBUG nova.network.neutron [req-db2052bd-715a-4066-aef2-6200836ac98c req-4449a20b-959b-449a-92b9-66bc8a14ca8c service nova] [instance: e265dcd3-6ab5-44b1-85be-bad934ebdb79] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 923.339431] env[61839]: DEBUG oslo_vmware.api [None req-da461224-3a6a-4880-b121-dec82bb4b069 tempest-ServerMetadataTestJSON-214230339 tempest-ServerMetadataTestJSON-214230339-project-member] Task: {'id': task-1314730, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.292201} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 923.341710] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-da461224-3a6a-4880-b121-dec82bb4b069 tempest-ServerMetadataTestJSON-214230339 tempest-ServerMetadataTestJSON-214230339-project-member] Deleted the datastore file {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 923.342345] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-da461224-3a6a-4880-b121-dec82bb4b069 tempest-ServerMetadataTestJSON-214230339 tempest-ServerMetadataTestJSON-214230339-project-member] [instance: bac4c882-a23d-412f-ae98-f4f21d86681a] Deleted contents of the VM from datastore datastore1 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 923.342345] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-da461224-3a6a-4880-b121-dec82bb4b069 tempest-ServerMetadataTestJSON-214230339 tempest-ServerMetadataTestJSON-214230339-project-member] [instance: bac4c882-a23d-412f-ae98-f4f21d86681a] Instance destroyed {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 923.342345] env[61839]: INFO nova.compute.manager [None req-da461224-3a6a-4880-b121-dec82bb4b069 tempest-ServerMetadataTestJSON-214230339 tempest-ServerMetadataTestJSON-214230339-project-member] [instance: bac4c882-a23d-412f-ae98-f4f21d86681a] Took 2.14 seconds to destroy the instance on the hypervisor. [ 923.342528] env[61839]: DEBUG oslo.service.loopingcall [None req-da461224-3a6a-4880-b121-dec82bb4b069 tempest-ServerMetadataTestJSON-214230339 tempest-ServerMetadataTestJSON-214230339-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
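[Editor's note] The "Waiting for function ..._deallocate_network_with_retries to return" line comes from oslo.service's looping-call module: nova wraps network deallocation in a retry loop so a transient Neutron failure during teardown does not leak port allocations. Whether this exact helper is used is inferred only from the loopingcall.py path in the record, so the sketch below shows the generic retry wrapper oslo.service provides, with placeholder retry parameters:

```python
from oslo_service import loopingcall

@loopingcall.RetryDecorator(max_retry_count=3, inc_sleep_time=1,
                            max_sleep_time=10,
                            exceptions=(ConnectionError,))
def deallocate_network_with_retries():
    # Call out to Neutron here; only the listed exception types trigger
    # a sleep-and-retry, anything else propagates immediately.
    pass
```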
{{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 923.343790] env[61839]: DEBUG nova.compute.manager [-] [instance: bac4c882-a23d-412f-ae98-f4f21d86681a] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 923.343790] env[61839]: DEBUG nova.network.neutron [-] [instance: bac4c882-a23d-412f-ae98-f4f21d86681a] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 923.495447] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1b76f63-5027-4f6e-998d-e27beb4e7511 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.504159] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5636f0a9-b367-4f56-8705-8f79fdafca5d {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.537953] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c6d2ba0-71e0-4c80-9704-5b8b4e7bc147 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.549776] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b6cb18d-e97f-4d0c-a331-61b5c3b9350f {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.565200] env[61839]: DEBUG nova.compute.provider_tree [None req-38baf470-d8c9-4f5a-bcbc-11976bf3f84e tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 923.575117] env[61839]: DEBUG oslo_vmware.api [None req-8237778e-4e32-46ea-924d-34926c83603d tempest-ServerAddressesNegativeTestJSON-1352160384 tempest-ServerAddressesNegativeTestJSON-1352160384-project-member] Task: {'id': task-1314728, 'name': ReconfigVM_Task, 'duration_secs': 1.356884} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 923.575924] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-8237778e-4e32-46ea-924d-34926c83603d tempest-ServerAddressesNegativeTestJSON-1352160384 tempest-ServerAddressesNegativeTestJSON-1352160384-project-member] [instance: 12087baa-e700-4977-b2df-3aa2c56cc2f6] Reconfigured VM instance instance-00000056 to attach disk [datastore2] 12087baa-e700-4977-b2df-3aa2c56cc2f6/12087baa-e700-4977-b2df-3aa2c56cc2f6.vmdk or device None with type sparse {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 923.576297] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-135fe0ff-0946-4f4b-ab51-1d48ca4d8fd9 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.584296] env[61839]: DEBUG oslo_vmware.api [None req-8237778e-4e32-46ea-924d-34926c83603d tempest-ServerAddressesNegativeTestJSON-1352160384 tempest-ServerAddressesNegativeTestJSON-1352160384-project-member] Waiting for the task: (returnval){ [ 923.584296] env[61839]: value = "task-1314731" [ 923.584296] env[61839]: _type = "Task" [ 923.584296] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 923.594848] env[61839]: DEBUG oslo_vmware.api [None req-8237778e-4e32-46ea-924d-34926c83603d tempest-ServerAddressesNegativeTestJSON-1352160384 tempest-ServerAddressesNegativeTestJSON-1352160384-project-member] Task: {'id': task-1314731, 'name': Rename_Task} progress is 5%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 923.614931] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-1e94be6d-33e1-4c83-9b5e-75a1fbfe5edf tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] [instance: 86525ea7-af75-4b10-85a1-c0fbab73ea5f] Powering off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 923.615234] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-49773320-3937-444e-bf8e-7da719b2c91e {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.623685] env[61839]: DEBUG oslo_vmware.api [None req-1e94be6d-33e1-4c83-9b5e-75a1fbfe5edf tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Waiting for the task: (returnval){ [ 923.623685] env[61839]: value = "task-1314732" [ 923.623685] env[61839]: _type = "Task" [ 923.623685] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 923.632654] env[61839]: DEBUG oslo_vmware.api [None req-1e94be6d-33e1-4c83-9b5e-75a1fbfe5edf tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Task: {'id': task-1314732, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 923.700125] env[61839]: DEBUG oslo_concurrency.lockutils [None req-294e9c99-ffcc-4188-96c2-7bc59baf0651 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Acquiring lock "ef49a6f5-27c3-4595-af65-d6a5aa47d4e4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 923.700492] env[61839]: DEBUG oslo_concurrency.lockutils [None req-294e9c99-ffcc-4188-96c2-7bc59baf0651 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Lock "ef49a6f5-27c3-4595-af65-d6a5aa47d4e4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 923.700654] env[61839]: DEBUG oslo_concurrency.lockutils [None req-294e9c99-ffcc-4188-96c2-7bc59baf0651 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Acquiring lock "ef49a6f5-27c3-4595-af65-d6a5aa47d4e4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 923.700847] env[61839]: DEBUG oslo_concurrency.lockutils [None req-294e9c99-ffcc-4188-96c2-7bc59baf0651 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Lock "ef49a6f5-27c3-4595-af65-d6a5aa47d4e4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 923.701152] env[61839]: DEBUG oslo_concurrency.lockutils [None req-294e9c99-ffcc-4188-96c2-7bc59baf0651 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Lock "ef49a6f5-27c3-4595-af65-d6a5aa47d4e4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 923.703507] env[61839]: INFO nova.compute.manager [None req-294e9c99-ffcc-4188-96c2-7bc59baf0651 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: ef49a6f5-27c3-4595-af65-d6a5aa47d4e4] Terminating instance [ 923.706050] env[61839]: DEBUG nova.compute.manager [None req-294e9c99-ffcc-4188-96c2-7bc59baf0651 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: ef49a6f5-27c3-4595-af65-d6a5aa47d4e4] Start destroying the instance on the hypervisor. 
{{(pid=61839) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 923.706262] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-294e9c99-ffcc-4188-96c2-7bc59baf0651 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: ef49a6f5-27c3-4595-af65-d6a5aa47d4e4] Destroying instance {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 923.707174] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9acebd7c-3d4c-405a-8456-ea8d09b95d3e {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.719705] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-294e9c99-ffcc-4188-96c2-7bc59baf0651 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: ef49a6f5-27c3-4595-af65-d6a5aa47d4e4] Powering off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 923.720052] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f1c347d1-dbc4-4286-bd46-37c23975de91 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.729202] env[61839]: DEBUG oslo_vmware.api [None req-294e9c99-ffcc-4188-96c2-7bc59baf0651 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Waiting for the task: (returnval){ [ 923.729202] env[61839]: value = "task-1314733" [ 923.729202] env[61839]: _type = "Task" [ 923.729202] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 923.740717] env[61839]: DEBUG oslo_vmware.api [None req-294e9c99-ffcc-4188-96c2-7bc59baf0651 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Task: {'id': task-1314733, 'name': PowerOffVM_Task} progress is 0%. 
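[Editor's note] The records before and after this point trace the vmwareapi destroy path twice (for bac4c882… and ef49a6f5…): power off, unregister, delete the datastore files, then deallocate the network. A condensed outline of the hypervisor-side steps, with the session/moref plumbing assumed rather than shown:

```python
def destroy_vm(session, vm_ref, ds_path, dc_ref):
    # PowerOffVM_Task (cf. task-1314733 above), then UnregisterVM, which
    # removes the VM from inventory but does not touch its files.
    session.wait_for_task(
        session._call_method(session.vim, 'PowerOffVM_Task', vm_ref))
    session._call_method(session.vim, 'UnregisterVM', vm_ref)
    # The files are removed explicitly through the FileManager, matching
    # the DeleteDatastoreFile_Task records in this log.
    file_manager = session.vim.service_content.fileManager
    session.wait_for_task(
        session._call_method(session.vim, 'DeleteDatastoreFile_Task',
                             file_manager, name=str(ds_path),
                             datacenter=dc_ref))
```

Network deallocation then happens on the nova side (the "Deallocating network for instance" records), not through vCenter.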
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 923.800305] env[61839]: DEBUG oslo_concurrency.lockutils [req-db2052bd-715a-4066-aef2-6200836ac98c req-4449a20b-959b-449a-92b9-66bc8a14ca8c service nova] Releasing lock "refresh_cache-e265dcd3-6ab5-44b1-85be-bad934ebdb79" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 923.800721] env[61839]: DEBUG oslo_concurrency.lockutils [None req-23057198-cea0-4ec8-8098-1a162c29f89f tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Acquired lock "refresh_cache-e265dcd3-6ab5-44b1-85be-bad934ebdb79" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 923.800891] env[61839]: DEBUG nova.network.neutron [None req-23057198-cea0-4ec8-8098-1a162c29f89f tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: e265dcd3-6ab5-44b1-85be-bad934ebdb79] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 924.044705] env[61839]: INFO nova.compute.manager [None req-0c8f58da-95bd-460c-b9d5-27f4db5a2eae tempest-ServerActionsV293TestJSON-151230769 tempest-ServerActionsV293TestJSON-151230769-project-member] [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af] Rebuilding instance [ 924.072813] env[61839]: DEBUG nova.scheduler.client.report [None req-38baf470-d8c9-4f5a-bcbc-11976bf3f84e tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 924.094649] env[61839]: DEBUG oslo_vmware.api [None req-8237778e-4e32-46ea-924d-34926c83603d tempest-ServerAddressesNegativeTestJSON-1352160384 tempest-ServerAddressesNegativeTestJSON-1352160384-project-member] Task: {'id': task-1314731, 'name': Rename_Task, 'duration_secs': 0.233402} completed successfully. 
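[Editor's note] The inventory record above is what the scheduler report client syncs to Placement. The usable capacity Placement derives from it is (total - reserved) * allocation_ratio, while max_unit caps any single allocation (16 VCPUs here). A worked example with the exact numbers from the record:

```python
# Capacity Placement derives from the inventory logged above.
inventory = {
    'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB': {'total': 400, 'reserved': 0, 'allocation_ratio': 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(rc, capacity)  # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0
```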
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 924.097201] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-8237778e-4e32-46ea-924d-34926c83603d tempest-ServerAddressesNegativeTestJSON-1352160384 tempest-ServerAddressesNegativeTestJSON-1352160384-project-member] [instance: 12087baa-e700-4977-b2df-3aa2c56cc2f6] Powering on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 924.099154] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fe677957-62da-4a47-9846-d62117962913 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.101154] env[61839]: DEBUG nova.compute.manager [None req-0c8f58da-95bd-460c-b9d5-27f4db5a2eae tempest-ServerActionsV293TestJSON-151230769 tempest-ServerActionsV293TestJSON-151230769-project-member] [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af] Checking state {{(pid=61839) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 924.101959] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8dccc827-b868-4854-85f8-f8b1e2ec7324 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.112839] env[61839]: DEBUG oslo_vmware.api [None req-8237778e-4e32-46ea-924d-34926c83603d tempest-ServerAddressesNegativeTestJSON-1352160384 tempest-ServerAddressesNegativeTestJSON-1352160384-project-member] Waiting for the task: (returnval){ [ 924.112839] env[61839]: value = "task-1314734" [ 924.112839] env[61839]: _type = "Task" [ 924.112839] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 924.120877] env[61839]: DEBUG oslo_vmware.api [None req-8237778e-4e32-46ea-924d-34926c83603d tempest-ServerAddressesNegativeTestJSON-1352160384 tempest-ServerAddressesNegativeTestJSON-1352160384-project-member] Task: {'id': task-1314734, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.139616] env[61839]: DEBUG oslo_vmware.api [None req-1e94be6d-33e1-4c83-9b5e-75a1fbfe5edf tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Task: {'id': task-1314732, 'name': PowerOffVM_Task, 'duration_secs': 0.297466} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 924.140224] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-1e94be6d-33e1-4c83-9b5e-75a1fbfe5edf tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] [instance: 86525ea7-af75-4b10-85a1-c0fbab73ea5f] Powered off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 924.141061] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94b6c9d0-476a-4b6a-b387-cc8980ca18ad {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.165403] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-966c3320-03a6-4026-9339-4d22aca8a43b {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.213306] env[61839]: DEBUG nova.network.neutron [-] [instance: bac4c882-a23d-412f-ae98-f4f21d86681a] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 924.241228] env[61839]: DEBUG oslo_vmware.api [None req-294e9c99-ffcc-4188-96c2-7bc59baf0651 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Task: {'id': task-1314733, 'name': PowerOffVM_Task, 'duration_secs': 0.215878} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 924.241554] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-294e9c99-ffcc-4188-96c2-7bc59baf0651 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: ef49a6f5-27c3-4595-af65-d6a5aa47d4e4] Powered off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 924.241733] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-294e9c99-ffcc-4188-96c2-7bc59baf0651 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: ef49a6f5-27c3-4595-af65-d6a5aa47d4e4] Unregistering the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 924.242056] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f15ef0d4-a610-4b16-94f2-507e35dffc4a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.316276] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-294e9c99-ffcc-4188-96c2-7bc59baf0651 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: ef49a6f5-27c3-4595-af65-d6a5aa47d4e4] Unregistered the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 924.316598] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-294e9c99-ffcc-4188-96c2-7bc59baf0651 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: ef49a6f5-27c3-4595-af65-d6a5aa47d4e4] Deleting contents of the VM from datastore datastore1 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 924.316844] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-294e9c99-ffcc-4188-96c2-7bc59baf0651 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Deleting the datastore file [datastore1] 
ef49a6f5-27c3-4595-af65-d6a5aa47d4e4 {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 924.317150] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2edc856d-679f-4d1f-81f9-c7a6228eff64 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.325323] env[61839]: DEBUG oslo_vmware.api [None req-294e9c99-ffcc-4188-96c2-7bc59baf0651 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Waiting for the task: (returnval){ [ 924.325323] env[61839]: value = "task-1314736" [ 924.325323] env[61839]: _type = "Task" [ 924.325323] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 924.333980] env[61839]: DEBUG oslo_vmware.api [None req-294e9c99-ffcc-4188-96c2-7bc59baf0651 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Task: {'id': task-1314736, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.357131] env[61839]: DEBUG nova.network.neutron [None req-23057198-cea0-4ec8-8098-1a162c29f89f tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: e265dcd3-6ab5-44b1-85be-bad934ebdb79] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 924.577644] env[61839]: DEBUG oslo_concurrency.lockutils [None req-38baf470-d8c9-4f5a-bcbc-11976bf3f84e tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.878s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 924.580211] env[61839]: DEBUG oslo_concurrency.lockutils [None req-bf3bcffb-6c34-4e16-a353-b3f853008852 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.344s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 924.580477] env[61839]: DEBUG nova.objects.instance [None req-bf3bcffb-6c34-4e16-a353-b3f853008852 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Lazy-loading 'pci_requests' on Instance uuid 694a5d4b-3673-406b-a24a-d37fad33e549 {{(pid=61839) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 924.598887] env[61839]: INFO nova.scheduler.client.report [None req-38baf470-d8c9-4f5a-bcbc-11976bf3f84e tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Deleted allocations for instance 0d42326a-9958-463a-90ae-34fb55e99c5b [ 924.614688] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-0c8f58da-95bd-460c-b9d5-27f4db5a2eae tempest-ServerActionsV293TestJSON-151230769 tempest-ServerActionsV293TestJSON-151230769-project-member] [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af] Powering off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 924.615119] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with 
opID=oslo.vmware-75a4fd33-30e0-412d-916e-0c3b0c4b7f5a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.627468] env[61839]: DEBUG oslo_vmware.api [None req-8237778e-4e32-46ea-924d-34926c83603d tempest-ServerAddressesNegativeTestJSON-1352160384 tempest-ServerAddressesNegativeTestJSON-1352160384-project-member] Task: {'id': task-1314734, 'name': PowerOnVM_Task} progress is 94%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.628440] env[61839]: DEBUG nova.network.neutron [None req-23057198-cea0-4ec8-8098-1a162c29f89f tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: e265dcd3-6ab5-44b1-85be-bad934ebdb79] Updating instance_info_cache with network_info: [{"id": "fd557952-f86b-487e-9fb9-8cb7d176384e", "address": "fa:16:3e:c1:5a:fc", "network": {"id": "47b25b31-1262-485a-a847-2918f7fce488", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-914746239-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "25686a503d044467a1d641f14e14c65c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8c58d99d-ec12-4fc3-ab39-042b3f8cbb89", "external-id": "nsx-vlan-transportzone-44", "segmentation_id": 44, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfd557952-f8", "ovs_interfaceid": "fd557952-f86b-487e-9fb9-8cb7d176384e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 924.634786] env[61839]: DEBUG oslo_vmware.api [None req-0c8f58da-95bd-460c-b9d5-27f4db5a2eae tempest-ServerActionsV293TestJSON-151230769 tempest-ServerActionsV293TestJSON-151230769-project-member] Waiting for the task: (returnval){ [ 924.634786] env[61839]: value = "task-1314737" [ 924.634786] env[61839]: _type = "Task" [ 924.634786] env[61839]: } to complete. 
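[Editor's note] The instance_info_cache entry just above is a JSON-serializable network_info structure; note that the `devname` (`tapfd557952-f8`) is the port ID truncated to fit an interface name. A trimmed copy is enough to show how the useful fields hang together:

```python
# Trimmed from the network_info record logged above.
vif = {
    "id": "fd557952-f86b-487e-9fb9-8cb7d176384e",
    "address": "fa:16:3e:c1:5a:fc",
    "type": "ovs",
    "devname": "tapfd557952-f8",
    "network": {
        "bridge": "br-int",
        "subnets": [{
            "cidr": "192.168.128.0/28",
            "ips": [{"address": "192.168.128.10", "type": "fixed"}],
        }],
        "meta": {"mtu": 8950},
    },
    "details": {
        "nsx-logical-switch-id": "8c58d99d-ec12-4fc3-ab39-042b3f8cbb89",
    },
}

fixed_ips = [ip["address"]
             for subnet in vif["network"]["subnets"]
             for ip in subnet["ips"]]
print(vif["devname"], vif["address"], fixed_ips)
# tapfd557952-f8 fa:16:3e:c1:5a:fc ['192.168.128.10']
```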
{{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 924.642649] env[61839]: DEBUG oslo_concurrency.lockutils [None req-87a2dd96-4fb1-4151-85f6-7df76dfda5a7 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Acquiring lock "fa8a2265-291e-4424-bea1-72574e495a72" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 924.642953] env[61839]: DEBUG oslo_concurrency.lockutils [None req-87a2dd96-4fb1-4151-85f6-7df76dfda5a7 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Lock "fa8a2265-291e-4424-bea1-72574e495a72" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 924.644205] env[61839]: DEBUG oslo_concurrency.lockutils [None req-87a2dd96-4fb1-4151-85f6-7df76dfda5a7 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Acquiring lock "fa8a2265-291e-4424-bea1-72574e495a72-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 924.645314] env[61839]: DEBUG oslo_concurrency.lockutils [None req-87a2dd96-4fb1-4151-85f6-7df76dfda5a7 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Lock "fa8a2265-291e-4424-bea1-72574e495a72-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 924.645704] env[61839]: DEBUG oslo_concurrency.lockutils [None req-87a2dd96-4fb1-4151-85f6-7df76dfda5a7 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Lock "fa8a2265-291e-4424-bea1-72574e495a72-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.001s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 924.648642] env[61839]: INFO nova.compute.manager [None req-87a2dd96-4fb1-4151-85f6-7df76dfda5a7 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: fa8a2265-291e-4424-bea1-72574e495a72] Terminating instance [ 924.651036] env[61839]: DEBUG nova.compute.manager [None req-87a2dd96-4fb1-4151-85f6-7df76dfda5a7 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: fa8a2265-291e-4424-bea1-72574e495a72] Start destroying the instance on the hypervisor. 
{{(pid=61839) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 924.651279] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-87a2dd96-4fb1-4151-85f6-7df76dfda5a7 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: fa8a2265-291e-4424-bea1-72574e495a72] Destroying instance {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 924.655655] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d79185dd-927f-44f1-b554-f6ff5fd38d70 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.658630] env[61839]: DEBUG oslo_vmware.api [None req-0c8f58da-95bd-460c-b9d5-27f4db5a2eae tempest-ServerActionsV293TestJSON-151230769 tempest-ServerActionsV293TestJSON-151230769-project-member] Task: {'id': task-1314737, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.665673] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-87a2dd96-4fb1-4151-85f6-7df76dfda5a7 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: fa8a2265-291e-4424-bea1-72574e495a72] Powering off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 924.666666] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b7dc699c-a1ae-4876-aaf7-5b4f5215efff {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.674633] env[61839]: DEBUG oslo_vmware.api [None req-87a2dd96-4fb1-4151-85f6-7df76dfda5a7 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Waiting for the task: (returnval){ [ 924.674633] env[61839]: value = "task-1314738" [ 924.674633] env[61839]: _type = "Task" [ 924.674633] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 924.680020] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-1e94be6d-33e1-4c83-9b5e-75a1fbfe5edf tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] [instance: 86525ea7-af75-4b10-85a1-c0fbab73ea5f] Creating Snapshot of the VM instance {{(pid=61839) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 924.683648] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-5e3d87b9-ac31-4d86-b111-516f33e8308a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.686342] env[61839]: DEBUG oslo_vmware.api [None req-87a2dd96-4fb1-4151-85f6-7df76dfda5a7 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Task: {'id': task-1314738, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.693039] env[61839]: DEBUG oslo_vmware.api [None req-1e94be6d-33e1-4c83-9b5e-75a1fbfe5edf tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Waiting for the task: (returnval){ [ 924.693039] env[61839]: value = "task-1314739" [ 924.693039] env[61839]: _type = "Task" [ 924.693039] env[61839]: } to complete. 
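[Editor's note] The CreateSnapshot_Task invocation above belongs to the shelve flow for instance 86525ea7…: the VM is powered off, snapshotted, and the snapshot is later uploaded to Glance so the instance can be offloaded from the host. A sketch following the call shape in nova's vmwareapi driver; the name, description, and memory/quiesce choices here are illustrative:

```python
def create_vm_snapshot(session, vm_ref, name):
    snapshot_task = session._call_method(
        session.vim, 'CreateSnapshot_Task', vm_ref,
        name=name,
        description='Taken while shelving the instance',
        memory=False,   # no RAM dump needed for a powered-off VM
        quiesce=True)
    return session.wait_for_task(snapshot_task)
```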
{{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 924.705975] env[61839]: DEBUG oslo_vmware.api [None req-1e94be6d-33e1-4c83-9b5e-75a1fbfe5edf tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Task: {'id': task-1314739, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.715878] env[61839]: INFO nova.compute.manager [-] [instance: bac4c882-a23d-412f-ae98-f4f21d86681a] Took 1.37 seconds to deallocate network for instance. [ 924.784104] env[61839]: DEBUG oslo_concurrency.lockutils [None req-fd9d3f07-46d8-42a7-a204-834fbf9f5be1 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Acquiring lock "21e1c5b2-9865-457b-87c8-ce56c3c7b8f9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 924.784104] env[61839]: DEBUG oslo_concurrency.lockutils [None req-fd9d3f07-46d8-42a7-a204-834fbf9f5be1 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Lock "21e1c5b2-9865-457b-87c8-ce56c3c7b8f9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 924.840048] env[61839]: DEBUG oslo_vmware.api [None req-294e9c99-ffcc-4188-96c2-7bc59baf0651 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Task: {'id': task-1314736, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.310282} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 924.840048] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-294e9c99-ffcc-4188-96c2-7bc59baf0651 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Deleted the datastore file {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 924.840367] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-294e9c99-ffcc-4188-96c2-7bc59baf0651 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: ef49a6f5-27c3-4595-af65-d6a5aa47d4e4] Deleted contents of the VM from datastore datastore1 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 924.840790] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-294e9c99-ffcc-4188-96c2-7bc59baf0651 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: ef49a6f5-27c3-4595-af65-d6a5aa47d4e4] Instance destroyed {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 924.841252] env[61839]: INFO nova.compute.manager [None req-294e9c99-ffcc-4188-96c2-7bc59baf0651 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: ef49a6f5-27c3-4595-af65-d6a5aa47d4e4] Took 1.13 seconds to destroy the instance on the hypervisor. [ 924.841805] env[61839]: DEBUG oslo.service.loopingcall [None req-294e9c99-ffcc-4188-96c2-7bc59baf0651 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 924.844021] env[61839]: DEBUG nova.compute.manager [-] [instance: ef49a6f5-27c3-4595-af65-d6a5aa47d4e4] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 924.844021] env[61839]: DEBUG nova.network.neutron [-] [instance: ef49a6f5-27c3-4595-af65-d6a5aa47d4e4] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 925.052418] env[61839]: DEBUG nova.compute.manager [req-7c4c1bc1-3fb8-47c7-b6d1-22f88cd5aa55 req-b98a05ad-89bb-4c78-8677-fc3801aa4f03 service nova] [instance: 56369316-a445-4a2a-a0a6-967074104e19] Received event network-changed-ef2288b6-c4de-43f8-95c9-22511e164c36 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 925.052625] env[61839]: DEBUG nova.compute.manager [req-7c4c1bc1-3fb8-47c7-b6d1-22f88cd5aa55 req-b98a05ad-89bb-4c78-8677-fc3801aa4f03 service nova] [instance: 56369316-a445-4a2a-a0a6-967074104e19] Refreshing instance network info cache due to event network-changed-ef2288b6-c4de-43f8-95c9-22511e164c36. {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 925.052846] env[61839]: DEBUG oslo_concurrency.lockutils [req-7c4c1bc1-3fb8-47c7-b6d1-22f88cd5aa55 req-b98a05ad-89bb-4c78-8677-fc3801aa4f03 service nova] Acquiring lock "refresh_cache-56369316-a445-4a2a-a0a6-967074104e19" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 925.052998] env[61839]: DEBUG oslo_concurrency.lockutils [req-7c4c1bc1-3fb8-47c7-b6d1-22f88cd5aa55 req-b98a05ad-89bb-4c78-8677-fc3801aa4f03 service nova] Acquired lock "refresh_cache-56369316-a445-4a2a-a0a6-967074104e19" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 925.053184] env[61839]: DEBUG nova.network.neutron [req-7c4c1bc1-3fb8-47c7-b6d1-22f88cd5aa55 req-b98a05ad-89bb-4c78-8677-fc3801aa4f03 service nova] [instance: 56369316-a445-4a2a-a0a6-967074104e19] Refreshing network info cache for port ef2288b6-c4de-43f8-95c9-22511e164c36 {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 925.085226] env[61839]: DEBUG nova.objects.instance [None req-bf3bcffb-6c34-4e16-a353-b3f853008852 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Lazy-loading 'numa_topology' on Instance uuid 694a5d4b-3673-406b-a24a-d37fad33e549 {{(pid=61839) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 925.106852] env[61839]: DEBUG oslo_concurrency.lockutils [None req-38baf470-d8c9-4f5a-bcbc-11976bf3f84e tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Lock "0d42326a-9958-463a-90ae-34fb55e99c5b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 12.947s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 925.130460] env[61839]: DEBUG oslo_vmware.api [None req-8237778e-4e32-46ea-924d-34926c83603d tempest-ServerAddressesNegativeTestJSON-1352160384 tempest-ServerAddressesNegativeTestJSON-1352160384-project-member] Task: {'id': task-1314734, 'name': PowerOnVM_Task, 'duration_secs': 0.611241} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 925.130896] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-8237778e-4e32-46ea-924d-34926c83603d tempest-ServerAddressesNegativeTestJSON-1352160384 tempest-ServerAddressesNegativeTestJSON-1352160384-project-member] [instance: 12087baa-e700-4977-b2df-3aa2c56cc2f6] Powered on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}}
[ 925.131013] env[61839]: INFO nova.compute.manager [None req-8237778e-4e32-46ea-924d-34926c83603d tempest-ServerAddressesNegativeTestJSON-1352160384 tempest-ServerAddressesNegativeTestJSON-1352160384-project-member] [instance: 12087baa-e700-4977-b2df-3aa2c56cc2f6] Took 11.69 seconds to spawn the instance on the hypervisor.
[ 925.131234] env[61839]: DEBUG nova.compute.manager [None req-8237778e-4e32-46ea-924d-34926c83603d tempest-ServerAddressesNegativeTestJSON-1352160384 tempest-ServerAddressesNegativeTestJSON-1352160384-project-member] [instance: 12087baa-e700-4977-b2df-3aa2c56cc2f6] Checking state {{(pid=61839) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}}
[ 925.132073] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46a00543-742d-40e5-882a-00b0638e3c9e {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 925.137308] env[61839]: DEBUG oslo_concurrency.lockutils [None req-23057198-cea0-4ec8-8098-1a162c29f89f tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Releasing lock "refresh_cache-e265dcd3-6ab5-44b1-85be-bad934ebdb79" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 925.137624] env[61839]: DEBUG nova.compute.manager [None req-23057198-cea0-4ec8-8098-1a162c29f89f tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: e265dcd3-6ab5-44b1-85be-bad934ebdb79] Instance network_info: |[{"id": "fd557952-f86b-487e-9fb9-8cb7d176384e", "address": "fa:16:3e:c1:5a:fc", "network": {"id": "47b25b31-1262-485a-a847-2918f7fce488", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-914746239-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "25686a503d044467a1d641f14e14c65c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8c58d99d-ec12-4fc3-ab39-042b3f8cbb89", "external-id": "nsx-vlan-transportzone-44", "segmentation_id": 44, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfd557952-f8", "ovs_interfaceid": "fd557952-f86b-487e-9fb9-8cb7d176384e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}}
[ 925.138102] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-23057198-cea0-4ec8-8098-1a162c29f89f tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: e265dcd3-6ab5-44b1-85be-bad934ebdb79] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c1:5a:fc', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8c58d99d-ec12-4fc3-ab39-042b3f8cbb89', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'fd557952-f86b-487e-9fb9-8cb7d176384e', 'vif_model': 'vmxnet3'}] {{(pid=61839) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}}
[ 925.146887] env[61839]: DEBUG oslo.service.loopingcall [None req-23057198-cea0-4ec8-8098-1a162c29f89f tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 925.151463] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e265dcd3-6ab5-44b1-85be-bad934ebdb79] Creating VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}}
[ 925.151806] env[61839]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2c5ed1c0-25db-4fe6-abcd-280d53e5720f {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 925.181183] env[61839]: DEBUG oslo_vmware.api [None req-0c8f58da-95bd-460c-b9d5-27f4db5a2eae tempest-ServerActionsV293TestJSON-151230769 tempest-ServerActionsV293TestJSON-151230769-project-member] Task: {'id': task-1314737, 'name': PowerOffVM_Task, 'duration_secs': 0.224031} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 925.185179] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-0c8f58da-95bd-460c-b9d5-27f4db5a2eae tempest-ServerActionsV293TestJSON-151230769 tempest-ServerActionsV293TestJSON-151230769-project-member] [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af] Powered off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}}
[ 925.186469] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-0c8f58da-95bd-460c-b9d5-27f4db5a2eae tempest-ServerActionsV293TestJSON-151230769 tempest-ServerActionsV293TestJSON-151230769-project-member] [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af] Powering off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}}
[ 925.188021] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-96ba1b08-f566-46ac-adbe-470692d517f4 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 925.193023] env[61839]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){
[ 925.193023] env[61839]: value = "task-1314740"
[ 925.193023] env[61839]: _type = "Task"
[ 925.193023] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 925.197938] env[61839]: DEBUG oslo_vmware.api [None req-87a2dd96-4fb1-4151-85f6-7df76dfda5a7 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Task: {'id': task-1314738, 'name': PowerOffVM_Task, 'duration_secs': 0.182346} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 925.212194] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-87a2dd96-4fb1-4151-85f6-7df76dfda5a7 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: fa8a2265-291e-4424-bea1-72574e495a72] Powered off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}}
[ 925.212194] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-87a2dd96-4fb1-4151-85f6-7df76dfda5a7 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: fa8a2265-291e-4424-bea1-72574e495a72] Unregistering the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}}
[ 925.212194] env[61839]: DEBUG oslo_vmware.api [None req-0c8f58da-95bd-460c-b9d5-27f4db5a2eae tempest-ServerActionsV293TestJSON-151230769 tempest-ServerActionsV293TestJSON-151230769-project-member] Waiting for the task: (returnval){
[ 925.212194] env[61839]: value = "task-1314741"
[ 925.212194] env[61839]: _type = "Task"
[ 925.212194] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 925.212194] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2d9d8e6a-e0e7-44a3-85fb-643ea8838e54 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 925.221048] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314740, 'name': CreateVM_Task} progress is 6%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 925.226299] env[61839]: DEBUG oslo_concurrency.lockutils [None req-da461224-3a6a-4880-b121-dec82bb4b069 tempest-ServerMetadataTestJSON-214230339 tempest-ServerMetadataTestJSON-214230339-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 925.226750] env[61839]: DEBUG oslo_vmware.api [None req-1e94be6d-33e1-4c83-9b5e-75a1fbfe5edf tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Task: {'id': task-1314739, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 925.233912] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-0c8f58da-95bd-460c-b9d5-27f4db5a2eae tempest-ServerActionsV293TestJSON-151230769 tempest-ServerActionsV293TestJSON-151230769-project-member] [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af] VM already powered off {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}}
[ 925.234922] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-0c8f58da-95bd-460c-b9d5-27f4db5a2eae tempest-ServerActionsV293TestJSON-151230769 tempest-ServerActionsV293TestJSON-151230769-project-member] [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af] Volume detach. Driver type: vmdk {{(pid=61839) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}}
[ 925.234922] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-0c8f58da-95bd-460c-b9d5-27f4db5a2eae tempest-ServerActionsV293TestJSON-151230769 tempest-ServerActionsV293TestJSON-151230769-project-member] [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-281364', 'volume_id': 'b82148bd-7b88-45c7-b95b-5f60f19c65e5', 'name': 'volume-b82148bd-7b88-45c7-b95b-5f60f19c65e5', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'd187e75f-39a9-467b-b5ef-e2772d9b71af', 'attached_at': '', 'detached_at': '', 'volume_id': 'b82148bd-7b88-45c7-b95b-5f60f19c65e5', 'serial': 'b82148bd-7b88-45c7-b95b-5f60f19c65e5'} {{(pid=61839) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}}
[ 925.235156] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0c0f0ec-aaa0-484e-80df-164aa77e6cfb {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 925.260927] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4658d41-ecee-4cbf-bb73-de957bf743a6 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 925.272109] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3badd4d-1857-4504-86b0-770fadde255e {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 925.295232] env[61839]: DEBUG nova.compute.manager [None req-fd9d3f07-46d8-42a7-a204-834fbf9f5be1 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: 21e1c5b2-9865-457b-87c8-ce56c3c7b8f9] Starting instance... {{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}}
[ 925.298639] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e140bdeb-3887-4fa6-accb-7610d8dba304 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 925.303215] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-87a2dd96-4fb1-4151-85f6-7df76dfda5a7 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: fa8a2265-291e-4424-bea1-72574e495a72] Unregistered the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}}
[ 925.307025] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-87a2dd96-4fb1-4151-85f6-7df76dfda5a7 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: fa8a2265-291e-4424-bea1-72574e495a72] Deleting contents of the VM from datastore datastore2 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}}
[ 925.307025] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-87a2dd96-4fb1-4151-85f6-7df76dfda5a7 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Deleting the datastore file [datastore2] fa8a2265-291e-4424-bea1-72574e495a72 {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}}
[ 925.307025] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f952c5ff-ec54-4195-991b-4039217eed72 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 925.319145] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-0c8f58da-95bd-460c-b9d5-27f4db5a2eae tempest-ServerActionsV293TestJSON-151230769 tempest-ServerActionsV293TestJSON-151230769-project-member] The volume has not been displaced from its original location: [datastore1] volume-b82148bd-7b88-45c7-b95b-5f60f19c65e5/volume-b82148bd-7b88-45c7-b95b-5f60f19c65e5.vmdk. No consolidation needed. {{(pid=61839) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}}
[ 925.326289] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-0c8f58da-95bd-460c-b9d5-27f4db5a2eae tempest-ServerActionsV293TestJSON-151230769 tempest-ServerActionsV293TestJSON-151230769-project-member] [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af] Reconfiguring VM instance instance-00000050 to detach disk 2000 {{(pid=61839) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}}
[ 925.328046] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-830c1b31-a87d-4b23-b5ba-987fc8ea5e7b {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 925.341682] env[61839]: DEBUG oslo_vmware.api [None req-87a2dd96-4fb1-4151-85f6-7df76dfda5a7 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Waiting for the task: (returnval){
[ 925.341682] env[61839]: value = "task-1314743"
[ 925.341682] env[61839]: _type = "Task"
[ 925.341682] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 925.349969] env[61839]: DEBUG oslo_vmware.api [None req-0c8f58da-95bd-460c-b9d5-27f4db5a2eae tempest-ServerActionsV293TestJSON-151230769 tempest-ServerActionsV293TestJSON-151230769-project-member] Waiting for the task: (returnval){
[ 925.349969] env[61839]: value = "task-1314744"
[ 925.349969] env[61839]: _type = "Task"
[ 925.349969] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 925.353657] env[61839]: DEBUG oslo_vmware.api [None req-87a2dd96-4fb1-4151-85f6-7df76dfda5a7 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Task: {'id': task-1314743, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 925.362963] env[61839]: DEBUG oslo_vmware.api [None req-0c8f58da-95bd-460c-b9d5-27f4db5a2eae tempest-ServerActionsV293TestJSON-151230769 tempest-ServerActionsV293TestJSON-151230769-project-member] Task: {'id': task-1314744, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 925.587384] env[61839]: INFO nova.compute.claims [None req-bf3bcffb-6c34-4e16-a353-b3f853008852 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: 694a5d4b-3673-406b-a24a-d37fad33e549] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 925.695027] env[61839]: INFO nova.compute.manager [None req-8237778e-4e32-46ea-924d-34926c83603d tempest-ServerAddressesNegativeTestJSON-1352160384 tempest-ServerAddressesNegativeTestJSON-1352160384-project-member] [instance: 12087baa-e700-4977-b2df-3aa2c56cc2f6] Took 18.95 seconds to build instance.
[ 925.713014] env[61839]: DEBUG oslo_vmware.api [None req-1e94be6d-33e1-4c83-9b5e-75a1fbfe5edf tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Task: {'id': task-1314739, 'name': CreateSnapshot_Task, 'duration_secs': 0.825927} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 925.717233] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-1e94be6d-33e1-4c83-9b5e-75a1fbfe5edf tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] [instance: 86525ea7-af75-4b10-85a1-c0fbab73ea5f] Created Snapshot of the VM instance {{(pid=61839) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}}
[ 925.717856] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314740, 'name': CreateVM_Task, 'duration_secs': 0.390887} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 925.718793] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e495392f-a970-4867-87b3-1dba6757acf0 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 925.721532] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e265dcd3-6ab5-44b1-85be-bad934ebdb79] Created VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}}
[ 925.722688] env[61839]: DEBUG oslo_concurrency.lockutils [None req-23057198-cea0-4ec8-8098-1a162c29f89f tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 925.723291] env[61839]: DEBUG oslo_concurrency.lockutils [None req-23057198-cea0-4ec8-8098-1a162c29f89f tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 925.723291] env[61839]: DEBUG oslo_concurrency.lockutils [None req-23057198-cea0-4ec8-8098-1a162c29f89f tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 925.723910] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e6b2b37f-8a99-4203-a38d-42de5f31c428 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 925.733757] env[61839]: DEBUG oslo_vmware.api [None req-23057198-cea0-4ec8-8098-1a162c29f89f tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Waiting for the task: (returnval){
[ 925.733757] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52cdef68-8ad8-2b11-217d-dacb00b504c4"
[ 925.733757] env[61839]: _type = "Task"
[ 925.733757] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 925.742807] env[61839]: DEBUG nova.network.neutron [-] [instance: ef49a6f5-27c3-4595-af65-d6a5aa47d4e4] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 925.743980] env[61839]: DEBUG oslo_vmware.api [None req-23057198-cea0-4ec8-8098-1a162c29f89f tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52cdef68-8ad8-2b11-217d-dacb00b504c4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 925.748097] env[61839]: DEBUG oslo_concurrency.lockutils [None req-ac991200-7ce6-466a-b5fd-f91f890fd8c6 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Acquiring lock "23ee24d5-bccd-497d-a53f-b9723fd9c707" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 925.748347] env[61839]: DEBUG oslo_concurrency.lockutils [None req-ac991200-7ce6-466a-b5fd-f91f890fd8c6 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Lock "23ee24d5-bccd-497d-a53f-b9723fd9c707" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 925.824420] env[61839]: DEBUG oslo_concurrency.lockutils [None req-fd9d3f07-46d8-42a7-a204-834fbf9f5be1 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 925.852936] env[61839]: DEBUG oslo_vmware.api [None req-87a2dd96-4fb1-4151-85f6-7df76dfda5a7 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Task: {'id': task-1314743, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.181291} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 925.855479] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-87a2dd96-4fb1-4151-85f6-7df76dfda5a7 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Deleted the datastore file {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}}
[ 925.855479] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-87a2dd96-4fb1-4151-85f6-7df76dfda5a7 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: fa8a2265-291e-4424-bea1-72574e495a72] Deleted contents of the VM from datastore datastore2 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}}
[ 925.855479] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-87a2dd96-4fb1-4151-85f6-7df76dfda5a7 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: fa8a2265-291e-4424-bea1-72574e495a72] Instance destroyed {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}}
[ 925.855479] env[61839]: INFO nova.compute.manager [None req-87a2dd96-4fb1-4151-85f6-7df76dfda5a7 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: fa8a2265-291e-4424-bea1-72574e495a72] Took 1.20 seconds to destroy the instance on the hypervisor.
[ 925.855479] env[61839]: DEBUG oslo.service.loopingcall [None req-87a2dd96-4fb1-4151-85f6-7df76dfda5a7 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 925.857650] env[61839]: DEBUG nova.compute.manager [-] [instance: fa8a2265-291e-4424-bea1-72574e495a72] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}}
[ 925.857650] env[61839]: DEBUG nova.network.neutron [-] [instance: fa8a2265-291e-4424-bea1-72574e495a72] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}}
[ 925.865558] env[61839]: DEBUG oslo_vmware.api [None req-0c8f58da-95bd-460c-b9d5-27f4db5a2eae tempest-ServerActionsV293TestJSON-151230769 tempest-ServerActionsV293TestJSON-151230769-project-member] Task: {'id': task-1314744, 'name': ReconfigVM_Task, 'duration_secs': 0.305513} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 925.865824] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-0c8f58da-95bd-460c-b9d5-27f4db5a2eae tempest-ServerActionsV293TestJSON-151230769 tempest-ServerActionsV293TestJSON-151230769-project-member] [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af] Reconfigured VM instance instance-00000050 to detach disk 2000 {{(pid=61839) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}}
[ 925.873190] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-329f78a6-c6f0-4b52-bac3-0639752907f1 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 925.890425] env[61839]: DEBUG oslo_vmware.api [None req-0c8f58da-95bd-460c-b9d5-27f4db5a2eae tempest-ServerActionsV293TestJSON-151230769 tempest-ServerActionsV293TestJSON-151230769-project-member] Waiting for the task: (returnval){
[ 925.890425] env[61839]: value = "task-1314745"
[ 925.890425] env[61839]: _type = "Task"
[ 925.890425] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 925.899415] env[61839]: DEBUG oslo_vmware.api [None req-0c8f58da-95bd-460c-b9d5-27f4db5a2eae tempest-ServerActionsV293TestJSON-151230769 tempest-ServerActionsV293TestJSON-151230769-project-member] Task: {'id': task-1314745, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 925.932653] env[61839]: DEBUG oslo_concurrency.lockutils [None req-174fbd95-4416-44cd-a955-056506396bd1 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Acquiring lock "fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 925.932938] env[61839]: DEBUG oslo_concurrency.lockutils [None req-174fbd95-4416-44cd-a955-056506396bd1 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Lock "fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 925.933149] env[61839]: DEBUG nova.compute.manager [None req-174fbd95-4416-44cd-a955-056506396bd1 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4] Checking state {{(pid=61839) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}}
[ 925.934057] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47b0e0e0-a38e-430b-8c58-5055a2816343 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 925.941022] env[61839]: DEBUG nova.compute.manager [None req-174fbd95-4416-44cd-a955-056506396bd1 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=61839) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3368}}
[ 925.941601] env[61839]: DEBUG nova.objects.instance [None req-174fbd95-4416-44cd-a955-056506396bd1 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Lazy-loading 'flavor' on Instance uuid fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4 {{(pid=61839) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}}
[ 926.195441] env[61839]: DEBUG oslo_concurrency.lockutils [None req-8237778e-4e32-46ea-924d-34926c83603d tempest-ServerAddressesNegativeTestJSON-1352160384 tempest-ServerAddressesNegativeTestJSON-1352160384-project-member] Lock "12087baa-e700-4977-b2df-3aa2c56cc2f6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.460s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 926.242085] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-1e94be6d-33e1-4c83-9b5e-75a1fbfe5edf tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] [instance: 86525ea7-af75-4b10-85a1-c0fbab73ea5f] Creating linked-clone VM from snapshot {{(pid=61839) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}}
[ 926.242848] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-534017fe-084e-451b-98d2-89adafde9700 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 926.248977] env[61839]: INFO nova.compute.manager [-] [instance: ef49a6f5-27c3-4595-af65-d6a5aa47d4e4] Took 1.41 seconds to deallocate network for instance.
[ 926.252015] env[61839]: DEBUG nova.compute.manager [None req-ac991200-7ce6-466a-b5fd-f91f890fd8c6 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 23ee24d5-bccd-497d-a53f-b9723fd9c707] Starting instance... {{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}}
[ 926.257709] env[61839]: DEBUG oslo_vmware.api [None req-23057198-cea0-4ec8-8098-1a162c29f89f tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52cdef68-8ad8-2b11-217d-dacb00b504c4, 'name': SearchDatastore_Task, 'duration_secs': 0.019507} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 926.261688] env[61839]: DEBUG oslo_concurrency.lockutils [None req-23057198-cea0-4ec8-8098-1a162c29f89f tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 926.261962] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-23057198-cea0-4ec8-8098-1a162c29f89f tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: e265dcd3-6ab5-44b1-85be-bad934ebdb79] Processing image e497cc62-282a-4a70-9770-22d80d8a1013 {{(pid=61839) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}}
[ 926.262258] env[61839]: DEBUG oslo_concurrency.lockutils [None req-23057198-cea0-4ec8-8098-1a162c29f89f tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 926.262441] env[61839]: DEBUG oslo_concurrency.lockutils [None req-23057198-cea0-4ec8-8098-1a162c29f89f tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 926.262652] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-23057198-cea0-4ec8-8098-1a162c29f89f tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 926.262983] env[61839]: DEBUG oslo_vmware.api [None req-1e94be6d-33e1-4c83-9b5e-75a1fbfe5edf tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Waiting for the task: (returnval){
[ 926.262983] env[61839]: value = "task-1314746"
[ 926.262983] env[61839]: _type = "Task"
[ 926.262983] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 926.263349] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b934cdb6-15e0-44c2-b66e-8daf67317121 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 926.278955] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-23057198-cea0-4ec8-8098-1a162c29f89f tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 926.279189] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-23057198-cea0-4ec8-8098-1a162c29f89f tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61839) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}}
[ 926.279899] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-24b21d40-dabc-4f9f-ad9b-5451de5e979f {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 926.285381] env[61839]: DEBUG oslo_vmware.api [None req-23057198-cea0-4ec8-8098-1a162c29f89f tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Waiting for the task: (returnval){
[ 926.285381] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52065b69-a086-c047-3177-b0953298a44d"
[ 926.285381] env[61839]: _type = "Task"
[ 926.285381] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 926.295614] env[61839]: DEBUG oslo_vmware.api [None req-23057198-cea0-4ec8-8098-1a162c29f89f tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52065b69-a086-c047-3177-b0953298a44d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 926.339248] env[61839]: DEBUG nova.network.neutron [req-7c4c1bc1-3fb8-47c7-b6d1-22f88cd5aa55 req-b98a05ad-89bb-4c78-8677-fc3801aa4f03 service nova] [instance: 56369316-a445-4a2a-a0a6-967074104e19] Updated VIF entry in instance network info cache for port ef2288b6-c4de-43f8-95c9-22511e164c36. {{(pid=61839) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}}
[ 926.339628] env[61839]: DEBUG nova.network.neutron [req-7c4c1bc1-3fb8-47c7-b6d1-22f88cd5aa55 req-b98a05ad-89bb-4c78-8677-fc3801aa4f03 service nova] [instance: 56369316-a445-4a2a-a0a6-967074104e19] Updating instance_info_cache with network_info: [{"id": "ef2288b6-c4de-43f8-95c9-22511e164c36", "address": "fa:16:3e:c7:71:6a", "network": {"id": "6f1d4bda-c333-4556-9530-df719c058a5b", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1248001983-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.162", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "223d94c193814f649b5d1f35e3756071", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2180b40f-2bb0-47da-ba80-c2fbe7f98af0", "external-id": "nsx-vlan-transportzone-970", "segmentation_id": 970, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapef2288b6-c4", "ovs_interfaceid": "ef2288b6-c4de-43f8-95c9-22511e164c36", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 926.400663] env[61839]: DEBUG oslo_vmware.api [None req-0c8f58da-95bd-460c-b9d5-27f4db5a2eae tempest-ServerActionsV293TestJSON-151230769 tempest-ServerActionsV293TestJSON-151230769-project-member] Task: {'id': task-1314745, 'name': ReconfigVM_Task, 'duration_secs': 0.254062} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 926.400996] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-0c8f58da-95bd-460c-b9d5-27f4db5a2eae tempest-ServerActionsV293TestJSON-151230769 tempest-ServerActionsV293TestJSON-151230769-project-member] [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-281364', 'volume_id': 'b82148bd-7b88-45c7-b95b-5f60f19c65e5', 'name': 'volume-b82148bd-7b88-45c7-b95b-5f60f19c65e5', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'd187e75f-39a9-467b-b5ef-e2772d9b71af', 'attached_at': '', 'detached_at': '', 'volume_id': 'b82148bd-7b88-45c7-b95b-5f60f19c65e5', 'serial': 'b82148bd-7b88-45c7-b95b-5f60f19c65e5'} {{(pid=61839) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}}
[ 926.401353] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-0c8f58da-95bd-460c-b9d5-27f4db5a2eae tempest-ServerActionsV293TestJSON-151230769 tempest-ServerActionsV293TestJSON-151230769-project-member] [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af] Destroying instance {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}}
[ 926.402164] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-842d66ab-7e5e-43d4-bafe-60b9f05a41b7 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 926.410335] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-0c8f58da-95bd-460c-b9d5-27f4db5a2eae tempest-ServerActionsV293TestJSON-151230769 tempest-ServerActionsV293TestJSON-151230769-project-member] [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af] Unregistering the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}}
[ 926.410634] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8398dd62-b3e6-49da-aa35-c32ff7a4aa38 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 926.447571] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-174fbd95-4416-44cd-a955-056506396bd1 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4] Powering off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}}
[ 926.447860] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cac80830-b187-4017-9971-0f07c9a04ed6 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 926.455527] env[61839]: DEBUG oslo_vmware.api [None req-174fbd95-4416-44cd-a955-056506396bd1 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Waiting for the task: (returnval){
[ 926.455527] env[61839]: value = "task-1314748"
[ 926.455527] env[61839]: _type = "Task"
[ 926.455527] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 926.464480] env[61839]: DEBUG oslo_vmware.api [None req-174fbd95-4416-44cd-a955-056506396bd1 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1314748, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 926.479255] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-0c8f58da-95bd-460c-b9d5-27f4db5a2eae tempest-ServerActionsV293TestJSON-151230769 tempest-ServerActionsV293TestJSON-151230769-project-member] [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af] Unregistered the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}}
[ 926.479514] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-0c8f58da-95bd-460c-b9d5-27f4db5a2eae tempest-ServerActionsV293TestJSON-151230769 tempest-ServerActionsV293TestJSON-151230769-project-member] [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af] Deleting contents of the VM from datastore datastore1 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}}
[ 926.479747] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-0c8f58da-95bd-460c-b9d5-27f4db5a2eae tempest-ServerActionsV293TestJSON-151230769 tempest-ServerActionsV293TestJSON-151230769-project-member] Deleting the datastore file [datastore1] d187e75f-39a9-467b-b5ef-e2772d9b71af {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}}
[ 926.480025] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a9e8f216-2928-47ec-8ff1-f3cf8a8272a8 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 926.487381] env[61839]: DEBUG oslo_vmware.api [None req-0c8f58da-95bd-460c-b9d5-27f4db5a2eae tempest-ServerActionsV293TestJSON-151230769 tempest-ServerActionsV293TestJSON-151230769-project-member] Waiting for the task: (returnval){
[ 926.487381] env[61839]: value = "task-1314749"
[ 926.487381] env[61839]: _type = "Task"
[ 926.487381] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 926.497294] env[61839]: DEBUG oslo_vmware.api [None req-0c8f58da-95bd-460c-b9d5-27f4db5a2eae tempest-ServerActionsV293TestJSON-151230769 tempest-ServerActionsV293TestJSON-151230769-project-member] Task: {'id': task-1314749, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 926.765459] env[61839]: DEBUG oslo_concurrency.lockutils [None req-294e9c99-ffcc-4188-96c2-7bc59baf0651 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 926.776416] env[61839]: DEBUG oslo_vmware.api [None req-1e94be6d-33e1-4c83-9b5e-75a1fbfe5edf tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Task: {'id': task-1314746, 'name': CloneVM_Task} progress is 94%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 926.780723] env[61839]: DEBUG oslo_concurrency.lockutils [None req-ac991200-7ce6-466a-b5fd-f91f890fd8c6 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 926.798293] env[61839]: DEBUG oslo_vmware.api [None req-23057198-cea0-4ec8-8098-1a162c29f89f tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52065b69-a086-c047-3177-b0953298a44d, 'name': SearchDatastore_Task, 'duration_secs': 0.015196} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 926.799140] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1b636675-c6b7-412c-b272-b0b276fafa1a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 926.807129] env[61839]: DEBUG oslo_vmware.api [None req-23057198-cea0-4ec8-8098-1a162c29f89f tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Waiting for the task: (returnval){
[ 926.807129] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52dc1d4a-4ac9-87eb-9170-5fa3ff1afc60"
[ 926.807129] env[61839]: _type = "Task"
[ 926.807129] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 926.817680] env[61839]: DEBUG oslo_vmware.api [None req-23057198-cea0-4ec8-8098-1a162c29f89f tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52dc1d4a-4ac9-87eb-9170-5fa3ff1afc60, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 926.843230] env[61839]: DEBUG oslo_concurrency.lockutils [req-7c4c1bc1-3fb8-47c7-b6d1-22f88cd5aa55 req-b98a05ad-89bb-4c78-8677-fc3801aa4f03 service nova] Releasing lock "refresh_cache-56369316-a445-4a2a-a0a6-967074104e19" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 926.843652] env[61839]: DEBUG nova.compute.manager [req-7c4c1bc1-3fb8-47c7-b6d1-22f88cd5aa55 req-b98a05ad-89bb-4c78-8677-fc3801aa4f03 service nova] [instance: 56369316-a445-4a2a-a0a6-967074104e19] Received event network-changed-ef2288b6-c4de-43f8-95c9-22511e164c36 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}}
[ 926.843716] env[61839]: DEBUG nova.compute.manager [req-7c4c1bc1-3fb8-47c7-b6d1-22f88cd5aa55 req-b98a05ad-89bb-4c78-8677-fc3801aa4f03 service nova] [instance: 56369316-a445-4a2a-a0a6-967074104e19] Refreshing instance network info cache due to event network-changed-ef2288b6-c4de-43f8-95c9-22511e164c36. {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}}
[ 926.843902] env[61839]: DEBUG oslo_concurrency.lockutils [req-7c4c1bc1-3fb8-47c7-b6d1-22f88cd5aa55 req-b98a05ad-89bb-4c78-8677-fc3801aa4f03 service nova] Acquiring lock "refresh_cache-56369316-a445-4a2a-a0a6-967074104e19" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 926.844108] env[61839]: DEBUG oslo_concurrency.lockutils [req-7c4c1bc1-3fb8-47c7-b6d1-22f88cd5aa55 req-b98a05ad-89bb-4c78-8677-fc3801aa4f03 service nova] Acquired lock "refresh_cache-56369316-a445-4a2a-a0a6-967074104e19" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 926.844261] env[61839]: DEBUG nova.network.neutron [req-7c4c1bc1-3fb8-47c7-b6d1-22f88cd5aa55 req-b98a05ad-89bb-4c78-8677-fc3801aa4f03 service nova] [instance: 56369316-a445-4a2a-a0a6-967074104e19] Refreshing network info cache for port ef2288b6-c4de-43f8-95c9-22511e164c36 {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}}
[ 926.875463] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b613fd4e-22b1-416d-91a6-cd25636e4e99 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 926.885342] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efb039db-60b4-46d8-ae77-fdb6ca1d7449 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 926.919717] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6dced096-f542-4af7-9712-9a2a7f139bc2 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 926.931231] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e1ddf81-1b80-41e9-aab8-d82754988c54 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 926.947745] env[61839]: DEBUG nova.compute.provider_tree [None req-bf3bcffb-6c34-4e16-a353-b3f853008852 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 926.967243] env[61839]: DEBUG oslo_vmware.api [None req-174fbd95-4416-44cd-a955-056506396bd1 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1314748, 'name': PowerOffVM_Task, 'duration_secs': 0.205249} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 926.967541] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-174fbd95-4416-44cd-a955-056506396bd1 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4] Powered off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}}
[ 926.967792] env[61839]: DEBUG nova.compute.manager [None req-174fbd95-4416-44cd-a955-056506396bd1 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4] Checking state {{(pid=61839) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}}
[ 926.968608] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44b5ac2d-9b86-4d93-81a5-423f51e32e52 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 926.997268] env[61839]: DEBUG oslo_vmware.api [None req-0c8f58da-95bd-460c-b9d5-27f4db5a2eae tempest-ServerActionsV293TestJSON-151230769 tempest-ServerActionsV293TestJSON-151230769-project-member] Task: {'id': task-1314749, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.077999} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 926.997539] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-0c8f58da-95bd-460c-b9d5-27f4db5a2eae tempest-ServerActionsV293TestJSON-151230769 tempest-ServerActionsV293TestJSON-151230769-project-member] Deleted the datastore file {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}}
[ 926.997734] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-0c8f58da-95bd-460c-b9d5-27f4db5a2eae tempest-ServerActionsV293TestJSON-151230769 tempest-ServerActionsV293TestJSON-151230769-project-member] [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af] Deleted contents of the VM from datastore datastore1 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}}
[ 926.997919] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-0c8f58da-95bd-460c-b9d5-27f4db5a2eae tempest-ServerActionsV293TestJSON-151230769 tempest-ServerActionsV293TestJSON-151230769-project-member] [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af] Instance destroyed {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}}
[ 927.054344] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-0c8f58da-95bd-460c-b9d5-27f4db5a2eae tempest-ServerActionsV293TestJSON-151230769 tempest-ServerActionsV293TestJSON-151230769-project-member] [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af] Volume detach. Driver type: vmdk {{(pid=61839) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}}
[ 927.054728] env[61839]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4d0d3052-da6c-4eaf-963e-e5381a1733f2 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 927.065028] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d5ce525-37e1-47e5-9fd7-377a9f0ff074 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 927.097112] env[61839]: ERROR nova.compute.manager [None req-0c8f58da-95bd-460c-b9d5-27f4db5a2eae tempest-ServerActionsV293TestJSON-151230769 tempest-ServerActionsV293TestJSON-151230769-project-member] [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af] Failed to detach volume b82148bd-7b88-45c7-b95b-5f60f19c65e5 from /dev/sda: nova.exception.InstanceNotFound: Instance d187e75f-39a9-467b-b5ef-e2772d9b71af could not be found.
[ 927.097112] env[61839]: ERROR nova.compute.manager [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af] Traceback (most recent call last):
[ 927.097112] env[61839]: ERROR nova.compute.manager [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af] File "/opt/stack/nova/nova/compute/manager.py", line 4142, in _do_rebuild_instance
[ 927.097112] env[61839]: ERROR nova.compute.manager [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af] self.driver.rebuild(**kwargs)
[ 927.097112] env[61839]: ERROR nova.compute.manager [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af] File "/opt/stack/nova/nova/virt/driver.py", line 493, in rebuild
[ 927.097112] env[61839]: ERROR nova.compute.manager [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af] raise NotImplementedError()
[ 927.097112] env[61839]: ERROR nova.compute.manager [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af] NotImplementedError
[ 927.097112] env[61839]: ERROR nova.compute.manager [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af]
[ 927.097112] env[61839]: ERROR nova.compute.manager [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af] During handling of the above exception, another exception occurred:
[ 927.097112] env[61839]: ERROR nova.compute.manager [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af]
[ 927.097112] env[61839]: ERROR nova.compute.manager [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af] Traceback (most recent call last):
[ 927.097112] env[61839]: ERROR nova.compute.manager [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af] File "/opt/stack/nova/nova/compute/manager.py", line 3565, in _detach_root_volume
[ 927.097112] env[61839]: ERROR nova.compute.manager [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af] self.driver.detach_volume(context, old_connection_info,
[ 927.097112] env[61839]: ERROR nova.compute.manager [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 559, in detach_volume
[ 927.097112] env[61839]: ERROR nova.compute.manager [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af] return self._volumeops.detach_volume(connection_info, instance)
[ 927.097112] env[61839]: ERROR nova.compute.manager [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 649, in detach_volume
[ 927.097112] env[61839]: ERROR nova.compute.manager [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af] self._detach_volume_vmdk(connection_info, instance)
[ 927.097112] env[61839]: ERROR nova.compute.manager [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 569, in _detach_volume_vmdk
[ 927.097112] env[61839]: ERROR nova.compute.manager [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af] vm_ref = vm_util.get_vm_ref(self._session, instance)
[ 927.097112] env[61839]: ERROR nova.compute.manager [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1145, in get_vm_ref
[ 927.097112] env[61839]: ERROR nova.compute.manager [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af] stable_ref.fetch_moref(session)
[ 927.097112] env[61839]: ERROR nova.compute.manager [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1136, in fetch_moref
[ 927.097112] env[61839]: ERROR nova.compute.manager [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af] raise exception.InstanceNotFound(instance_id=self._uuid)
[ 927.097112] env[61839]: ERROR nova.compute.manager [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af] nova.exception.InstanceNotFound: Instance d187e75f-39a9-467b-b5ef-e2772d9b71af could not be found.
[ 927.097112] env[61839]: ERROR nova.compute.manager [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af]
[ 927.100782] env[61839]: DEBUG nova.network.neutron [-] [instance: fa8a2265-291e-4424-bea1-72574e495a72] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 927.103237] env[61839]: DEBUG nova.compute.manager [req-4ec8c1b0-fbdf-4df3-8ed1-33ebe9a78ec3 req-a35b6a05-955c-42af-8b38-6cb08d39f061 service nova] [instance: ef49a6f5-27c3-4595-af65-d6a5aa47d4e4] Received event network-vif-deleted-b7032df2-ea05-48a6-9ba7-368194a96584 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}}
[ 927.103430] env[61839]: DEBUG nova.compute.manager [req-4ec8c1b0-fbdf-4df3-8ed1-33ebe9a78ec3 req-a35b6a05-955c-42af-8b38-6cb08d39f061 service nova] [instance: fa8a2265-291e-4424-bea1-72574e495a72] Received event network-vif-deleted-471fcd5e-1ea3-4791-9a4d-b68197f8068e {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}}
[ 927.103606] env[61839]: INFO nova.compute.manager [req-4ec8c1b0-fbdf-4df3-8ed1-33ebe9a78ec3 req-a35b6a05-955c-42af-8b38-6cb08d39f061 service nova] [instance: fa8a2265-291e-4424-bea1-72574e495a72] Neutron deleted interface 471fcd5e-1ea3-4791-9a4d-b68197f8068e; detaching it from the instance and deleting it from the info cache
[ 927.103802] env[61839]: DEBUG nova.network.neutron [req-4ec8c1b0-fbdf-4df3-8ed1-33ebe9a78ec3 req-a35b6a05-955c-42af-8b38-6cb08d39f061 service nova] [instance: fa8a2265-291e-4424-bea1-72574e495a72] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 927.237739] env[61839]: DEBUG nova.compute.utils [None req-0c8f58da-95bd-460c-b9d5-27f4db5a2eae tempest-ServerActionsV293TestJSON-151230769 tempest-ServerActionsV293TestJSON-151230769-project-member] [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af] Build of instance d187e75f-39a9-467b-b5ef-e2772d9b71af aborted: Failed to rebuild volume backed instance. {{(pid=61839) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}}
[ 927.240678] env[61839]: ERROR nova.compute.manager [None req-0c8f58da-95bd-460c-b9d5-27f4db5a2eae tempest-ServerActionsV293TestJSON-151230769 tempest-ServerActionsV293TestJSON-151230769-project-member] [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af] Setting instance vm_state to ERROR: nova.exception.BuildAbortException: Build of instance d187e75f-39a9-467b-b5ef-e2772d9b71af aborted: Failed to rebuild volume backed instance.
[ 927.240678] env[61839]: ERROR nova.compute.manager [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af] Traceback (most recent call last):
[ 927.240678] env[61839]: ERROR nova.compute.manager [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af] File "/opt/stack/nova/nova/compute/manager.py", line 4142, in _do_rebuild_instance
[ 927.240678] env[61839]: ERROR nova.compute.manager [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af] self.driver.rebuild(**kwargs)
[ 927.240678] env[61839]: ERROR nova.compute.manager [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af] File "/opt/stack/nova/nova/virt/driver.py", line 493, in rebuild
[ 927.240678] env[61839]: ERROR nova.compute.manager [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af] raise NotImplementedError()
[ 927.240678] env[61839]: ERROR nova.compute.manager [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af] NotImplementedError
[ 927.240678] env[61839]: ERROR nova.compute.manager [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af]
[ 927.240678] env[61839]: ERROR nova.compute.manager [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af] During handling of the above exception, another exception occurred:
[ 927.240678] env[61839]: ERROR nova.compute.manager [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af]
[ 927.240678] env[61839]: ERROR nova.compute.manager [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af] Traceback (most recent call last):
[ 927.240678] env[61839]: ERROR nova.compute.manager [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af] File "/opt/stack/nova/nova/compute/manager.py", line 3600, in _rebuild_volume_backed_instance
[ 927.240678] env[61839]: ERROR nova.compute.manager [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af] self._detach_root_volume(context, instance, root_bdm)
[ 927.240678] env[61839]: ERROR nova.compute.manager [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af] File "/opt/stack/nova/nova/compute/manager.py", line 3579, in _detach_root_volume
[ 927.240678] env[61839]: ERROR nova.compute.manager [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af] with excutils.save_and_reraise_exception():
[ 927.240678] env[61839]: ERROR nova.compute.manager [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 927.240678] env[61839]: ERROR nova.compute.manager [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af] self.force_reraise()
[ 927.240678] env[61839]: ERROR nova.compute.manager [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 927.240678] env[61839]: ERROR nova.compute.manager [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af] raise self.value
[ 927.240678] env[61839]: ERROR nova.compute.manager [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af] File "/opt/stack/nova/nova/compute/manager.py", line 3565, in _detach_root_volume
[ 927.240678] env[61839]: ERROR nova.compute.manager [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af] self.driver.detach_volume(context, old_connection_info,
[ 927.240678] env[61839]: ERROR nova.compute.manager [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 559, in detach_volume
[ 927.240678] env[61839]: ERROR nova.compute.manager [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af] return self._volumeops.detach_volume(connection_info, instance)
[ 927.240678] env[61839]: ERROR nova.compute.manager [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 649, in detach_volume
[ 927.240678] env[61839]: ERROR nova.compute.manager [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af] self._detach_volume_vmdk(connection_info, instance)
[ 927.240678] env[61839]: ERROR nova.compute.manager [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 569, in _detach_volume_vmdk
[ 927.240678] env[61839]: ERROR nova.compute.manager [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af] vm_ref = vm_util.get_vm_ref(self._session, instance)
[ 927.240678] env[61839]: ERROR nova.compute.manager [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1145, in get_vm_ref
[ 927.240678] env[61839]: ERROR nova.compute.manager [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af] stable_ref.fetch_moref(session)
[ 927.240678] env[61839]: ERROR nova.compute.manager [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1136, in fetch_moref
[ 927.240678] env[61839]: ERROR nova.compute.manager [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af] raise exception.InstanceNotFound(instance_id=self._uuid)
[ 927.240678] env[61839]: ERROR nova.compute.manager [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af] nova.exception.InstanceNotFound: Instance d187e75f-39a9-467b-b5ef-e2772d9b71af could not be found.
[ 927.240678] env[61839]: ERROR nova.compute.manager [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af] [ 927.240678] env[61839]: ERROR nova.compute.manager [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af] During handling of the above exception, another exception occurred: [ 927.240678] env[61839]: ERROR nova.compute.manager [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af] [ 927.240678] env[61839]: ERROR nova.compute.manager [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af] Traceback (most recent call last): [ 927.240678] env[61839]: ERROR nova.compute.manager [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af] File "/opt/stack/nova/nova/compute/manager.py", line 10865, in _error_out_instance_on_exception [ 927.240678] env[61839]: ERROR nova.compute.manager [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af] yield [ 927.240678] env[61839]: ERROR nova.compute.manager [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af] File "/opt/stack/nova/nova/compute/manager.py", line 3868, in rebuild_instance [ 927.240678] env[61839]: ERROR nova.compute.manager [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af] self._do_rebuild_instance_with_claim( [ 927.241963] env[61839]: ERROR nova.compute.manager [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af] File "/opt/stack/nova/nova/compute/manager.py", line 3954, in _do_rebuild_instance_with_claim [ 927.241963] env[61839]: ERROR nova.compute.manager [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af] self._do_rebuild_instance( [ 927.241963] env[61839]: ERROR nova.compute.manager [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af] File "/opt/stack/nova/nova/compute/manager.py", line 4146, in _do_rebuild_instance [ 927.241963] env[61839]: ERROR nova.compute.manager [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af] self._rebuild_default_impl(**kwargs) [ 927.241963] env[61839]: ERROR nova.compute.manager [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af] File "/opt/stack/nova/nova/compute/manager.py", line 3723, in _rebuild_default_impl [ 927.241963] env[61839]: ERROR nova.compute.manager [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af] self._rebuild_volume_backed_instance( [ 927.241963] env[61839]: ERROR nova.compute.manager [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af] File "/opt/stack/nova/nova/compute/manager.py", line 3615, in _rebuild_volume_backed_instance [ 927.241963] env[61839]: ERROR nova.compute.manager [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af] raise exception.BuildAbortException( [ 927.241963] env[61839]: ERROR nova.compute.manager [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af] nova.exception.BuildAbortException: Build of instance d187e75f-39a9-467b-b5ef-e2772d9b71af aborted: Failed to rebuild volume backed instance. [ 927.241963] env[61839]: ERROR nova.compute.manager [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af] [ 927.278301] env[61839]: DEBUG oslo_vmware.api [None req-1e94be6d-33e1-4c83-9b5e-75a1fbfe5edf tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Task: {'id': task-1314746, 'name': CloneVM_Task} progress is 94%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.284025] env[61839]: DEBUG oslo_concurrency.lockutils [None req-7ff34d1a-453a-49ff-af63-371c12d0a7c5 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Acquiring lock "3e27bc4a-a4f3-4929-931a-0c3ecaf10e65" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 927.284244] env[61839]: DEBUG oslo_concurrency.lockutils [None req-7ff34d1a-453a-49ff-af63-371c12d0a7c5 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Lock "3e27bc4a-a4f3-4929-931a-0c3ecaf10e65" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 927.318597] env[61839]: DEBUG oslo_vmware.api [None req-23057198-cea0-4ec8-8098-1a162c29f89f tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52dc1d4a-4ac9-87eb-9170-5fa3ff1afc60, 'name': SearchDatastore_Task, 'duration_secs': 0.011041} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 927.318888] env[61839]: DEBUG oslo_concurrency.lockutils [None req-23057198-cea0-4ec8-8098-1a162c29f89f tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 927.319180] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-23057198-cea0-4ec8-8098-1a162c29f89f tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk to [datastore2] e265dcd3-6ab5-44b1-85be-bad934ebdb79/e265dcd3-6ab5-44b1-85be-bad934ebdb79.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 927.319456] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b5f3fdf7-c6de-4a82-8695-48102163de7a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.327885] env[61839]: DEBUG oslo_vmware.api [None req-23057198-cea0-4ec8-8098-1a162c29f89f tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Waiting for the task: (returnval){ [ 927.327885] env[61839]: value = "task-1314750" [ 927.327885] env[61839]: _type = "Task" [ 927.327885] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 927.336756] env[61839]: DEBUG oslo_vmware.api [None req-23057198-cea0-4ec8-8098-1a162c29f89f tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': task-1314750, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
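The _detach_root_volume frames in the rebuild traceback above follow oslo.utils' save_and_reraise_exception pattern: cleanup runs inside the context manager, and the original exception is re-raised when the block exits so the first failure is not swallowed. A minimal sketch, assuming only the public oslo_utils.excutils API; the detach/rollback callables are illustrative stand-ins, not Nova's code:

    from oslo_utils import excutils

    def detach_root_volume(detach, rollback):
        # Try the detach; on failure, run best-effort rollback, then
        # re-raise the ORIGINAL exception with its traceback intact.
        try:
            detach()
        except Exception:
            with excutils.save_and_reraise_exception():
                rollback()

This is why the log records both the inner InstanceNotFound and the outer BuildAbortException: the context manager preserves the first failure while the caller wraps it into the abort.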
[ 927.426044] env[61839]: DEBUG oslo_concurrency.lockutils [None req-dba42a3e-32cb-4660-b49f-d5351f0075ef tempest-ServerAddressesNegativeTestJSON-1352160384 tempest-ServerAddressesNegativeTestJSON-1352160384-project-member] Acquiring lock "12087baa-e700-4977-b2df-3aa2c56cc2f6" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 927.426327] env[61839]: DEBUG oslo_concurrency.lockutils [None req-dba42a3e-32cb-4660-b49f-d5351f0075ef tempest-ServerAddressesNegativeTestJSON-1352160384 tempest-ServerAddressesNegativeTestJSON-1352160384-project-member] Lock "12087baa-e700-4977-b2df-3aa2c56cc2f6" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 927.426514] env[61839]: DEBUG oslo_concurrency.lockutils [None req-dba42a3e-32cb-4660-b49f-d5351f0075ef tempest-ServerAddressesNegativeTestJSON-1352160384 tempest-ServerAddressesNegativeTestJSON-1352160384-project-member] Acquiring lock "12087baa-e700-4977-b2df-3aa2c56cc2f6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 927.426701] env[61839]: DEBUG oslo_concurrency.lockutils [None req-dba42a3e-32cb-4660-b49f-d5351f0075ef tempest-ServerAddressesNegativeTestJSON-1352160384 tempest-ServerAddressesNegativeTestJSON-1352160384-project-member] Lock "12087baa-e700-4977-b2df-3aa2c56cc2f6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 927.426876] env[61839]: DEBUG oslo_concurrency.lockutils [None req-dba42a3e-32cb-4660-b49f-d5351f0075ef tempest-ServerAddressesNegativeTestJSON-1352160384 tempest-ServerAddressesNegativeTestJSON-1352160384-project-member] Lock "12087baa-e700-4977-b2df-3aa2c56cc2f6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 927.429071] env[61839]: INFO nova.compute.manager [None req-dba42a3e-32cb-4660-b49f-d5351f0075ef tempest-ServerAddressesNegativeTestJSON-1352160384 tempest-ServerAddressesNegativeTestJSON-1352160384-project-member] [instance: 12087baa-e700-4977-b2df-3aa2c56cc2f6] Terminating instance [ 927.430859] env[61839]: DEBUG nova.compute.manager [None req-dba42a3e-32cb-4660-b49f-d5351f0075ef tempest-ServerAddressesNegativeTestJSON-1352160384 tempest-ServerAddressesNegativeTestJSON-1352160384-project-member] [instance: 12087baa-e700-4977-b2df-3aa2c56cc2f6] Start destroying the instance on the hypervisor.
{{(pid=61839) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 927.431103] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-dba42a3e-32cb-4660-b49f-d5351f0075ef tempest-ServerAddressesNegativeTestJSON-1352160384 tempest-ServerAddressesNegativeTestJSON-1352160384-project-member] [instance: 12087baa-e700-4977-b2df-3aa2c56cc2f6] Destroying instance {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 927.431969] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2756387e-ca87-41f9-b2f1-958c97a22715 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.440456] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-dba42a3e-32cb-4660-b49f-d5351f0075ef tempest-ServerAddressesNegativeTestJSON-1352160384 tempest-ServerAddressesNegativeTestJSON-1352160384-project-member] [instance: 12087baa-e700-4977-b2df-3aa2c56cc2f6] Powering off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 927.440754] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3017f9e7-ba90-468e-9d91-199a26e74d36 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.451417] env[61839]: DEBUG nova.scheduler.client.report [None req-bf3bcffb-6c34-4e16-a353-b3f853008852 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 927.455169] env[61839]: DEBUG oslo_vmware.api [None req-dba42a3e-32cb-4660-b49f-d5351f0075ef tempest-ServerAddressesNegativeTestJSON-1352160384 tempest-ServerAddressesNegativeTestJSON-1352160384-project-member] Waiting for the task: (returnval){ [ 927.455169] env[61839]: value = "task-1314751" [ 927.455169] env[61839]: _type = "Task" [ 927.455169] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 927.466105] env[61839]: DEBUG oslo_vmware.api [None req-dba42a3e-32cb-4660-b49f-d5351f0075ef tempest-ServerAddressesNegativeTestJSON-1352160384 tempest-ServerAddressesNegativeTestJSON-1352160384-project-member] Task: {'id': task-1314751, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.480848] env[61839]: DEBUG oslo_concurrency.lockutils [None req-174fbd95-4416-44cd-a955-056506396bd1 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Lock "fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4" "released" by "nova.compute.manager.ComputeManager.stop_instance.<locals>.do_stop_instance" :: held 1.548s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 927.603669] env[61839]: INFO nova.compute.manager [-] [instance: fa8a2265-291e-4424-bea1-72574e495a72] Took 1.75 seconds to deallocate network for instance. [ 927.606684] env[61839]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b850eab5-f1fb-4920-b13a-c5a2f78a8604 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.623783] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f62fd33-b8c3-4563-aab6-888556f8c194 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.635578] env[61839]: DEBUG nova.network.neutron [req-7c4c1bc1-3fb8-47c7-b6d1-22f88cd5aa55 req-b98a05ad-89bb-4c78-8677-fc3801aa4f03 service nova] [instance: 56369316-a445-4a2a-a0a6-967074104e19] Updated VIF entry in instance network info cache for port ef2288b6-c4de-43f8-95c9-22511e164c36. {{(pid=61839) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 927.636044] env[61839]: DEBUG nova.network.neutron [req-7c4c1bc1-3fb8-47c7-b6d1-22f88cd5aa55 req-b98a05ad-89bb-4c78-8677-fc3801aa4f03 service nova] [instance: 56369316-a445-4a2a-a0a6-967074104e19] Updating instance_info_cache with network_info: [{"id": "ef2288b6-c4de-43f8-95c9-22511e164c36", "address": "fa:16:3e:c7:71:6a", "network": {"id": "6f1d4bda-c333-4556-9530-df719c058a5b", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1248001983-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.162", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "223d94c193814f649b5d1f35e3756071", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2180b40f-2bb0-47da-ba80-c2fbe7f98af0", "external-id": "nsx-vlan-transportzone-970", "segmentation_id": 970, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapef2288b6-c4", "ovs_interfaceid": "ef2288b6-c4de-43f8-95c9-22511e164c36", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 927.669081] env[61839]: DEBUG nova.compute.manager [req-4ec8c1b0-fbdf-4df3-8ed1-33ebe9a78ec3 req-a35b6a05-955c-42af-8b38-6cb08d39f061 service nova] [instance: fa8a2265-291e-4424-bea1-72574e495a72] Detach interface failed, port_id=471fcd5e-1ea3-4791-9a4d-b68197f8068e, reason: Instance fa8a2265-291e-4424-bea1-72574e495a72
could not be found. {{(pid=61839) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 927.781741] env[61839]: DEBUG oslo_vmware.api [None req-1e94be6d-33e1-4c83-9b5e-75a1fbfe5edf tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Task: {'id': task-1314746, 'name': CloneVM_Task} progress is 95%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.786672] env[61839]: DEBUG nova.compute.manager [None req-7ff34d1a-453a-49ff-af63-371c12d0a7c5 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 3e27bc4a-a4f3-4929-931a-0c3ecaf10e65] Starting instance... {{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 927.840010] env[61839]: DEBUG oslo_vmware.api [None req-23057198-cea0-4ec8-8098-1a162c29f89f tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': task-1314750, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.961400] env[61839]: DEBUG oslo_concurrency.lockutils [None req-bf3bcffb-6c34-4e16-a353-b3f853008852 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.381s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 927.966325] env[61839]: DEBUG oslo_concurrency.lockutils [None req-cbc82b7c-e238-4c12-a25e-cd1b247d627b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.209s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 927.967872] env[61839]: INFO nova.compute.claims [None req-cbc82b7c-e238-4c12-a25e-cd1b247d627b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: d718d866-dd6c-4332-b63a-be6850a5a785] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 927.970848] env[61839]: DEBUG oslo_vmware.api [None req-dba42a3e-32cb-4660-b49f-d5351f0075ef tempest-ServerAddressesNegativeTestJSON-1352160384 tempest-ServerAddressesNegativeTestJSON-1352160384-project-member] Task: {'id': task-1314751, 'name': PowerOffVM_Task, 'duration_secs': 0.452833} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 927.971407] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-dba42a3e-32cb-4660-b49f-d5351f0075ef tempest-ServerAddressesNegativeTestJSON-1352160384 tempest-ServerAddressesNegativeTestJSON-1352160384-project-member] [instance: 12087baa-e700-4977-b2df-3aa2c56cc2f6] Powered off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 927.971603] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-dba42a3e-32cb-4660-b49f-d5351f0075ef tempest-ServerAddressesNegativeTestJSON-1352160384 tempest-ServerAddressesNegativeTestJSON-1352160384-project-member] [instance: 12087baa-e700-4977-b2df-3aa2c56cc2f6] Unregistering the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 927.971855] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fabd2b7d-601a-44ab-a406-a5ea95c0a27c {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.021179] env[61839]: INFO nova.network.neutron [None req-bf3bcffb-6c34-4e16-a353-b3f853008852 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: 694a5d4b-3673-406b-a24a-d37fad33e549] Updating port 4c2c0fb5-064b-4c53-9498-77b9ddc16884 with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 928.052252] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-dba42a3e-32cb-4660-b49f-d5351f0075ef tempest-ServerAddressesNegativeTestJSON-1352160384 tempest-ServerAddressesNegativeTestJSON-1352160384-project-member] [instance: 12087baa-e700-4977-b2df-3aa2c56cc2f6] Unregistered the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 928.052595] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-dba42a3e-32cb-4660-b49f-d5351f0075ef tempest-ServerAddressesNegativeTestJSON-1352160384 tempest-ServerAddressesNegativeTestJSON-1352160384-project-member] [instance: 12087baa-e700-4977-b2df-3aa2c56cc2f6] Deleting contents of the VM from datastore datastore2 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 928.052810] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-dba42a3e-32cb-4660-b49f-d5351f0075ef tempest-ServerAddressesNegativeTestJSON-1352160384 tempest-ServerAddressesNegativeTestJSON-1352160384-project-member] Deleting the datastore file [datastore2] 12087baa-e700-4977-b2df-3aa2c56cc2f6 {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 928.053186] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-61b0cbb5-0917-4553-acb2-11ea73710639 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.061695] env[61839]: DEBUG oslo_vmware.api [None req-dba42a3e-32cb-4660-b49f-d5351f0075ef tempest-ServerAddressesNegativeTestJSON-1352160384 tempest-ServerAddressesNegativeTestJSON-1352160384-project-member] Waiting for the task: (returnval){ [ 928.061695] env[61839]: value = "task-1314753" [ 928.061695] env[61839]: _type = "Task" [ 928.061695] env[61839]: } to complete. 
{{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 928.071169] env[61839]: DEBUG oslo_vmware.api [None req-dba42a3e-32cb-4660-b49f-d5351f0075ef tempest-ServerAddressesNegativeTestJSON-1352160384 tempest-ServerAddressesNegativeTestJSON-1352160384-project-member] Task: {'id': task-1314753, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.115316] env[61839]: DEBUG oslo_concurrency.lockutils [None req-87a2dd96-4fb1-4151-85f6-7df76dfda5a7 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 928.141024] env[61839]: DEBUG oslo_concurrency.lockutils [req-7c4c1bc1-3fb8-47c7-b6d1-22f88cd5aa55 req-b98a05ad-89bb-4c78-8677-fc3801aa4f03 service nova] Releasing lock "refresh_cache-56369316-a445-4a2a-a0a6-967074104e19" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 928.141511] env[61839]: DEBUG nova.compute.manager [req-7c4c1bc1-3fb8-47c7-b6d1-22f88cd5aa55 req-b98a05ad-89bb-4c78-8677-fc3801aa4f03 service nova] [instance: bac4c882-a23d-412f-ae98-f4f21d86681a] Received event network-vif-deleted-cd0c192b-65df-4a91-85c4-d0e336f93188 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 928.278365] env[61839]: DEBUG oslo_vmware.api [None req-1e94be6d-33e1-4c83-9b5e-75a1fbfe5edf tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Task: {'id': task-1314746, 'name': CloneVM_Task, 'duration_secs': 1.762221} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 928.280509] env[61839]: INFO nova.virt.vmwareapi.vmops [None req-1e94be6d-33e1-4c83-9b5e-75a1fbfe5edf tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] [instance: 86525ea7-af75-4b10-85a1-c0fbab73ea5f] Created linked-clone VM from snapshot [ 928.280509] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b366bcc-0220-4d1f-a361-4223ca79358e {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.287894] env[61839]: DEBUG nova.virt.vmwareapi.images [None req-1e94be6d-33e1-4c83-9b5e-75a1fbfe5edf tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] [instance: 86525ea7-af75-4b10-85a1-c0fbab73ea5f] Uploading image 5fe38bf0-d665-4992-a6d0-c15bcea2316d {{(pid=61839) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 928.306553] env[61839]: DEBUG oslo_concurrency.lockutils [None req-7ff34d1a-453a-49ff-af63-371c12d0a7c5 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 928.319399] env[61839]: DEBUG oslo_vmware.rw_handles [None req-1e94be6d-33e1-4c83-9b5e-75a1fbfe5edf tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 928.319399] env[61839]: value = "vm-281415" [ 928.319399] env[61839]: _type = "VirtualMachine" [ 928.319399] env[61839]: }. {{(pid=61839) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 928.321019] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-c1f7e663-29a6-41b0-87dc-54243966e7a4 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.330054] env[61839]: DEBUG oslo_vmware.rw_handles [None req-1e94be6d-33e1-4c83-9b5e-75a1fbfe5edf tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Lease: (returnval){ [ 928.330054] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52eda046-89e6-ed2e-ac03-d7edc1719696" [ 928.330054] env[61839]: _type = "HttpNfcLease" [ 928.330054] env[61839]: } obtained for exporting VM: (result){ [ 928.330054] env[61839]: value = "vm-281415" [ 928.330054] env[61839]: _type = "VirtualMachine" [ 928.330054] env[61839]: }. {{(pid=61839) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 928.330054] env[61839]: DEBUG oslo_vmware.api [None req-1e94be6d-33e1-4c83-9b5e-75a1fbfe5edf tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Waiting for the lease: (returnval){ [ 928.330054] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52eda046-89e6-ed2e-ac03-d7edc1719696" [ 928.330054] env[61839]: _type = "HttpNfcLease" [ 928.330054] env[61839]: } to be ready. 
{{(pid=61839) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 928.347858] env[61839]: DEBUG oslo_vmware.api [None req-23057198-cea0-4ec8-8098-1a162c29f89f tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': task-1314750, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.578368} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 928.347858] env[61839]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 928.347858] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52eda046-89e6-ed2e-ac03-d7edc1719696" [ 928.347858] env[61839]: _type = "HttpNfcLease" [ 928.347858] env[61839]: } is initializing. {{(pid=61839) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 928.347858] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-23057198-cea0-4ec8-8098-1a162c29f89f tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk to [datastore2] e265dcd3-6ab5-44b1-85be-bad934ebdb79/e265dcd3-6ab5-44b1-85be-bad934ebdb79.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 928.348374] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-23057198-cea0-4ec8-8098-1a162c29f89f tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: e265dcd3-6ab5-44b1-85be-bad934ebdb79] Extending root virtual disk to 1048576 {{(pid=61839) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 928.348374] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6eb30a84-8372-4670-93a1-47e1e8844e43 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.358073] env[61839]: DEBUG oslo_vmware.api [None req-23057198-cea0-4ec8-8098-1a162c29f89f tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Waiting for the task: (returnval){ [ 928.358073] env[61839]: value = "task-1314755" [ 928.358073] env[61839]: _type = "Task" [ 928.358073] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 928.365672] env[61839]: DEBUG oslo_vmware.api [None req-23057198-cea0-4ec8-8098-1a162c29f89f tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': task-1314755, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.572279] env[61839]: DEBUG oslo_vmware.api [None req-dba42a3e-32cb-4660-b49f-d5351f0075ef tempest-ServerAddressesNegativeTestJSON-1352160384 tempest-ServerAddressesNegativeTestJSON-1352160384-project-member] Task: {'id': task-1314753, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.155504} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 928.572621] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-dba42a3e-32cb-4660-b49f-d5351f0075ef tempest-ServerAddressesNegativeTestJSON-1352160384 tempest-ServerAddressesNegativeTestJSON-1352160384-project-member] Deleted the datastore file {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 928.572841] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-dba42a3e-32cb-4660-b49f-d5351f0075ef tempest-ServerAddressesNegativeTestJSON-1352160384 tempest-ServerAddressesNegativeTestJSON-1352160384-project-member] [instance: 12087baa-e700-4977-b2df-3aa2c56cc2f6] Deleted contents of the VM from datastore datastore2 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 928.573036] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-dba42a3e-32cb-4660-b49f-d5351f0075ef tempest-ServerAddressesNegativeTestJSON-1352160384 tempest-ServerAddressesNegativeTestJSON-1352160384-project-member] [instance: 12087baa-e700-4977-b2df-3aa2c56cc2f6] Instance destroyed {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 928.573227] env[61839]: INFO nova.compute.manager [None req-dba42a3e-32cb-4660-b49f-d5351f0075ef tempest-ServerAddressesNegativeTestJSON-1352160384 tempest-ServerAddressesNegativeTestJSON-1352160384-project-member] [instance: 12087baa-e700-4977-b2df-3aa2c56cc2f6] Took 1.14 seconds to destroy the instance on the hypervisor. [ 928.573473] env[61839]: DEBUG oslo.service.loopingcall [None req-dba42a3e-32cb-4660-b49f-d5351f0075ef tempest-ServerAddressesNegativeTestJSON-1352160384 tempest-ServerAddressesNegativeTestJSON-1352160384-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 928.573694] env[61839]: DEBUG nova.compute.manager [-] [instance: 12087baa-e700-4977-b2df-3aa2c56cc2f6] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 928.573800] env[61839]: DEBUG nova.network.neutron [-] [instance: 12087baa-e700-4977-b2df-3aa2c56cc2f6] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 928.837276] env[61839]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 928.837276] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52eda046-89e6-ed2e-ac03-d7edc1719696" [ 928.837276] env[61839]: _type = "HttpNfcLease" [ 928.837276] env[61839]: } is ready. {{(pid=61839) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 928.837824] env[61839]: DEBUG oslo_vmware.rw_handles [None req-1e94be6d-33e1-4c83-9b5e-75a1fbfe5edf tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 928.837824] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52eda046-89e6-ed2e-ac03-d7edc1719696" [ 928.837824] env[61839]: _type = "HttpNfcLease" [ 928.837824] env[61839]: }.
{{(pid=61839) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 928.838655] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-675c1ec5-7e44-4a88-84fb-41e386d09af1 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.853158] env[61839]: DEBUG oslo_vmware.rw_handles [None req-1e94be6d-33e1-4c83-9b5e-75a1fbfe5edf tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52effe6d-aa41-4863-a968-04de54917d67/disk-0.vmdk from lease info. {{(pid=61839) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 928.853371] env[61839]: DEBUG oslo_vmware.rw_handles [None req-1e94be6d-33e1-4c83-9b5e-75a1fbfe5edf tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52effe6d-aa41-4863-a968-04de54917d67/disk-0.vmdk for reading. {{(pid=61839) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 928.927662] env[61839]: DEBUG oslo_vmware.api [None req-23057198-cea0-4ec8-8098-1a162c29f89f tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': task-1314755, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065486} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 928.928256] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-23057198-cea0-4ec8-8098-1a162c29f89f tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: e265dcd3-6ab5-44b1-85be-bad934ebdb79] Extended root virtual disk {{(pid=61839) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 928.929105] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2dbe0026-8b61-413b-b486-8352ad30a3d0 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.952476] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-23057198-cea0-4ec8-8098-1a162c29f89f tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: e265dcd3-6ab5-44b1-85be-bad934ebdb79] Reconfiguring VM instance instance-00000058 to attach disk [datastore2] e265dcd3-6ab5-44b1-85be-bad934ebdb79/e265dcd3-6ab5-44b1-85be-bad934ebdb79.vmdk or device None with type sparse {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 928.953031] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-03c95b23-72d3-474d-8b79-95c634ddc353 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.971746] env[61839]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-5e9741c5-67d9-4767-9fbb-58aecd44ed71 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.981323] env[61839]: DEBUG oslo_vmware.api [None req-23057198-cea0-4ec8-8098-1a162c29f89f tempest-DeleteServersTestJSON-1749383634 
tempest-DeleteServersTestJSON-1749383634-project-member] Waiting for the task: (returnval){ [ 928.981323] env[61839]: value = "task-1314756" [ 928.981323] env[61839]: _type = "Task" [ 928.981323] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 928.989528] env[61839]: DEBUG oslo_vmware.api [None req-23057198-cea0-4ec8-8098-1a162c29f89f tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': task-1314756, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.134305] env[61839]: DEBUG nova.compute.manager [req-808a7fd5-3d5c-4cf2-b3d8-fb6188654726 req-dfb2c227-8c35-4cbe-852e-bf6b48cca239 service nova] [instance: 12087baa-e700-4977-b2df-3aa2c56cc2f6] Received event network-vif-deleted-1cf79153-ee72-44b5-817d-61edb00b0a4e {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 929.134598] env[61839]: INFO nova.compute.manager [req-808a7fd5-3d5c-4cf2-b3d8-fb6188654726 req-dfb2c227-8c35-4cbe-852e-bf6b48cca239 service nova] [instance: 12087baa-e700-4977-b2df-3aa2c56cc2f6] Neutron deleted interface 1cf79153-ee72-44b5-817d-61edb00b0a4e; detaching it from the instance and deleting it from the info cache [ 929.134860] env[61839]: DEBUG nova.network.neutron [req-808a7fd5-3d5c-4cf2-b3d8-fb6188654726 req-dfb2c227-8c35-4cbe-852e-bf6b48cca239 service nova] [instance: 12087baa-e700-4977-b2df-3aa2c56cc2f6] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 929.255470] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba6cafaf-3893-4c1f-9703-1e30c804eac9 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.259831] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0c8f58da-95bd-460c-b9d5-27f4db5a2eae tempest-ServerActionsV293TestJSON-151230769 tempest-ServerActionsV293TestJSON-151230769-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 929.277868] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1506bce-72f6-4ee0-9c29-7f7f026d0240 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.312935] env[61839]: DEBUG nova.network.neutron [-] [instance: 12087baa-e700-4977-b2df-3aa2c56cc2f6] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 929.314981] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bc7cc23-08fb-49a9-8b6c-ace9bcbd629b {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.325922] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a82bea4-c2ed-4e78-8b12-4e504890de7c {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.345866] env[61839]: DEBUG nova.compute.provider_tree [None 
req-cbc82b7c-e238-4c12-a25e-cd1b247d627b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 929.361777] env[61839]: DEBUG nova.compute.manager [None req-3cb92deb-2f40-434a-9700-7a733b835e57 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4] Stashing vm_state: stopped {{(pid=61839) _prep_resize /opt/stack/nova/nova/compute/manager.py:5624}} [ 929.492297] env[61839]: DEBUG oslo_vmware.api [None req-23057198-cea0-4ec8-8098-1a162c29f89f tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': task-1314756, 'name': ReconfigVM_Task, 'duration_secs': 0.327879} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 929.492702] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-23057198-cea0-4ec8-8098-1a162c29f89f tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: e265dcd3-6ab5-44b1-85be-bad934ebdb79] Reconfigured VM instance instance-00000058 to attach disk [datastore2] e265dcd3-6ab5-44b1-85be-bad934ebdb79/e265dcd3-6ab5-44b1-85be-bad934ebdb79.vmdk or device None with type sparse {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 929.493463] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1814f584-9e55-4d4a-860e-8ab71ed9b1fb {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.503000] env[61839]: DEBUG oslo_vmware.api [None req-23057198-cea0-4ec8-8098-1a162c29f89f tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Waiting for the task: (returnval){ [ 929.503000] env[61839]: value = "task-1314757" [ 929.503000] env[61839]: _type = "Task" [ 929.503000] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 929.513457] env[61839]: DEBUG oslo_vmware.api [None req-23057198-cea0-4ec8-8098-1a162c29f89f tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': task-1314757, 'name': Rename_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.640072] env[61839]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2123ddf7-9685-4c6e-8780-d85e8fceaa0c {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.652169] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3552b74d-7829-4931-bf19-ec95ff47ddb5 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.688659] env[61839]: DEBUG nova.compute.manager [req-808a7fd5-3d5c-4cf2-b3d8-fb6188654726 req-dfb2c227-8c35-4cbe-852e-bf6b48cca239 service nova] [instance: 12087baa-e700-4977-b2df-3aa2c56cc2f6] Detach interface failed, port_id=1cf79153-ee72-44b5-817d-61edb00b0a4e, reason: Instance 12087baa-e700-4977-b2df-3aa2c56cc2f6 could not be found. 
{{(pid=61839) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 929.819672] env[61839]: INFO nova.compute.manager [-] [instance: 12087baa-e700-4977-b2df-3aa2c56cc2f6] Took 1.25 seconds to deallocate network for instance. [ 929.850387] env[61839]: DEBUG nova.scheduler.client.report [None req-cbc82b7c-e238-4c12-a25e-cd1b247d627b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 929.886036] env[61839]: DEBUG oslo_concurrency.lockutils [None req-bf3bcffb-6c34-4e16-a353-b3f853008852 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Acquiring lock "refresh_cache-694a5d4b-3673-406b-a24a-d37fad33e549" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 929.886036] env[61839]: DEBUG oslo_concurrency.lockutils [None req-bf3bcffb-6c34-4e16-a353-b3f853008852 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Acquired lock "refresh_cache-694a5d4b-3673-406b-a24a-d37fad33e549" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 929.886036] env[61839]: DEBUG nova.network.neutron [None req-bf3bcffb-6c34-4e16-a353-b3f853008852 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: 694a5d4b-3673-406b-a24a-d37fad33e549] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 929.902361] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3cb92deb-2f40-434a-9700-7a733b835e57 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 930.015117] env[61839]: DEBUG oslo_vmware.api [None req-23057198-cea0-4ec8-8098-1a162c29f89f tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': task-1314757, 'name': Rename_Task, 'duration_secs': 0.264568} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 930.015573] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-23057198-cea0-4ec8-8098-1a162c29f89f tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: e265dcd3-6ab5-44b1-85be-bad934ebdb79] Powering on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 930.016205] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-91c12d45-172b-4fe3-9743-3ed4343ff907 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.023367] env[61839]: DEBUG oslo_vmware.api [None req-23057198-cea0-4ec8-8098-1a162c29f89f tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Waiting for the task: (returnval){ [ 930.023367] env[61839]: value = "task-1314758" [ 930.023367] env[61839]: _type = "Task" [ 930.023367] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 930.032883] env[61839]: DEBUG oslo_vmware.api [None req-23057198-cea0-4ec8-8098-1a162c29f89f tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': task-1314758, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.311820] env[61839]: DEBUG oslo_concurrency.lockutils [None req-c179ae22-1694-4b99-9f62-5d0902929126 tempest-ServerActionsV293TestJSON-151230769 tempest-ServerActionsV293TestJSON-151230769-project-member] Acquiring lock "d187e75f-39a9-467b-b5ef-e2772d9b71af" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 930.312064] env[61839]: DEBUG oslo_concurrency.lockutils [None req-c179ae22-1694-4b99-9f62-5d0902929126 tempest-ServerActionsV293TestJSON-151230769 tempest-ServerActionsV293TestJSON-151230769-project-member] Lock "d187e75f-39a9-467b-b5ef-e2772d9b71af" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 930.312295] env[61839]: DEBUG oslo_concurrency.lockutils [None req-c179ae22-1694-4b99-9f62-5d0902929126 tempest-ServerActionsV293TestJSON-151230769 tempest-ServerActionsV293TestJSON-151230769-project-member] Acquiring lock "d187e75f-39a9-467b-b5ef-e2772d9b71af-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 930.312558] env[61839]: DEBUG oslo_concurrency.lockutils [None req-c179ae22-1694-4b99-9f62-5d0902929126 tempest-ServerActionsV293TestJSON-151230769 tempest-ServerActionsV293TestJSON-151230769-project-member] Lock "d187e75f-39a9-467b-b5ef-e2772d9b71af-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 930.312694] env[61839]: DEBUG oslo_concurrency.lockutils [None req-c179ae22-1694-4b99-9f62-5d0902929126 tempest-ServerActionsV293TestJSON-151230769
tempest-ServerActionsV293TestJSON-151230769-project-member] Lock "d187e75f-39a9-467b-b5ef-e2772d9b71af-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 930.315061] env[61839]: INFO nova.compute.manager [None req-c179ae22-1694-4b99-9f62-5d0902929126 tempest-ServerActionsV293TestJSON-151230769 tempest-ServerActionsV293TestJSON-151230769-project-member] [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af] Terminating instance [ 930.317983] env[61839]: DEBUG nova.compute.manager [None req-c179ae22-1694-4b99-9f62-5d0902929126 tempest-ServerActionsV293TestJSON-151230769 tempest-ServerActionsV293TestJSON-151230769-project-member] [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af] Start destroying the instance on the hypervisor. {{(pid=61839) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 930.318374] env[61839]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ff033f23-6566-4ea7-99b2-374d2c17c3bd {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.326781] env[61839]: DEBUG oslo_concurrency.lockutils [None req-dba42a3e-32cb-4660-b49f-d5351f0075ef tempest-ServerAddressesNegativeTestJSON-1352160384 tempest-ServerAddressesNegativeTestJSON-1352160384-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 930.329847] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92fd5116-f4e1-4388-b0e2-2bc9fbab3a0e {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.361561] env[61839]: DEBUG oslo_concurrency.lockutils [None req-cbc82b7c-e238-4c12-a25e-cd1b247d627b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.395s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 930.362197] env[61839]: DEBUG nova.compute.manager [None req-cbc82b7c-e238-4c12-a25e-cd1b247d627b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: d718d866-dd6c-4332-b63a-be6850a5a785] Start building networks asynchronously for instance. {{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 930.365107] env[61839]: WARNING nova.virt.vmwareapi.driver [None req-c179ae22-1694-4b99-9f62-5d0902929126 tempest-ServerActionsV293TestJSON-151230769 tempest-ServerActionsV293TestJSON-151230769-project-member] [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af] Instance does not exist. Proceeding to delete instance properties on datastore: nova.exception.InstanceNotFound: Instance d187e75f-39a9-467b-b5ef-e2772d9b71af could not be found.
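The "Acquiring lock ... by nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" entries above are emitted by oslo.concurrency's lock wrapper: Nova defines a nested function and runs it under a semaphore named after the instance UUID, so concurrent operations on the same instance serialize. A minimal sketch, assuming standard oslo.concurrency usage; the body of the nested function is an illustrative stand-in:

    from oslo_concurrency import lockutils

    def terminate_instance(instance_uuid):
        # The decorator emits the "Acquiring lock ..." / "Lock ... acquired
        # by '<qualname>.<locals>.do_terminate_instance'" DEBUG lines seen
        # in this log while the lock is taken and released.
        @lockutils.synchronized(instance_uuid)
        def do_terminate_instance():
            print('terminating %s' % instance_uuid)  # stand-in for real teardown

        do_terminate_instance()

    terminate_instance('d187e75f-39a9-467b-b5ef-e2772d9b71af')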
[ 930.365781] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-c179ae22-1694-4b99-9f62-5d0902929126 tempest-ServerActionsV293TestJSON-151230769 tempest-ServerActionsV293TestJSON-151230769-project-member] [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af] Destroying instance {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 930.365965] env[61839]: DEBUG oslo_concurrency.lockutils [None req-bd480c71-e10b-4855-b0e1-479a82aad289 tempest-ServerGroupTestJSON-1627784352 tempest-ServerGroupTestJSON-1627784352-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 10.098s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 930.366318] env[61839]: DEBUG nova.objects.instance [None req-bd480c71-e10b-4855-b0e1-479a82aad289 tempest-ServerGroupTestJSON-1627784352 tempest-ServerGroupTestJSON-1627784352-project-member] Lazy-loading 'resources' on Instance uuid 65f34f9e-353a-4f94-8f79-9bda89451885 {{(pid=61839) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 930.367444] env[61839]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-228dd044-1fd5-4276-97df-e71d175a9136 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.378954] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4e8a517-4216-446d-8b7d-4925f33dfef2 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.418697] env[61839]: WARNING nova.virt.vmwareapi.vmops [None req-c179ae22-1694-4b99-9f62-5d0902929126 tempest-ServerActionsV293TestJSON-151230769 tempest-ServerActionsV293TestJSON-151230769-project-member] [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance d187e75f-39a9-467b-b5ef-e2772d9b71af could not be found. [ 930.419046] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-c179ae22-1694-4b99-9f62-5d0902929126 tempest-ServerActionsV293TestJSON-151230769 tempest-ServerActionsV293TestJSON-151230769-project-member] [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af] Instance destroyed {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 930.419308] env[61839]: INFO nova.compute.manager [None req-c179ae22-1694-4b99-9f62-5d0902929126 tempest-ServerActionsV293TestJSON-151230769 tempest-ServerActionsV293TestJSON-151230769-project-member] [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af] Took 0.10 seconds to destroy the instance on the hypervisor. [ 930.419651] env[61839]: DEBUG oslo.service.loopingcall [None req-c179ae22-1694-4b99-9f62-5d0902929126 tempest-ServerActionsV293TestJSON-151230769 tempest-ServerActionsV293TestJSON-151230769-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.
{{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 930.419926] env[61839]: DEBUG nova.compute.manager [-] [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 930.420041] env[61839]: DEBUG nova.network.neutron [-] [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 930.534083] env[61839]: DEBUG oslo_vmware.api [None req-23057198-cea0-4ec8-8098-1a162c29f89f tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': task-1314758, 'name': PowerOnVM_Task, 'duration_secs': 0.47992} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 930.534494] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-23057198-cea0-4ec8-8098-1a162c29f89f tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: e265dcd3-6ab5-44b1-85be-bad934ebdb79] Powered on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 930.534884] env[61839]: INFO nova.compute.manager [None req-23057198-cea0-4ec8-8098-1a162c29f89f tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: e265dcd3-6ab5-44b1-85be-bad934ebdb79] Took 10.12 seconds to spawn the instance on the hypervisor. [ 930.535151] env[61839]: DEBUG nova.compute.manager [None req-23057198-cea0-4ec8-8098-1a162c29f89f tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: e265dcd3-6ab5-44b1-85be-bad934ebdb79] Checking state {{(pid=61839) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 930.536079] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d822796-3ceb-4a5f-90b8-27d89fc1fc43 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.867530] env[61839]: DEBUG nova.compute.utils [None req-cbc82b7c-e238-4c12-a25e-cd1b247d627b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Using /dev/sd instead of None {{(pid=61839) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 930.869110] env[61839]: DEBUG nova.compute.manager [None req-cbc82b7c-e238-4c12-a25e-cd1b247d627b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: d718d866-dd6c-4332-b63a-be6850a5a785] Allocating IP information in the background. 
{{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 930.869397] env[61839]: DEBUG nova.network.neutron [None req-cbc82b7c-e238-4c12-a25e-cd1b247d627b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: d718d866-dd6c-4332-b63a-be6850a5a785] allocate_for_instance() {{(pid=61839) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 930.906819] env[61839]: DEBUG nova.network.neutron [None req-bf3bcffb-6c34-4e16-a353-b3f853008852 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: 694a5d4b-3673-406b-a24a-d37fad33e549] Updating instance_info_cache with network_info: [{"id": "4c2c0fb5-064b-4c53-9498-77b9ddc16884", "address": "fa:16:3e:00:cb:60", "network": {"id": "e012ca9f-236a-42fd-a444-759508fbba7b", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-567894073-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.242", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b73ee7e490914f54925597f38c8cc05b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9f3a2eb5-353f-45c5-a73b-869626f4bb13", "external-id": "nsx-vlan-transportzone-411", "segmentation_id": 411, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4c2c0fb5-06", "ovs_interfaceid": "4c2c0fb5-064b-4c53-9498-77b9ddc16884", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 930.943961] env[61839]: DEBUG nova.policy [None req-cbc82b7c-e238-4c12-a25e-cd1b247d627b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '76a29e808031416ab8895e89c337be6e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7e03467b7fba46a9aac1562a1cb8368e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61839) authorize /opt/stack/nova/nova/policy.py:201}} [ 931.057385] env[61839]: INFO nova.compute.manager [None req-23057198-cea0-4ec8-8098-1a162c29f89f tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: e265dcd3-6ab5-44b1-85be-bad934ebdb79] Took 18.30 seconds to build instance. 
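Records such as "oslo.service.loopingcall ... Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return" come from oslo.service's FixedIntervalLoopingCall, which re-invokes a function on a timer until it signals completion. A hedged sketch of that public API; the poll function here is a stand-in, not Nova's:

    from oslo_service import loopingcall

    attempts = {'n': 0}

    def _poll():
        # Called once per tick; raising LoopingCallDone stops the loop
        # and hands its retvalue back to the waiter.
        attempts['n'] += 1
        if attempts['n'] >= 3:
            raise loopingcall.LoopingCallDone(retvalue=True)

    timer = loopingcall.FixedIntervalLoopingCall(_poll)
    result = timer.start(interval=0.5).wait()  # blocks; True after 3 ticks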
[ 931.200403] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8425256-5d6e-463b-a7bd-f477be198edc {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.210285] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54c009d1-f244-46c1-b487-5572b81445fc {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.215321] env[61839]: DEBUG nova.compute.manager [req-7feefbf2-35c5-424e-b12b-aad57688b484 req-338b7af8-e54a-48f0-9a09-d62994072f3b service nova] [instance: 694a5d4b-3673-406b-a24a-d37fad33e549] Received event network-vif-plugged-4c2c0fb5-064b-4c53-9498-77b9ddc16884 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 931.215321] env[61839]: DEBUG oslo_concurrency.lockutils [req-7feefbf2-35c5-424e-b12b-aad57688b484 req-338b7af8-e54a-48f0-9a09-d62994072f3b service nova] Acquiring lock "694a5d4b-3673-406b-a24a-d37fad33e549-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 931.216234] env[61839]: DEBUG oslo_concurrency.lockutils [req-7feefbf2-35c5-424e-b12b-aad57688b484 req-338b7af8-e54a-48f0-9a09-d62994072f3b service nova] Lock "694a5d4b-3673-406b-a24a-d37fad33e549-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 931.216736] env[61839]: DEBUG oslo_concurrency.lockutils [req-7feefbf2-35c5-424e-b12b-aad57688b484 req-338b7af8-e54a-48f0-9a09-d62994072f3b service nova] Lock "694a5d4b-3673-406b-a24a-d37fad33e549-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 931.216810] env[61839]: DEBUG nova.compute.manager [req-7feefbf2-35c5-424e-b12b-aad57688b484 req-338b7af8-e54a-48f0-9a09-d62994072f3b service nova] [instance: 694a5d4b-3673-406b-a24a-d37fad33e549] No waiting events found dispatching network-vif-plugged-4c2c0fb5-064b-4c53-9498-77b9ddc16884 {{(pid=61839) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 931.217045] env[61839]: WARNING nova.compute.manager [req-7feefbf2-35c5-424e-b12b-aad57688b484 req-338b7af8-e54a-48f0-9a09-d62994072f3b service nova] [instance: 694a5d4b-3673-406b-a24a-d37fad33e549] Received unexpected event network-vif-plugged-4c2c0fb5-064b-4c53-9498-77b9ddc16884 for instance with vm_state shelved_offloaded and task_state spawning. [ 931.217231] env[61839]: DEBUG nova.compute.manager [req-7feefbf2-35c5-424e-b12b-aad57688b484 req-338b7af8-e54a-48f0-9a09-d62994072f3b service nova] [instance: 694a5d4b-3673-406b-a24a-d37fad33e549] Received event network-changed-4c2c0fb5-064b-4c53-9498-77b9ddc16884 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 931.217436] env[61839]: DEBUG nova.compute.manager [req-7feefbf2-35c5-424e-b12b-aad57688b484 req-338b7af8-e54a-48f0-9a09-d62994072f3b service nova] [instance: 694a5d4b-3673-406b-a24a-d37fad33e549] Refreshing instance network info cache due to event network-changed-4c2c0fb5-064b-4c53-9498-77b9ddc16884. 
{{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 931.218431] env[61839]: DEBUG oslo_concurrency.lockutils [req-7feefbf2-35c5-424e-b12b-aad57688b484 req-338b7af8-e54a-48f0-9a09-d62994072f3b service nova] Acquiring lock "refresh_cache-694a5d4b-3673-406b-a24a-d37fad33e549" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 931.251645] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8399e964-b335-48e0-8ab6-8f0a12798a48 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.278154] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cf2c7d4-eb0b-44ff-ae09-021088b65046 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.295121] env[61839]: DEBUG nova.compute.provider_tree [None req-bd480c71-e10b-4855-b0e1-479a82aad289 tempest-ServerGroupTestJSON-1627784352 tempest-ServerGroupTestJSON-1627784352-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 931.354137] env[61839]: DEBUG nova.network.neutron [None req-cbc82b7c-e238-4c12-a25e-cd1b247d627b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: d718d866-dd6c-4332-b63a-be6850a5a785] Successfully created port: 5d041d96-4a6e-44d5-a31a-e597194524e0 {{(pid=61839) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 931.372227] env[61839]: DEBUG nova.compute.manager [None req-cbc82b7c-e238-4c12-a25e-cd1b247d627b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: d718d866-dd6c-4332-b63a-be6850a5a785] Start building block device mappings for instance. 
{{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 931.409117] env[61839]: DEBUG oslo_concurrency.lockutils [None req-bf3bcffb-6c34-4e16-a353-b3f853008852 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Releasing lock "refresh_cache-694a5d4b-3673-406b-a24a-d37fad33e549" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 931.411844] env[61839]: DEBUG oslo_concurrency.lockutils [req-7feefbf2-35c5-424e-b12b-aad57688b484 req-338b7af8-e54a-48f0-9a09-d62994072f3b service nova] Acquired lock "refresh_cache-694a5d4b-3673-406b-a24a-d37fad33e549" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 931.411844] env[61839]: DEBUG nova.network.neutron [req-7feefbf2-35c5-424e-b12b-aad57688b484 req-338b7af8-e54a-48f0-9a09-d62994072f3b service nova] [instance: 694a5d4b-3673-406b-a24a-d37fad33e549] Refreshing network info cache for port 4c2c0fb5-064b-4c53-9498-77b9ddc16884 {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 931.437978] env[61839]: DEBUG nova.virt.hardware [None req-bf3bcffb-6c34-4e16-a353-b3f853008852 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-18T16:51:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='14c8972565a6edf654b4c0d98f640122',container_format='bare',created_at=2024-10-18T16:58:59Z,direct_url=,disk_format='vmdk',id=d630a97f-7e84-429e-8613-52457a48a10a,min_disk=1,min_ram=0,name='tempest-AttachVolumeShelveTestJSON-server-1361724659-shelved',owner='b73ee7e490914f54925597f38c8cc05b',properties=ImageMetaProps,protected=,size=31667712,status='active',tags=,updated_at=2024-10-18T16:59:15Z,virtual_size=,visibility=), allow threads: False {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 931.438292] env[61839]: DEBUG nova.virt.hardware [None req-bf3bcffb-6c34-4e16-a353-b3f853008852 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Flavor limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 931.438451] env[61839]: DEBUG nova.virt.hardware [None req-bf3bcffb-6c34-4e16-a353-b3f853008852 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Image limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 931.438630] env[61839]: DEBUG nova.virt.hardware [None req-bf3bcffb-6c34-4e16-a353-b3f853008852 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Flavor pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 931.438799] env[61839]: DEBUG nova.virt.hardware [None req-bf3bcffb-6c34-4e16-a353-b3f853008852 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Image pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 931.438978] env[61839]: DEBUG 
nova.virt.hardware [None req-bf3bcffb-6c34-4e16-a353-b3f853008852 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 931.439204] env[61839]: DEBUG nova.virt.hardware [None req-bf3bcffb-6c34-4e16-a353-b3f853008852 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 931.439362] env[61839]: DEBUG nova.virt.hardware [None req-bf3bcffb-6c34-4e16-a353-b3f853008852 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 931.439527] env[61839]: DEBUG nova.virt.hardware [None req-bf3bcffb-6c34-4e16-a353-b3f853008852 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Got 1 possible topologies {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 931.441019] env[61839]: DEBUG nova.virt.hardware [None req-bf3bcffb-6c34-4e16-a353-b3f853008852 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 931.441019] env[61839]: DEBUG nova.virt.hardware [None req-bf3bcffb-6c34-4e16-a353-b3f853008852 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 931.441292] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fd84b7c-90ff-4d73-90a7-9cd5737e4baa {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.449904] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-079c0efb-7a15-4d12-ba0c-4b09ad3a4c05 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.468814] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-bf3bcffb-6c34-4e16-a353-b3f853008852 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: 694a5d4b-3673-406b-a24a-d37fad33e549] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:00:cb:60', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9f3a2eb5-353f-45c5-a73b-869626f4bb13', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4c2c0fb5-064b-4c53-9498-77b9ddc16884', 'vif_model': 'vmxnet3'}] {{(pid=61839) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 931.480018] env[61839]: DEBUG oslo.service.loopingcall [None req-bf3bcffb-6c34-4e16-a353-b3f853008852 tempest-AttachVolumeShelveTestJSON-1020874626 
tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 931.480018] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 694a5d4b-3673-406b-a24a-d37fad33e549] Creating VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 931.480018] env[61839]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8ddee600-01e5-4256-898b-c9f74a898ea7 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.505683] env[61839]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 931.505683] env[61839]: value = "task-1314759" [ 931.505683] env[61839]: _type = "Task" [ 931.505683] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 931.515594] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314759, 'name': CreateVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.562390] env[61839]: DEBUG oslo_concurrency.lockutils [None req-23057198-cea0-4ec8-8098-1a162c29f89f tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Lock "e265dcd3-6ab5-44b1-85be-bad934ebdb79" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 19.815s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 931.799912] env[61839]: DEBUG nova.scheduler.client.report [None req-bd480c71-e10b-4855-b0e1-479a82aad289 tempest-ServerGroupTestJSON-1627784352 tempest-ServerGroupTestJSON-1627784352-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 931.803402] env[61839]: DEBUG nova.network.neutron [-] [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 932.021610] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314759, 'name': CreateVM_Task, 'duration_secs': 0.395667} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 932.021802] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 694a5d4b-3673-406b-a24a-d37fad33e549] Created VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 932.022514] env[61839]: DEBUG oslo_concurrency.lockutils [None req-bf3bcffb-6c34-4e16-a353-b3f853008852 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d630a97f-7e84-429e-8613-52457a48a10a" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 932.022693] env[61839]: DEBUG oslo_concurrency.lockutils [None req-bf3bcffb-6c34-4e16-a353-b3f853008852 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d630a97f-7e84-429e-8613-52457a48a10a" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 932.027025] env[61839]: DEBUG oslo_concurrency.lockutils [None req-bf3bcffb-6c34-4e16-a353-b3f853008852 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d630a97f-7e84-429e-8613-52457a48a10a" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 932.027025] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-45ba2c49-7776-4c8f-9ebb-18414475d1af {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.029846] env[61839]: DEBUG oslo_vmware.api [None req-bf3bcffb-6c34-4e16-a353-b3f853008852 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Waiting for the task: (returnval){ [ 932.029846] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52a272fc-8a72-cb84-704b-b8d673bfd0b3" [ 932.029846] env[61839]: _type = "Task" [ 932.029846] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 932.040158] env[61839]: DEBUG oslo_vmware.api [None req-bf3bcffb-6c34-4e16-a353-b3f853008852 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52a272fc-8a72-cb84-704b-b8d673bfd0b3, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.306034] env[61839]: DEBUG oslo_concurrency.lockutils [None req-bd480c71-e10b-4855-b0e1-479a82aad289 tempest-ServerGroupTestJSON-1627784352 tempest-ServerGroupTestJSON-1627784352-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.940s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 932.308762] env[61839]: DEBUG oslo_concurrency.lockutils [None req-232c9fff-1e94-49d3-8695-e77d90e281a0 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.161s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 932.310418] env[61839]: INFO nova.compute.claims [None req-232c9fff-1e94-49d3-8695-e77d90e281a0 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] [instance: d4a8c153-7585-4c78-8aa4-56077e0a7af6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 932.313258] env[61839]: INFO nova.compute.manager [-] [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af] Took 1.89 seconds to deallocate network for instance. [ 932.334036] env[61839]: INFO nova.scheduler.client.report [None req-bd480c71-e10b-4855-b0e1-479a82aad289 tempest-ServerGroupTestJSON-1627784352 tempest-ServerGroupTestJSON-1627784352-project-member] Deleted allocations for instance 65f34f9e-353a-4f94-8f79-9bda89451885 [ 932.342428] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3cb5a8d7-2dab-4fe1-9636-3e19d5eeb851 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Acquiring lock "e265dcd3-6ab5-44b1-85be-bad934ebdb79" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 932.342611] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3cb5a8d7-2dab-4fe1-9636-3e19d5eeb851 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Lock "e265dcd3-6ab5-44b1-85be-bad934ebdb79" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 932.342877] env[61839]: INFO nova.compute.manager [None req-3cb5a8d7-2dab-4fe1-9636-3e19d5eeb851 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: e265dcd3-6ab5-44b1-85be-bad934ebdb79] Shelving [ 932.384193] env[61839]: DEBUG nova.compute.manager [None req-cbc82b7c-e238-4c12-a25e-cd1b247d627b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: d718d866-dd6c-4332-b63a-be6850a5a785] Start spawning the instance on the hypervisor. 
{{(pid=61839) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 932.414062] env[61839]: DEBUG nova.virt.hardware [None req-cbc82b7c-e238-4c12-a25e-cd1b247d627b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-18T16:51:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-18T16:50:57Z,direct_url=,disk_format='vmdk',id=e497cc62-282a-4a70-9770-22d80d8a1013,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a98e034276e44534a9feace637762da7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-18T16:50:58Z,virtual_size=,visibility=), allow threads: False {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 932.414422] env[61839]: DEBUG nova.virt.hardware [None req-cbc82b7c-e238-4c12-a25e-cd1b247d627b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Flavor limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 932.414673] env[61839]: DEBUG nova.virt.hardware [None req-cbc82b7c-e238-4c12-a25e-cd1b247d627b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Image limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 932.414879] env[61839]: DEBUG nova.virt.hardware [None req-cbc82b7c-e238-4c12-a25e-cd1b247d627b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Flavor pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 932.415242] env[61839]: DEBUG nova.virt.hardware [None req-cbc82b7c-e238-4c12-a25e-cd1b247d627b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Image pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 932.415435] env[61839]: DEBUG nova.virt.hardware [None req-cbc82b7c-e238-4c12-a25e-cd1b247d627b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 932.415783] env[61839]: DEBUG nova.virt.hardware [None req-cbc82b7c-e238-4c12-a25e-cd1b247d627b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 932.415999] env[61839]: DEBUG nova.virt.hardware [None req-cbc82b7c-e238-4c12-a25e-cd1b247d627b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 932.416278] 
env[61839]: DEBUG nova.virt.hardware [None req-cbc82b7c-e238-4c12-a25e-cd1b247d627b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Got 1 possible topologies {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 932.416561] env[61839]: DEBUG nova.virt.hardware [None req-cbc82b7c-e238-4c12-a25e-cd1b247d627b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 932.416761] env[61839]: DEBUG nova.virt.hardware [None req-cbc82b7c-e238-4c12-a25e-cd1b247d627b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 932.417794] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd2c8246-4631-4164-af9b-029e4f485f34 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.429742] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4df0378c-38b4-4d55-b6e8-f4d762670f4e {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.540756] env[61839]: DEBUG oslo_concurrency.lockutils [None req-bf3bcffb-6c34-4e16-a353-b3f853008852 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d630a97f-7e84-429e-8613-52457a48a10a" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 932.541019] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-bf3bcffb-6c34-4e16-a353-b3f853008852 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: 694a5d4b-3673-406b-a24a-d37fad33e549] Processing image d630a97f-7e84-429e-8613-52457a48a10a {{(pid=61839) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 932.541280] env[61839]: DEBUG oslo_concurrency.lockutils [None req-bf3bcffb-6c34-4e16-a353-b3f853008852 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d630a97f-7e84-429e-8613-52457a48a10a/d630a97f-7e84-429e-8613-52457a48a10a.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 932.541802] env[61839]: DEBUG oslo_concurrency.lockutils [None req-bf3bcffb-6c34-4e16-a353-b3f853008852 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d630a97f-7e84-429e-8613-52457a48a10a/d630a97f-7e84-429e-8613-52457a48a10a.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 932.542101] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-bf3bcffb-6c34-4e16-a353-b3f853008852 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Creating directory with path [datastore1] 
devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 932.542389] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4ac1c642-5e3f-43a0-96c7-ec5f57fc93c0 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.563524] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-bf3bcffb-6c34-4e16-a353-b3f853008852 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 932.563731] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-bf3bcffb-6c34-4e16-a353-b3f853008852 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61839) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 932.564508] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-22a21288-abf1-4663-a5d0-74bb5bf02690 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.572487] env[61839]: DEBUG oslo_vmware.api [None req-bf3bcffb-6c34-4e16-a353-b3f853008852 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Waiting for the task: (returnval){ [ 932.572487] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52368211-dc07-665c-cb86-a183de6819f4" [ 932.572487] env[61839]: _type = "Task" [ 932.572487] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 932.581632] env[61839]: DEBUG oslo_vmware.api [None req-bf3bcffb-6c34-4e16-a353-b3f853008852 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52368211-dc07-665c-cb86-a183de6819f4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.791040] env[61839]: DEBUG nova.network.neutron [req-7feefbf2-35c5-424e-b12b-aad57688b484 req-338b7af8-e54a-48f0-9a09-d62994072f3b service nova] [instance: 694a5d4b-3673-406b-a24a-d37fad33e549] Updated VIF entry in instance network info cache for port 4c2c0fb5-064b-4c53-9498-77b9ddc16884. 
{{(pid=61839) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 932.791040] env[61839]: DEBUG nova.network.neutron [req-7feefbf2-35c5-424e-b12b-aad57688b484 req-338b7af8-e54a-48f0-9a09-d62994072f3b service nova] [instance: 694a5d4b-3673-406b-a24a-d37fad33e549] Updating instance_info_cache with network_info: [{"id": "4c2c0fb5-064b-4c53-9498-77b9ddc16884", "address": "fa:16:3e:00:cb:60", "network": {"id": "e012ca9f-236a-42fd-a444-759508fbba7b", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-567894073-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.242", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b73ee7e490914f54925597f38c8cc05b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9f3a2eb5-353f-45c5-a73b-869626f4bb13", "external-id": "nsx-vlan-transportzone-411", "segmentation_id": 411, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4c2c0fb5-06", "ovs_interfaceid": "4c2c0fb5-064b-4c53-9498-77b9ddc16884", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 932.850030] env[61839]: DEBUG oslo_concurrency.lockutils [None req-bd480c71-e10b-4855-b0e1-479a82aad289 tempest-ServerGroupTestJSON-1627784352 tempest-ServerGroupTestJSON-1627784352-project-member] Lock "65f34f9e-353a-4f94-8f79-9bda89451885" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 15.746s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 932.858838] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-3cb5a8d7-2dab-4fe1-9636-3e19d5eeb851 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: e265dcd3-6ab5-44b1-85be-bad934ebdb79] Powering off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 932.858838] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bd4434d1-b251-446f-9d70-6468935c04e5 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.869542] env[61839]: DEBUG oslo_vmware.api [None req-3cb5a8d7-2dab-4fe1-9636-3e19d5eeb851 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Waiting for the task: (returnval){ [ 932.869542] env[61839]: value = "task-1314760" [ 932.869542] env[61839]: _type = "Task" [ 932.869542] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 932.875628] env[61839]: INFO nova.compute.manager [None req-c179ae22-1694-4b99-9f62-5d0902929126 tempest-ServerActionsV293TestJSON-151230769 tempest-ServerActionsV293TestJSON-151230769-project-member] [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af] Took 0.56 seconds to detach 1 volumes for instance. 
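The "Waiting for the task: (returnval){ ... _type = "Task" } to complete" / "Task: {...} completed successfully" pairs are oslo.vmware's task polling against vCenter. A minimal sketch of that session/task pattern; the host, credentials and VM reference are placeholders, not values from this log, and the calls need a live vCenter to run:

    from oslo_vmware import api

    session = api.VMwareAPISession(
        'vc.example.test', 'user', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    vm_ref = ...  # a VirtualMachine managed-object reference found elsewhere
    # invoke_api proxies the SOAP call through the session's VIM client;
    # *_Task methods return a Task moref, which wait_for_task polls,
    # producing the "progress is N%" DEBUG lines seen above until the
    # task succeeds or raises.
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    session.wait_for_task(task)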
[ 932.882172] env[61839]: DEBUG nova.compute.manager [None req-c179ae22-1694-4b99-9f62-5d0902929126 tempest-ServerActionsV293TestJSON-151230769 tempest-ServerActionsV293TestJSON-151230769-project-member] [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af] Deleting volume: b82148bd-7b88-45c7-b95b-5f60f19c65e5 {{(pid=61839) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3247}} [ 932.889075] env[61839]: DEBUG oslo_vmware.api [None req-3cb5a8d7-2dab-4fe1-9636-3e19d5eeb851 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': task-1314760, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.084573] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-bf3bcffb-6c34-4e16-a353-b3f853008852 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: 694a5d4b-3673-406b-a24a-d37fad33e549] Preparing fetch location {{(pid=61839) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 933.084899] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-bf3bcffb-6c34-4e16-a353-b3f853008852 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: 694a5d4b-3673-406b-a24a-d37fad33e549] Fetch image to [datastore1] OSTACK_IMG_b68353e4-17e4-4ba9-bdda-d6cd4c813b11/OSTACK_IMG_b68353e4-17e4-4ba9-bdda-d6cd4c813b11.vmdk {{(pid=61839) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 933.085178] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-bf3bcffb-6c34-4e16-a353-b3f853008852 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: 694a5d4b-3673-406b-a24a-d37fad33e549] Downloading stream optimized image d630a97f-7e84-429e-8613-52457a48a10a to [datastore1] OSTACK_IMG_b68353e4-17e4-4ba9-bdda-d6cd4c813b11/OSTACK_IMG_b68353e4-17e4-4ba9-bdda-d6cd4c813b11.vmdk on the data store datastore1 as vApp {{(pid=61839) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 933.085420] env[61839]: DEBUG nova.virt.vmwareapi.images [None req-bf3bcffb-6c34-4e16-a353-b3f853008852 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: 694a5d4b-3673-406b-a24a-d37fad33e549] Downloading image file data d630a97f-7e84-429e-8613-52457a48a10a to the ESX as VM named 'OSTACK_IMG_b68353e4-17e4-4ba9-bdda-d6cd4c813b11' {{(pid=61839) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 933.181214] env[61839]: DEBUG oslo_vmware.rw_handles [None req-bf3bcffb-6c34-4e16-a353-b3f853008852 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 933.181214] env[61839]: value = "resgroup-9" [ 933.181214] env[61839]: _type = "ResourcePool" [ 933.181214] env[61839]: }. 
{{(pid=61839) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 933.181412] env[61839]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-da4e8e3b-9e64-4b79-ac13-dc339fd940de {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.206677] env[61839]: DEBUG oslo_vmware.rw_handles [None req-bf3bcffb-6c34-4e16-a353-b3f853008852 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Lease: (returnval){ [ 933.206677] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]5216478d-1768-7462-8d41-0004c8241d1a" [ 933.206677] env[61839]: _type = "HttpNfcLease" [ 933.206677] env[61839]: } obtained for vApp import into resource pool (val){ [ 933.206677] env[61839]: value = "resgroup-9" [ 933.206677] env[61839]: _type = "ResourcePool" [ 933.206677] env[61839]: }. {{(pid=61839) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 933.207409] env[61839]: DEBUG oslo_vmware.api [None req-bf3bcffb-6c34-4e16-a353-b3f853008852 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Waiting for the lease: (returnval){ [ 933.207409] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]5216478d-1768-7462-8d41-0004c8241d1a" [ 933.207409] env[61839]: _type = "HttpNfcLease" [ 933.207409] env[61839]: } to be ready. {{(pid=61839) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 933.215737] env[61839]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 933.215737] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]5216478d-1768-7462-8d41-0004c8241d1a" [ 933.215737] env[61839]: _type = "HttpNfcLease" [ 933.215737] env[61839]: } is initializing. 
{{(pid=61839) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 933.265590] env[61839]: DEBUG nova.compute.manager [req-3f94ae5a-55ea-4d89-b071-e24be9b08699 req-b8f1e50a-8d41-4456-a16f-cc587bfd7b0a service nova] [instance: d718d866-dd6c-4332-b63a-be6850a5a785] Received event network-vif-plugged-5d041d96-4a6e-44d5-a31a-e597194524e0 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 933.265867] env[61839]: DEBUG oslo_concurrency.lockutils [req-3f94ae5a-55ea-4d89-b071-e24be9b08699 req-b8f1e50a-8d41-4456-a16f-cc587bfd7b0a service nova] Acquiring lock "d718d866-dd6c-4332-b63a-be6850a5a785-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 933.265991] env[61839]: DEBUG oslo_concurrency.lockutils [req-3f94ae5a-55ea-4d89-b071-e24be9b08699 req-b8f1e50a-8d41-4456-a16f-cc587bfd7b0a service nova] Lock "d718d866-dd6c-4332-b63a-be6850a5a785-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 933.266192] env[61839]: DEBUG oslo_concurrency.lockutils [req-3f94ae5a-55ea-4d89-b071-e24be9b08699 req-b8f1e50a-8d41-4456-a16f-cc587bfd7b0a service nova] Lock "d718d866-dd6c-4332-b63a-be6850a5a785-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 933.266371] env[61839]: DEBUG nova.compute.manager [req-3f94ae5a-55ea-4d89-b071-e24be9b08699 req-b8f1e50a-8d41-4456-a16f-cc587bfd7b0a service nova] [instance: d718d866-dd6c-4332-b63a-be6850a5a785] No waiting events found dispatching network-vif-plugged-5d041d96-4a6e-44d5-a31a-e597194524e0 {{(pid=61839) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 933.266543] env[61839]: WARNING nova.compute.manager [req-3f94ae5a-55ea-4d89-b071-e24be9b08699 req-b8f1e50a-8d41-4456-a16f-cc587bfd7b0a service nova] [instance: d718d866-dd6c-4332-b63a-be6850a5a785] Received unexpected event network-vif-plugged-5d041d96-4a6e-44d5-a31a-e597194524e0 for instance with vm_state building and task_state spawning. 
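The HttpNfcLease records above ("is initializing", later "is ready" followed by a VMDK URL) trace a vApp import lease: ImportVApp returns a lease that must reach the ready state before the disk URL can be written to. In outline, reusing the session from the previous sketch, with placeholder morefs and spec (rp_ref, folder_ref, import_spec are built elsewhere, e.g. the resgroup-9 pool above):

    rp_ref, folder_ref, import_spec = ..., ..., ...
    lease = session.invoke_api(session.vim, 'ImportVApp', rp_ref,
                               spec=import_spec, folder=folder_ref)
    # Polls the lease state until it leaves "initializing", matching the
    # "_poll_lease ... is initializing / is ready" lines in this trace.
    session.wait_for_lease_ready(lease)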
[ 933.285235] env[61839]: DEBUG nova.network.neutron [None req-cbc82b7c-e238-4c12-a25e-cd1b247d627b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: d718d866-dd6c-4332-b63a-be6850a5a785] Successfully updated port: 5d041d96-4a6e-44d5-a31a-e597194524e0 {{(pid=61839) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 933.293158] env[61839]: DEBUG oslo_concurrency.lockutils [req-7feefbf2-35c5-424e-b12b-aad57688b484 req-338b7af8-e54a-48f0-9a09-d62994072f3b service nova] Releasing lock "refresh_cache-694a5d4b-3673-406b-a24a-d37fad33e549" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 933.308885] env[61839]: DEBUG nova.compute.manager [req-06c1a8f3-7477-45b2-b109-74c2565bf2c9 req-f5fa07b6-9011-40e5-b125-abff8025f406 service nova] [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af] Received event network-vif-deleted-7dee6a06-890e-4663-a919-d96beac69d5d {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 933.384276] env[61839]: DEBUG oslo_vmware.api [None req-3cb5a8d7-2dab-4fe1-9636-3e19d5eeb851 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': task-1314760, 'name': PowerOffVM_Task, 'duration_secs': 0.236433} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 933.387351] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-3cb5a8d7-2dab-4fe1-9636-3e19d5eeb851 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: e265dcd3-6ab5-44b1-85be-bad934ebdb79] Powered off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 933.393054] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f3780fe-ab98-4b18-809a-49ebaf895119 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.420979] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97ff46c5-66d6-4c35-9d52-929fd3e10e44 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.444858] env[61839]: DEBUG oslo_concurrency.lockutils [None req-c179ae22-1694-4b99-9f62-5d0902929126 tempest-ServerActionsV293TestJSON-151230769 tempest-ServerActionsV293TestJSON-151230769-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 933.717572] env[61839]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 933.717572] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]5216478d-1768-7462-8d41-0004c8241d1a" [ 933.717572] env[61839]: _type = "HttpNfcLease" [ 933.717572] env[61839]: } is initializing. 
{{(pid=61839) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 933.735161] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56f5fe5b-8b08-4f49-afc4-a8aab9ac82bd {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.744207] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03f3a96b-e0cf-4627-b2e8-f5496544ea5a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.779978] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d664ec07-595d-4bbb-8526-f4d48e0363bd {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.789228] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22a32549-6599-43d4-a8c6-dc5eb862e448 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.794220] env[61839]: DEBUG oslo_concurrency.lockutils [None req-cbc82b7c-e238-4c12-a25e-cd1b247d627b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Acquiring lock "refresh_cache-d718d866-dd6c-4332-b63a-be6850a5a785" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 933.794438] env[61839]: DEBUG oslo_concurrency.lockutils [None req-cbc82b7c-e238-4c12-a25e-cd1b247d627b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Acquired lock "refresh_cache-d718d866-dd6c-4332-b63a-be6850a5a785" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 933.794671] env[61839]: DEBUG nova.network.neutron [None req-cbc82b7c-e238-4c12-a25e-cd1b247d627b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: d718d866-dd6c-4332-b63a-be6850a5a785] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 933.808816] env[61839]: DEBUG nova.compute.provider_tree [None req-232c9fff-1e94-49d3-8695-e77d90e281a0 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 933.937244] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-3cb5a8d7-2dab-4fe1-9636-3e19d5eeb851 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: e265dcd3-6ab5-44b1-85be-bad934ebdb79] Creating Snapshot of the VM instance {{(pid=61839) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 933.937244] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-a9374da3-a606-40fa-8690-f0bbfde07ed4 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.949130] env[61839]: DEBUG oslo_vmware.api [None req-3cb5a8d7-2dab-4fe1-9636-3e19d5eeb851 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] 
Waiting for the task: (returnval){ [ 933.949130] env[61839]: value = "task-1314763" [ 933.949130] env[61839]: _type = "Task" [ 933.949130] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 933.961071] env[61839]: DEBUG oslo_vmware.api [None req-3cb5a8d7-2dab-4fe1-9636-3e19d5eeb851 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': task-1314763, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 934.216148] env[61839]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 934.216148] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]5216478d-1768-7462-8d41-0004c8241d1a" [ 934.216148] env[61839]: _type = "HttpNfcLease" [ 934.216148] env[61839]: } is ready. {{(pid=61839) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 934.216464] env[61839]: DEBUG oslo_vmware.rw_handles [None req-bf3bcffb-6c34-4e16-a353-b3f853008852 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 934.216464] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]5216478d-1768-7462-8d41-0004c8241d1a" [ 934.216464] env[61839]: _type = "HttpNfcLease" [ 934.216464] env[61839]: }. {{(pid=61839) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 934.217260] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f675ff0-3be0-40a7-8ad8-b5e409f0b0c7 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.226114] env[61839]: DEBUG oslo_vmware.rw_handles [None req-bf3bcffb-6c34-4e16-a353-b3f853008852 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52996f3e-8ca2-8560-74ab-0a670f23c59a/disk-0.vmdk from lease info. {{(pid=61839) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 934.226355] env[61839]: DEBUG oslo_vmware.rw_handles [None req-bf3bcffb-6c34-4e16-a353-b3f853008852 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Creating HTTP connection to write to file with size = 31667712 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52996f3e-8ca2-8560-74ab-0a670f23c59a/disk-0.vmdk. 
{{(pid=61839) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 934.294621] env[61839]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-0e3d41c0-315a-4af5-9dcf-a7254bcb1633 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.312875] env[61839]: DEBUG nova.scheduler.client.report [None req-232c9fff-1e94-49d3-8695-e77d90e281a0 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 934.356228] env[61839]: DEBUG nova.network.neutron [None req-cbc82b7c-e238-4c12-a25e-cd1b247d627b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: d718d866-dd6c-4332-b63a-be6850a5a785] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 934.460597] env[61839]: DEBUG oslo_vmware.api [None req-3cb5a8d7-2dab-4fe1-9636-3e19d5eeb851 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': task-1314763, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 934.710132] env[61839]: DEBUG nova.network.neutron [None req-cbc82b7c-e238-4c12-a25e-cd1b247d627b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: d718d866-dd6c-4332-b63a-be6850a5a785] Updating instance_info_cache with network_info: [{"id": "5d041d96-4a6e-44d5-a31a-e597194524e0", "address": "fa:16:3e:a9:bd:d2", "network": {"id": "41c98894-de91-45eb-a390-6217e0f9dca5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1040806357-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7e03467b7fba46a9aac1562a1cb8368e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "572b7281-aad3-45fa-9cb2-fc1c70569948", "external-id": "nsx-vlan-transportzone-722", "segmentation_id": 722, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5d041d96-4a", "ovs_interfaceid": "5d041d96-4a6e-44d5-a31a-e597194524e0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 934.821020] env[61839]: DEBUG oslo_concurrency.lockutils [None req-232c9fff-1e94-49d3-8695-e77d90e281a0 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.510s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 934.821020] env[61839]: DEBUG nova.compute.manager [None req-232c9fff-1e94-49d3-8695-e77d90e281a0 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] [instance: d4a8c153-7585-4c78-8aa4-56077e0a7af6] Start building networks asynchronously for instance. 
{{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 934.823351] env[61839]: DEBUG oslo_concurrency.lockutils [None req-da461224-3a6a-4880-b121-dec82bb4b069 tempest-ServerMetadataTestJSON-214230339 tempest-ServerMetadataTestJSON-214230339-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 9.597s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 934.823893] env[61839]: DEBUG nova.objects.instance [None req-da461224-3a6a-4880-b121-dec82bb4b069 tempest-ServerMetadataTestJSON-214230339 tempest-ServerMetadataTestJSON-214230339-project-member] Lazy-loading 'resources' on Instance uuid bac4c882-a23d-412f-ae98-f4f21d86681a {{(pid=61839) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 934.963441] env[61839]: DEBUG oslo_vmware.api [None req-3cb5a8d7-2dab-4fe1-9636-3e19d5eeb851 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': task-1314763, 'name': CreateSnapshot_Task, 'duration_secs': 0.535228} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 934.965372] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-3cb5a8d7-2dab-4fe1-9636-3e19d5eeb851 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: e265dcd3-6ab5-44b1-85be-bad934ebdb79] Created Snapshot of the VM instance {{(pid=61839) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 934.966480] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b15ceb9c-ad63-4094-99d6-7e2a0ad08962 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.217969] env[61839]: DEBUG oslo_concurrency.lockutils [None req-cbc82b7c-e238-4c12-a25e-cd1b247d627b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Releasing lock "refresh_cache-d718d866-dd6c-4332-b63a-be6850a5a785" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 935.218325] env[61839]: DEBUG nova.compute.manager [None req-cbc82b7c-e238-4c12-a25e-cd1b247d627b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: d718d866-dd6c-4332-b63a-be6850a5a785] Instance network_info: |[{"id": "5d041d96-4a6e-44d5-a31a-e597194524e0", "address": "fa:16:3e:a9:bd:d2", "network": {"id": "41c98894-de91-45eb-a390-6217e0f9dca5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1040806357-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7e03467b7fba46a9aac1562a1cb8368e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "572b7281-aad3-45fa-9cb2-fc1c70569948", "external-id": "nsx-vlan-transportzone-722", "segmentation_id": 722, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5d041d96-4a", "ovs_interfaceid": 
"5d041d96-4a6e-44d5-a31a-e597194524e0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 935.218881] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-cbc82b7c-e238-4c12-a25e-cd1b247d627b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: d718d866-dd6c-4332-b63a-be6850a5a785] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a9:bd:d2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '572b7281-aad3-45fa-9cb2-fc1c70569948', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5d041d96-4a6e-44d5-a31a-e597194524e0', 'vif_model': 'vmxnet3'}] {{(pid=61839) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 935.230333] env[61839]: DEBUG oslo.service.loopingcall [None req-cbc82b7c-e238-4c12-a25e-cd1b247d627b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 935.233489] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d718d866-dd6c-4332-b63a-be6850a5a785] Creating VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 935.234404] env[61839]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-19224764-73a9-4f9b-965f-881c23ef70b5 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.276446] env[61839]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 935.276446] env[61839]: value = "task-1314764" [ 935.276446] env[61839]: _type = "Task" [ 935.276446] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 935.294812] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314764, 'name': CreateVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.328304] env[61839]: DEBUG nova.compute.utils [None req-232c9fff-1e94-49d3-8695-e77d90e281a0 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Using /dev/sd instead of None {{(pid=61839) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 935.337557] env[61839]: DEBUG nova.compute.manager [None req-232c9fff-1e94-49d3-8695-e77d90e281a0 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] [instance: d4a8c153-7585-4c78-8aa4-56077e0a7af6] Allocating IP information in the background. 
{{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 935.337823] env[61839]: DEBUG nova.network.neutron [None req-232c9fff-1e94-49d3-8695-e77d90e281a0 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] [instance: d4a8c153-7585-4c78-8aa4-56077e0a7af6] allocate_for_instance() {{(pid=61839) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 935.352556] env[61839]: DEBUG nova.compute.manager [req-15aa8514-cd8d-406c-b3d3-06fd05031330 req-1f441a7f-79e1-4658-a067-7ff0144035fa service nova] [instance: d718d866-dd6c-4332-b63a-be6850a5a785] Received event network-changed-5d041d96-4a6e-44d5-a31a-e597194524e0 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 935.352866] env[61839]: DEBUG nova.compute.manager [req-15aa8514-cd8d-406c-b3d3-06fd05031330 req-1f441a7f-79e1-4658-a067-7ff0144035fa service nova] [instance: d718d866-dd6c-4332-b63a-be6850a5a785] Refreshing instance network info cache due to event network-changed-5d041d96-4a6e-44d5-a31a-e597194524e0. {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 935.353321] env[61839]: DEBUG oslo_concurrency.lockutils [req-15aa8514-cd8d-406c-b3d3-06fd05031330 req-1f441a7f-79e1-4658-a067-7ff0144035fa service nova] Acquiring lock "refresh_cache-d718d866-dd6c-4332-b63a-be6850a5a785" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 935.353685] env[61839]: DEBUG oslo_concurrency.lockutils [req-15aa8514-cd8d-406c-b3d3-06fd05031330 req-1f441a7f-79e1-4658-a067-7ff0144035fa service nova] Acquired lock "refresh_cache-d718d866-dd6c-4332-b63a-be6850a5a785" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 935.353851] env[61839]: DEBUG nova.network.neutron [req-15aa8514-cd8d-406c-b3d3-06fd05031330 req-1f441a7f-79e1-4658-a067-7ff0144035fa service nova] [instance: d718d866-dd6c-4332-b63a-be6850a5a785] Refreshing network info cache for port 5d041d96-4a6e-44d5-a31a-e597194524e0 {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 935.408162] env[61839]: DEBUG nova.policy [None req-232c9fff-1e94-49d3-8695-e77d90e281a0 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ef80f7db912a4e33a5a50e7432a01ee7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9d28bf7713204dfb9682d9c002cb5449', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61839) authorize /opt/stack/nova/nova/policy.py:201}} [ 935.492886] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-3cb5a8d7-2dab-4fe1-9636-3e19d5eeb851 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: e265dcd3-6ab5-44b1-85be-bad934ebdb79] Creating linked-clone VM from snapshot {{(pid=61839) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 935.494395] env[61839]: DEBUG oslo_vmware.rw_handles [None req-bf3bcffb-6c34-4e16-a353-b3f853008852 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] 
Completed reading data from the image iterator. {{(pid=61839) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 935.494661] env[61839]: DEBUG oslo_vmware.rw_handles [None req-bf3bcffb-6c34-4e16-a353-b3f853008852 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52996f3e-8ca2-8560-74ab-0a670f23c59a/disk-0.vmdk. {{(pid=61839) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 935.498237] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-9c9e1dc3-1de9-4d5b-b873-c8aad7d093bb {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.501956] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55b6f6d2-6d1b-4564-a818-da482ed94223 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.510944] env[61839]: DEBUG oslo_vmware.rw_handles [None req-bf3bcffb-6c34-4e16-a353-b3f853008852 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52996f3e-8ca2-8560-74ab-0a670f23c59a/disk-0.vmdk is in state: ready. {{(pid=61839) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 935.511048] env[61839]: DEBUG oslo_vmware.rw_handles [None req-bf3bcffb-6c34-4e16-a353-b3f853008852 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Releasing lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52996f3e-8ca2-8560-74ab-0a670f23c59a/disk-0.vmdk. {{(pid=61839) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 935.512474] env[61839]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-9cfc9380-8d85-42a7-8c2f-f67b751a4115 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.514318] env[61839]: DEBUG oslo_vmware.api [None req-3cb5a8d7-2dab-4fe1-9636-3e19d5eeb851 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Waiting for the task: (returnval){ [ 935.514318] env[61839]: value = "task-1314765" [ 935.514318] env[61839]: _type = "Task" [ 935.514318] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 935.526204] env[61839]: DEBUG oslo_vmware.api [None req-3cb5a8d7-2dab-4fe1-9636-3e19d5eeb851 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': task-1314765, 'name': CloneVM_Task} progress is 0%. 
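The network-changed event for port 5d041d96-4a6e-44d5-a31a-e597194524e0 above triggers the standard cache-refresh dance: take the per-instance "refresh_cache-<uuid>" lock, rebuild network_info from Neutron, write it back to the instance info cache, release. A stdlib sketch of that pattern; fetch_from_neutron() is a hypothetical placeholder for the Neutron query.

# Sketch of the refresh_cache-<uuid> pattern around network-changed events.
# fetch_from_neutron() is a hypothetical placeholder; the per-instance lock
# mirrors the Acquiring/Acquired/Releasing "refresh_cache-..." entries.
import threading
from collections import defaultdict

_cache_locks = defaultdict(threading.Lock)
_info_cache = {}

def refresh_instance_nw_info(instance_uuid, fetch_from_neutron):
    with _cache_locks['refresh_cache-%s' % instance_uuid]:
        network_info = fetch_from_neutron(instance_uuid)  # list of VIF dicts
        _info_cache[instance_uuid] = network_info         # update the cache
        return network_info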
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.715590] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c71046c-aced-478b-a6de-caafd75f0380 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.725481] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02ddc2f4-3e17-417d-8c7e-72c6475a5bff {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.760876] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b897679-4d91-43d8-82c9-05996705f3e9 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.770246] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9a30513-4f8f-40ad-9a5d-2a9f2e2a8c76 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.789973] env[61839]: DEBUG nova.compute.provider_tree [None req-da461224-3a6a-4880-b121-dec82bb4b069 tempest-ServerMetadataTestJSON-214230339 tempest-ServerMetadataTestJSON-214230339-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 935.800566] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314764, 'name': CreateVM_Task, 'duration_secs': 0.423296} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 935.801485] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d718d866-dd6c-4332-b63a-be6850a5a785] Created VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 935.802324] env[61839]: DEBUG oslo_concurrency.lockutils [None req-cbc82b7c-e238-4c12-a25e-cd1b247d627b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 935.802646] env[61839]: DEBUG oslo_concurrency.lockutils [None req-cbc82b7c-e238-4c12-a25e-cd1b247d627b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 935.802952] env[61839]: DEBUG oslo_concurrency.lockutils [None req-cbc82b7c-e238-4c12-a25e-cd1b247d627b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 935.803536] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bc9bceff-3252-4699-a430-bbc41677f365 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
935.810977] env[61839]: DEBUG oslo_vmware.api [None req-cbc82b7c-e238-4c12-a25e-cd1b247d627b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Waiting for the task: (returnval){ [ 935.810977] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]527532a9-0f3d-f1b3-2345-c5171aa29650" [ 935.810977] env[61839]: _type = "Task" [ 935.810977] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 935.824316] env[61839]: DEBUG oslo_vmware.api [None req-cbc82b7c-e238-4c12-a25e-cd1b247d627b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]527532a9-0f3d-f1b3-2345-c5171aa29650, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.850473] env[61839]: DEBUG nova.compute.manager [None req-232c9fff-1e94-49d3-8695-e77d90e281a0 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] [instance: d4a8c153-7585-4c78-8aa4-56077e0a7af6] Start building block device mappings for instance. {{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 935.859691] env[61839]: DEBUG nova.network.neutron [None req-232c9fff-1e94-49d3-8695-e77d90e281a0 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] [instance: d4a8c153-7585-4c78-8aa4-56077e0a7af6] Successfully created port: 648944d5-7ed5-40cb-8a22-8ea3244538d7 {{(pid=61839) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 935.972041] env[61839]: DEBUG oslo_vmware.rw_handles [None req-bf3bcffb-6c34-4e16-a353-b3f853008852 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Closed VMDK write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52996f3e-8ca2-8560-74ab-0a670f23c59a/disk-0.vmdk. 
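The rw_handles entries trace the full import-lease lifecycle for writing the image VMDK: create an HttpNfcLease, poll it until ready, pull the disk-0.vmdk URL out of the lease info, write the bytes while periodically invoking HttpNfcLeaseProgress so vCenter does not expire the lease, then close the write handle. A condensed sketch under those assumptions; the lease.*() methods are hypothetical stand-ins for the SOAP bindings.

# Condensed sketch of the HttpNfcLease write flow in the entries above.
# lease.*() methods are hypothetical stand-ins for the real SOAP calls
# (HttpNfcLeaseProgress, HttpNfcLeaseComplete, ...).
import time

def upload_vmdk(lease, image_chunks, total_size, put_bytes):
    while lease.state() == 'initializing':
        time.sleep(0.5)                      # "_poll_lease ... is initializing"
    assert lease.state() == 'ready'          # "Lease ... is ready"
    url = lease.vmdk_url()                   # "Found VMDK URL: ... disk-0.vmdk"
    written = 0
    for chunk in image_chunks:               # HTTP connection with known size
        put_bytes(url, chunk)
        written += len(chunk)
        # Keepalive: report percent done so vCenter keeps the lease alive.
        lease.progress(written * 100 // total_size)
    lease.complete()                         # HttpNfcLeaseComplete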
{{(pid=61839) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 935.972362] env[61839]: INFO nova.virt.vmwareapi.images [None req-bf3bcffb-6c34-4e16-a353-b3f853008852 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: 694a5d4b-3673-406b-a24a-d37fad33e549] Downloaded image file data d630a97f-7e84-429e-8613-52457a48a10a [ 935.973326] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-184320db-2fe9-42fd-ac8c-f1d965592c6d {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.995981] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-49eff33e-e14e-42bb-825c-0c0befdd23db {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.024744] env[61839]: INFO nova.virt.vmwareapi.images [None req-bf3bcffb-6c34-4e16-a353-b3f853008852 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: 694a5d4b-3673-406b-a24a-d37fad33e549] The imported VM was unregistered [ 936.027102] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-bf3bcffb-6c34-4e16-a353-b3f853008852 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: 694a5d4b-3673-406b-a24a-d37fad33e549] Caching image {{(pid=61839) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 936.027438] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-bf3bcffb-6c34-4e16-a353-b3f853008852 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Creating directory with path [datastore1] devstack-image-cache_base/d630a97f-7e84-429e-8613-52457a48a10a {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 936.031368] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0a060977-6c44-42fb-88ea-0d69b3df5e18 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.034824] env[61839]: DEBUG oslo_vmware.api [None req-3cb5a8d7-2dab-4fe1-9636-3e19d5eeb851 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': task-1314765, 'name': CloneVM_Task} progress is 94%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.045451] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-bf3bcffb-6c34-4e16-a353-b3f853008852 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Created directory with path [datastore1] devstack-image-cache_base/d630a97f-7e84-429e-8613-52457a48a10a {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 936.045676] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-bf3bcffb-6c34-4e16-a353-b3f853008852 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_b68353e4-17e4-4ba9-bdda-d6cd4c813b11/OSTACK_IMG_b68353e4-17e4-4ba9-bdda-d6cd4c813b11.vmdk to [datastore1] devstack-image-cache_base/d630a97f-7e84-429e-8613-52457a48a10a/d630a97f-7e84-429e-8613-52457a48a10a.vmdk. 
{{(pid=61839) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 936.045967] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-9b70bb4c-f1eb-4551-b6f7-3b543908ead3 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.053732] env[61839]: DEBUG oslo_vmware.api [None req-bf3bcffb-6c34-4e16-a353-b3f853008852 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Waiting for the task: (returnval){ [ 936.053732] env[61839]: value = "task-1314767" [ 936.053732] env[61839]: _type = "Task" [ 936.053732] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 936.066251] env[61839]: DEBUG oslo_vmware.api [None req-bf3bcffb-6c34-4e16-a353-b3f853008852 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Task: {'id': task-1314767, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.180821] env[61839]: DEBUG nova.network.neutron [req-15aa8514-cd8d-406c-b3d3-06fd05031330 req-1f441a7f-79e1-4658-a067-7ff0144035fa service nova] [instance: d718d866-dd6c-4332-b63a-be6850a5a785] Updated VIF entry in instance network info cache for port 5d041d96-4a6e-44d5-a31a-e597194524e0. {{(pid=61839) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 936.181390] env[61839]: DEBUG nova.network.neutron [req-15aa8514-cd8d-406c-b3d3-06fd05031330 req-1f441a7f-79e1-4658-a067-7ff0144035fa service nova] [instance: d718d866-dd6c-4332-b63a-be6850a5a785] Updating instance_info_cache with network_info: [{"id": "5d041d96-4a6e-44d5-a31a-e597194524e0", "address": "fa:16:3e:a9:bd:d2", "network": {"id": "41c98894-de91-45eb-a390-6217e0f9dca5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1040806357-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7e03467b7fba46a9aac1562a1cb8368e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "572b7281-aad3-45fa-9cb2-fc1c70569948", "external-id": "nsx-vlan-transportzone-722", "segmentation_id": 722, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5d041d96-4a", "ovs_interfaceid": "5d041d96-4a6e-44d5-a31a-e597194524e0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 936.295877] env[61839]: DEBUG nova.scheduler.client.report [None req-da461224-3a6a-4880-b121-dec82bb4b069 tempest-ServerMetadataTestJSON-214230339 tempest-ServerMetadataTestJSON-214230339-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 
196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 936.329926] env[61839]: DEBUG oslo_vmware.api [None req-cbc82b7c-e238-4c12-a25e-cd1b247d627b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]527532a9-0f3d-f1b3-2345-c5171aa29650, 'name': SearchDatastore_Task, 'duration_secs': 0.034087} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 936.329926] env[61839]: DEBUG oslo_concurrency.lockutils [None req-cbc82b7c-e238-4c12-a25e-cd1b247d627b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 936.329926] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-cbc82b7c-e238-4c12-a25e-cd1b247d627b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: d718d866-dd6c-4332-b63a-be6850a5a785] Processing image e497cc62-282a-4a70-9770-22d80d8a1013 {{(pid=61839) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 936.330079] env[61839]: DEBUG oslo_concurrency.lockutils [None req-cbc82b7c-e238-4c12-a25e-cd1b247d627b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 936.330230] env[61839]: DEBUG oslo_concurrency.lockutils [None req-cbc82b7c-e238-4c12-a25e-cd1b247d627b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 936.330435] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-cbc82b7c-e238-4c12-a25e-cd1b247d627b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 936.330777] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8a755072-7d4e-437d-8db3-bd111642c6ee {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.348706] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-cbc82b7c-e238-4c12-a25e-cd1b247d627b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 936.348925] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None 
req-cbc82b7c-e238-4c12-a25e-cd1b247d627b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61839) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 936.349889] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-249178da-04de-4df6-83db-d8e9e9d06fe7 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.361512] env[61839]: DEBUG oslo_vmware.api [None req-cbc82b7c-e238-4c12-a25e-cd1b247d627b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Waiting for the task: (returnval){ [ 936.361512] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]522b87f9-9ba5-6328-4eea-aa65c2805be5" [ 936.361512] env[61839]: _type = "Task" [ 936.361512] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 936.373845] env[61839]: DEBUG oslo_vmware.api [None req-cbc82b7c-e238-4c12-a25e-cd1b247d627b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]522b87f9-9ba5-6328-4eea-aa65c2805be5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.527841] env[61839]: DEBUG oslo_vmware.api [None req-3cb5a8d7-2dab-4fe1-9636-3e19d5eeb851 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': task-1314765, 'name': CloneVM_Task} progress is 94%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.568447] env[61839]: DEBUG oslo_vmware.api [None req-bf3bcffb-6c34-4e16-a353-b3f853008852 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Task: {'id': task-1314767, 'name': MoveVirtualDisk_Task} progress is 21%. 
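_fetch_image_if_missing, visible above, serializes on the cached VMDK path ("[datastore1] devstack-image-cache_base/<image id>/<image id>.vmdk"), uses a SearchDatastore_Task to test whether the disk is already cached, and only then downloads into a temporary OSTACK_IMG_* directory and moves the disk into the cache. A simplified local-filesystem sketch of that check-then-populate pattern; paths and helpers are illustrative only.

# Simplified check-then-populate sketch of the devstack-image-cache_base
# flow (lock on the cached path, search, download to a temp dir, move).
# Local-filesystem stand-in: datastore operations become os/shutil calls.
import os, shutil, threading
from collections import defaultdict

_path_locks = defaultdict(threading.Lock)

def fetch_image_if_missing(cache_root, image_id, download_to):
    cached = os.path.join(cache_root, image_id, '%s.vmdk' % image_id)
    with _path_locks[cached]:                      # lock "[datastore1] ...vmdk"
        if os.path.exists(cached):                 # SearchDatastore_Task hit
            return cached
        tmp_dir = os.path.join(cache_root, 'OSTACK_IMG_tmp_%s' % image_id)
        os.makedirs(tmp_dir, exist_ok=True)
        src = download_to(tmp_dir)                 # import via HttpNfcLease
        os.makedirs(os.path.dirname(cached), exist_ok=True)  # MakeDirectory
        shutil.move(src, cached)                   # MoveVirtualDisk_Task
        shutil.rmtree(tmp_dir, ignore_errors=True)
        return cached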
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.685734] env[61839]: DEBUG oslo_concurrency.lockutils [req-15aa8514-cd8d-406c-b3d3-06fd05031330 req-1f441a7f-79e1-4658-a067-7ff0144035fa service nova] Releasing lock "refresh_cache-d718d866-dd6c-4332-b63a-be6850a5a785" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 936.805050] env[61839]: DEBUG oslo_concurrency.lockutils [None req-da461224-3a6a-4880-b121-dec82bb4b069 tempest-ServerMetadataTestJSON-214230339 tempest-ServerMetadataTestJSON-214230339-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.979s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 936.805050] env[61839]: DEBUG oslo_concurrency.lockutils [None req-fd9d3f07-46d8-42a7-a204-834fbf9f5be1 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.980s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 936.808892] env[61839]: INFO nova.compute.claims [None req-fd9d3f07-46d8-42a7-a204-834fbf9f5be1 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: 21e1c5b2-9865-457b-87c8-ce56c3c7b8f9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 936.846930] env[61839]: INFO nova.scheduler.client.report [None req-da461224-3a6a-4880-b121-dec82bb4b069 tempest-ServerMetadataTestJSON-214230339 tempest-ServerMetadataTestJSON-214230339-project-member] Deleted allocations for instance bac4c882-a23d-412f-ae98-f4f21d86681a [ 936.866415] env[61839]: DEBUG nova.compute.manager [None req-232c9fff-1e94-49d3-8695-e77d90e281a0 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] [instance: d4a8c153-7585-4c78-8aa4-56077e0a7af6] Start spawning the instance on the hypervisor. {{(pid=61839) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 936.883247] env[61839]: DEBUG oslo_vmware.api [None req-cbc82b7c-e238-4c12-a25e-cd1b247d627b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]522b87f9-9ba5-6328-4eea-aa65c2805be5, 'name': SearchDatastore_Task, 'duration_secs': 0.062271} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 936.885827] env[61839]: DEBUG oslo_vmware.rw_handles [None req-1e94be6d-33e1-4c83-9b5e-75a1fbfe5edf tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52effe6d-aa41-4863-a968-04de54917d67/disk-0.vmdk. 
{{(pid=61839) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 936.889739] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0551dd34-d221-4f6e-a54a-7c94f3ad60fb {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.891830] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b448e8d9-29e4-4b69-9cc8-266b08deddb1 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.906177] env[61839]: DEBUG nova.virt.hardware [None req-232c9fff-1e94-49d3-8695-e77d90e281a0 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-18T16:51:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-18T16:50:57Z,direct_url=,disk_format='vmdk',id=e497cc62-282a-4a70-9770-22d80d8a1013,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a98e034276e44534a9feace637762da7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-18T16:50:58Z,virtual_size=,visibility=), allow threads: False {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 936.906472] env[61839]: DEBUG nova.virt.hardware [None req-232c9fff-1e94-49d3-8695-e77d90e281a0 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Flavor limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 936.906638] env[61839]: DEBUG nova.virt.hardware [None req-232c9fff-1e94-49d3-8695-e77d90e281a0 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Image limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 936.906830] env[61839]: DEBUG nova.virt.hardware [None req-232c9fff-1e94-49d3-8695-e77d90e281a0 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Flavor pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 936.906983] env[61839]: DEBUG nova.virt.hardware [None req-232c9fff-1e94-49d3-8695-e77d90e281a0 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Image pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 936.907879] env[61839]: DEBUG nova.virt.hardware [None req-232c9fff-1e94-49d3-8695-e77d90e281a0 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 936.907984] env[61839]: DEBUG nova.virt.hardware [None req-232c9fff-1e94-49d3-8695-e77d90e281a0 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Topology 
preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 936.908116] env[61839]: DEBUG nova.virt.hardware [None req-232c9fff-1e94-49d3-8695-e77d90e281a0 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 936.908309] env[61839]: DEBUG nova.virt.hardware [None req-232c9fff-1e94-49d3-8695-e77d90e281a0 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Got 1 possible topologies {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 936.908482] env[61839]: DEBUG nova.virt.hardware [None req-232c9fff-1e94-49d3-8695-e77d90e281a0 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 936.908662] env[61839]: DEBUG nova.virt.hardware [None req-232c9fff-1e94-49d3-8695-e77d90e281a0 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 936.911483] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e921d0b-1d4b-49de-b32f-511213d6d0ac {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.914289] env[61839]: DEBUG oslo_vmware.rw_handles [None req-1e94be6d-33e1-4c83-9b5e-75a1fbfe5edf tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52effe6d-aa41-4863-a968-04de54917d67/disk-0.vmdk is in state: ready. {{(pid=61839) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 936.914461] env[61839]: ERROR oslo_vmware.rw_handles [None req-1e94be6d-33e1-4c83-9b5e-75a1fbfe5edf tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52effe6d-aa41-4863-a968-04de54917d67/disk-0.vmdk due to incomplete transfer. [ 936.916271] env[61839]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-aff8e49a-7460-4080-acf3-ee7e7a4b4079 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.918131] env[61839]: DEBUG oslo_vmware.api [None req-cbc82b7c-e238-4c12-a25e-cd1b247d627b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Waiting for the task: (returnval){ [ 936.918131] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52e63892-dd3c-70d0-8f98-1cfc6f880128" [ 936.918131] env[61839]: _type = "Task" [ 936.918131] env[61839]: } to complete. 
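The nova.virt.hardware entries above ("Build topologies for 1 vcpu(s) 1:1:1", "Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]") reflect a brute-force enumeration: every (sockets, cores, threads) triple whose product equals the vCPU count and which respects the maxima. A rough sketch of that enumeration; 0 in the log means "no preference or limit" and 65536 is the default maximum.

# Rough sketch of the CPU-topology enumeration behind the entries above.
from itertools import product

def possible_cpu_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
    divisors = [d for d in range(1, vcpus + 1) if vcpus % d == 0]
    return [(s, c, t)
            for s, c, t in product(divisors, repeat=3)
            if s * c * t == vcpus
            and s <= max_sockets and c <= max_cores and t <= max_threads]

print(possible_cpu_topologies(1))   # -> [(1, 1, 1)], matching the log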
{{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 936.927280] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ded644b-398c-4629-ac72-c3f1fd68dd06 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.935860] env[61839]: DEBUG oslo_vmware.api [None req-cbc82b7c-e238-4c12-a25e-cd1b247d627b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52e63892-dd3c-70d0-8f98-1cfc6f880128, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.936191] env[61839]: DEBUG oslo_vmware.rw_handles [None req-1e94be6d-33e1-4c83-9b5e-75a1fbfe5edf tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52effe6d-aa41-4863-a968-04de54917d67/disk-0.vmdk. {{(pid=61839) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 936.936346] env[61839]: DEBUG nova.virt.vmwareapi.images [None req-1e94be6d-33e1-4c83-9b5e-75a1fbfe5edf tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] [instance: 86525ea7-af75-4b10-85a1-c0fbab73ea5f] Uploaded image 5fe38bf0-d665-4992-a6d0-c15bcea2316d to the Glance image server {{(pid=61839) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 936.938887] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-1e94be6d-33e1-4c83-9b5e-75a1fbfe5edf tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] [instance: 86525ea7-af75-4b10-85a1-c0fbab73ea5f] Destroying the VM {{(pid=61839) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 936.939846] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-7be528b0-3361-487a-a1a3-a033bf2cee75 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.953816] env[61839]: DEBUG oslo_vmware.api [None req-1e94be6d-33e1-4c83-9b5e-75a1fbfe5edf tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Waiting for the task: (returnval){ [ 936.953816] env[61839]: value = "task-1314768" [ 936.953816] env[61839]: _type = "Task" [ 936.953816] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 936.964627] env[61839]: DEBUG oslo_vmware.api [None req-1e94be6d-33e1-4c83-9b5e-75a1fbfe5edf tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Task: {'id': task-1314768, 'name': Destroy_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.029633] env[61839]: DEBUG oslo_vmware.api [None req-3cb5a8d7-2dab-4fe1-9636-3e19d5eeb851 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': task-1314765, 'name': CloneVM_Task, 'duration_secs': 1.258067} completed successfully. 
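Two lease teardown paths appear in this stretch: the AttachVolumeShelve request finished its transfer and released the lease, while the ServersNegative request logged "Aborting lease ... due to incomplete transfer" and invoked HttpNfcLeaseAbort. A sketch of that release-or-abort decision; the lease.*() methods are hypothetical stand-ins for the SOAP bindings.

# Sketch of the release-or-abort decision visible in the entries above:
# a finished transfer completes the lease, an incomplete one aborts it.
def close_lease(lease, transferred, expected):
    state = lease.state()                 # "Getting lease state for ..."
    if state != 'ready':
        return                            # nothing to release
    if transferred == expected:
        lease.complete()                  # HttpNfcLeaseComplete
    else:
        # "ERROR ... Aborting lease ... due to incomplete transfer."
        lease.abort()                     # HttpNfcLeaseAbort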
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 937.029959] env[61839]: INFO nova.virt.vmwareapi.vmops [None req-3cb5a8d7-2dab-4fe1-9636-3e19d5eeb851 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: e265dcd3-6ab5-44b1-85be-bad934ebdb79] Created linked-clone VM from snapshot [ 937.030807] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7f6831c-0d0a-4ebb-8c4e-3a3a097c4c9b {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.042377] env[61839]: DEBUG nova.virt.vmwareapi.images [None req-3cb5a8d7-2dab-4fe1-9636-3e19d5eeb851 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: e265dcd3-6ab5-44b1-85be-bad934ebdb79] Uploading image 6ef5e78d-0a80-467c-a598-32955806b305 {{(pid=61839) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 937.068122] env[61839]: DEBUG oslo_vmware.api [None req-bf3bcffb-6c34-4e16-a353-b3f853008852 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Task: {'id': task-1314767, 'name': MoveVirtualDisk_Task} progress is 40%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.070296] env[61839]: DEBUG oslo_vmware.rw_handles [None req-3cb5a8d7-2dab-4fe1-9636-3e19d5eeb851 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 937.070296] env[61839]: value = "vm-281420" [ 937.070296] env[61839]: _type = "VirtualMachine" [ 937.070296] env[61839]: }. {{(pid=61839) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 937.070541] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-99cf7274-1bc9-4e4e-972c-cb1b94d9ad4e {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.081134] env[61839]: DEBUG oslo_vmware.rw_handles [None req-3cb5a8d7-2dab-4fe1-9636-3e19d5eeb851 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Lease: (returnval){ [ 937.081134] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]525a53b1-0da1-f5d1-f110-0a3c750e6619" [ 937.081134] env[61839]: _type = "HttpNfcLease" [ 937.081134] env[61839]: } obtained for exporting VM: (result){ [ 937.081134] env[61839]: value = "vm-281420" [ 937.081134] env[61839]: _type = "VirtualMachine" [ 937.081134] env[61839]: }. {{(pid=61839) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 937.081402] env[61839]: DEBUG oslo_vmware.api [None req-3cb5a8d7-2dab-4fe1-9636-3e19d5eeb851 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Waiting for the lease: (returnval){ [ 937.081402] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]525a53b1-0da1-f5d1-f110-0a3c750e6619" [ 937.081402] env[61839]: _type = "HttpNfcLease" [ 937.081402] env[61839]: } to be ready. 
{{(pid=61839) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 937.091954] env[61839]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 937.091954] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]525a53b1-0da1-f5d1-f110-0a3c750e6619" [ 937.091954] env[61839]: _type = "HttpNfcLease" [ 937.091954] env[61839]: } is initializing. {{(pid=61839) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 937.360371] env[61839]: DEBUG oslo_concurrency.lockutils [None req-da461224-3a6a-4880-b121-dec82bb4b069 tempest-ServerMetadataTestJSON-214230339 tempest-ServerMetadataTestJSON-214230339-project-member] Lock "bac4c882-a23d-412f-ae98-f4f21d86681a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 16.167s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 937.438311] env[61839]: DEBUG oslo_vmware.api [None req-cbc82b7c-e238-4c12-a25e-cd1b247d627b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52e63892-dd3c-70d0-8f98-1cfc6f880128, 'name': SearchDatastore_Task, 'duration_secs': 0.091996} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 937.439942] env[61839]: DEBUG oslo_concurrency.lockutils [None req-cbc82b7c-e238-4c12-a25e-cd1b247d627b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 937.439942] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-cbc82b7c-e238-4c12-a25e-cd1b247d627b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk to [datastore1] d718d866-dd6c-4332-b63a-be6850a5a785/d718d866-dd6c-4332-b63a-be6850a5a785.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 937.439942] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-789ced84-21a7-4d48-967f-2832723f5f51 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.449693] env[61839]: DEBUG nova.compute.manager [req-b5753bb0-cfed-48de-974f-5fc3c4b76642 req-63f1c2cb-85ed-4072-bb5b-cab1be751149 service nova] [instance: d4a8c153-7585-4c78-8aa4-56077e0a7af6] Received event network-vif-plugged-648944d5-7ed5-40cb-8a22-8ea3244538d7 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 937.450166] env[61839]: DEBUG oslo_concurrency.lockutils [req-b5753bb0-cfed-48de-974f-5fc3c4b76642 req-63f1c2cb-85ed-4072-bb5b-cab1be751149 service nova] Acquiring lock "d4a8c153-7585-4c78-8aa4-56077e0a7af6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 937.450481] env[61839]: DEBUG oslo_concurrency.lockutils [req-b5753bb0-cfed-48de-974f-5fc3c4b76642 
req-63f1c2cb-85ed-4072-bb5b-cab1be751149 service nova] Lock "d4a8c153-7585-4c78-8aa4-56077e0a7af6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 937.450732] env[61839]: DEBUG oslo_concurrency.lockutils [req-b5753bb0-cfed-48de-974f-5fc3c4b76642 req-63f1c2cb-85ed-4072-bb5b-cab1be751149 service nova] Lock "d4a8c153-7585-4c78-8aa4-56077e0a7af6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 937.451265] env[61839]: DEBUG nova.compute.manager [req-b5753bb0-cfed-48de-974f-5fc3c4b76642 req-63f1c2cb-85ed-4072-bb5b-cab1be751149 service nova] [instance: d4a8c153-7585-4c78-8aa4-56077e0a7af6] No waiting events found dispatching network-vif-plugged-648944d5-7ed5-40cb-8a22-8ea3244538d7 {{(pid=61839) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 937.451522] env[61839]: WARNING nova.compute.manager [req-b5753bb0-cfed-48de-974f-5fc3c4b76642 req-63f1c2cb-85ed-4072-bb5b-cab1be751149 service nova] [instance: d4a8c153-7585-4c78-8aa4-56077e0a7af6] Received unexpected event network-vif-plugged-648944d5-7ed5-40cb-8a22-8ea3244538d7 for instance with vm_state building and task_state spawning. [ 937.453787] env[61839]: DEBUG oslo_vmware.api [None req-cbc82b7c-e238-4c12-a25e-cd1b247d627b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Waiting for the task: (returnval){ [ 937.453787] env[61839]: value = "task-1314770" [ 937.453787] env[61839]: _type = "Task" [ 937.453787] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 937.469143] env[61839]: DEBUG oslo_vmware.api [None req-1e94be6d-33e1-4c83-9b5e-75a1fbfe5edf tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Task: {'id': task-1314768, 'name': Destroy_Task} progress is 100%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.473091] env[61839]: DEBUG oslo_vmware.api [None req-cbc82b7c-e238-4c12-a25e-cd1b247d627b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': task-1314770, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.569674] env[61839]: DEBUG oslo_vmware.api [None req-bf3bcffb-6c34-4e16-a353-b3f853008852 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Task: {'id': task-1314767, 'name': MoveVirtualDisk_Task} progress is 60%. 
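The network-vif-plugged handling above ("Acquiring lock '<uuid>-events' by ..._pop_event", then "No waiting events found dispatching ...", then the WARNING about an unexpected event) is the external-event waiter pattern: spawning code registers a waiter keyed by (instance, event tag), and the event callback pops and fires it, or logs that nobody was waiting. A stdlib sketch of that registry.

# Stdlib sketch of the pop_instance_event pattern: waiters register a
# threading.Event keyed by (instance uuid, event tag); an arriving external
# event pops and fires it, or reports the "No waiting events found" case.
import threading

_events_lock = threading.Lock()          # the "<uuid>-events" lock in the log
_waiters = {}                            # (instance_uuid, tag) -> Event

def prepare_for_event(instance_uuid, tag):
    ev = threading.Event()
    with _events_lock:
        _waiters[(instance_uuid, tag)] = ev
    return ev                            # caller blocks on ev.wait(timeout)

def pop_instance_event(instance_uuid, tag):
    with _events_lock:
        ev = _waiters.pop((instance_uuid, tag), None)
    if ev is None:
        print('No waiting events found dispatching %s' % tag)
        return
    ev.set()                             # wake the waiter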
[ 937.595511] env[61839]: DEBUG nova.network.neutron [None req-232c9fff-1e94-49d3-8695-e77d90e281a0 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] [instance: d4a8c153-7585-4c78-8aa4-56077e0a7af6] Successfully updated port: 648944d5-7ed5-40cb-8a22-8ea3244538d7 {{(pid=61839) _update_port /opt/stack/nova/nova/network/neutron.py:586}}
[ 937.599283] env[61839]: DEBUG oslo_vmware.api [-] Lease: (returnval){
[ 937.599283] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]525a53b1-0da1-f5d1-f110-0a3c750e6619"
[ 937.599283] env[61839]: _type = "HttpNfcLease"
[ 937.599283] env[61839]: } is ready. {{(pid=61839) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}}
[ 937.599753] env[61839]: DEBUG oslo_vmware.rw_handles [None req-3cb5a8d7-2dab-4fe1-9636-3e19d5eeb851 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Invoking VIM API for reading info of lease: (returnval){
[ 937.599753] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]525a53b1-0da1-f5d1-f110-0a3c750e6619"
[ 937.599753] env[61839]: _type = "HttpNfcLease"
[ 937.599753] env[61839]: }. {{(pid=61839) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}}
[ 937.603021] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ced3bc48-217a-4806-a282-c7479d1147cc {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 937.616057] env[61839]: DEBUG oslo_vmware.rw_handles [None req-3cb5a8d7-2dab-4fe1-9636-3e19d5eeb851 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52dce99c-be22-a363-c85a-9ffa60cbfbc6/disk-0.vmdk from lease info. {{(pid=61839) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}}
[ 937.616376] env[61839]: DEBUG oslo_vmware.rw_handles [None req-3cb5a8d7-2dab-4fe1-9636-3e19d5eeb851 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52dce99c-be22-a363-c85a-9ffa60cbfbc6/disk-0.vmdk for reading. {{(pid=61839) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}}
[ 937.750435] env[61839]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-50b2eead-2879-4b22-a6a8-79e86dac56db {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 937.976301] env[61839]: DEBUG oslo_vmware.api [None req-cbc82b7c-e238-4c12-a25e-cd1b247d627b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': task-1314770, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 937.980384] env[61839]: DEBUG oslo_vmware.api [None req-1e94be6d-33e1-4c83-9b5e-75a1fbfe5edf tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Task: {'id': task-1314768, 'name': Destroy_Task} progress is 100%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
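The lease lines above trace the standard HttpNfcLease export flow: the lease reported "is initializing." at 937.091954, is polled until "is ready.", and rw_handles then reads the lease info and extracts the device URL for disk-0.vmdk. A condensed sketch of that loop under one assumption, a session.get_property helper (the real logic lives in oslo_vmware's rw_handles and api modules):

    import time

    def open_vmdk_url(session, lease_ref, poll_interval=0.5):
        # Poll HttpNfcLease.state: initializing -> ready (or error).
        while True:
            state = session.get_property(lease_ref, 'state')  # assumed helper
            if state == 'ready':
                break
            if state == 'error':
                raise RuntimeError('lease went into error state')
            time.sleep(poll_interval)  # each pass logs "... is initializing."
        info = session.get_property(lease_ref, 'info')
        # info.deviceUrl[0].url is the "Found VMDK URL: https://esx..." above
        return info.deviceUrl[0].url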
[ 938.070026] env[61839]: DEBUG oslo_vmware.api [None req-bf3bcffb-6c34-4e16-a353-b3f853008852 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Task: {'id': task-1314767, 'name': MoveVirtualDisk_Task} progress is 83%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 938.095719] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1016fab6-b776-4f38-9f84-ddc381c3d4d2 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 938.101476] env[61839]: DEBUG oslo_concurrency.lockutils [None req-232c9fff-1e94-49d3-8695-e77d90e281a0 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Acquiring lock "refresh_cache-d4a8c153-7585-4c78-8aa4-56077e0a7af6" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 938.101692] env[61839]: DEBUG oslo_concurrency.lockutils [None req-232c9fff-1e94-49d3-8695-e77d90e281a0 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Acquired lock "refresh_cache-d4a8c153-7585-4c78-8aa4-56077e0a7af6" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 938.101880] env[61839]: DEBUG nova.network.neutron [None req-232c9fff-1e94-49d3-8695-e77d90e281a0 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] [instance: d4a8c153-7585-4c78-8aa4-56077e0a7af6] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}}
[ 938.110172] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4efb7f4e-3787-4a9c-a994-9937d46cfce1 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 938.149172] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc6c19b9-07f6-4115-8fdf-15b478f64b2b {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 938.162534] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-beecdf48-231a-405d-8cef-eb9bc0422b66 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 938.185736] env[61839]: DEBUG nova.compute.provider_tree [None req-fd9d3f07-46d8-42a7-a204-834fbf9f5be1 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 938.468706] env[61839]: DEBUG oslo_vmware.api [None req-1e94be6d-33e1-4c83-9b5e-75a1fbfe5edf tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Task: {'id': task-1314768, 'name': Destroy_Task, 'duration_secs': 1.366846} completed successfully.
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 938.471946] env[61839]: INFO nova.virt.vmwareapi.vm_util [None req-1e94be6d-33e1-4c83-9b5e-75a1fbfe5edf tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] [instance: 86525ea7-af75-4b10-85a1-c0fbab73ea5f] Destroyed the VM [ 938.472402] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-1e94be6d-33e1-4c83-9b5e-75a1fbfe5edf tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] [instance: 86525ea7-af75-4b10-85a1-c0fbab73ea5f] Deleting Snapshot of the VM instance {{(pid=61839) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 938.472744] env[61839]: DEBUG oslo_vmware.api [None req-cbc82b7c-e238-4c12-a25e-cd1b247d627b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': task-1314770, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 938.472959] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-b07ebaa3-c863-4f26-bc50-74f872824c49 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.481699] env[61839]: DEBUG oslo_vmware.api [None req-1e94be6d-33e1-4c83-9b5e-75a1fbfe5edf tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Waiting for the task: (returnval){ [ 938.481699] env[61839]: value = "task-1314771" [ 938.481699] env[61839]: _type = "Task" [ 938.481699] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 938.491047] env[61839]: DEBUG oslo_vmware.api [None req-1e94be6d-33e1-4c83-9b5e-75a1fbfe5edf tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Task: {'id': task-1314771, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 938.570237] env[61839]: DEBUG oslo_vmware.api [None req-bf3bcffb-6c34-4e16-a353-b3f853008852 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Task: {'id': task-1314767, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.469152} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 938.571032] env[61839]: INFO nova.virt.vmwareapi.ds_util [None req-bf3bcffb-6c34-4e16-a353-b3f853008852 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_b68353e4-17e4-4ba9-bdda-d6cd4c813b11/OSTACK_IMG_b68353e4-17e4-4ba9-bdda-d6cd4c813b11.vmdk to [datastore1] devstack-image-cache_base/d630a97f-7e84-429e-8613-52457a48a10a/d630a97f-7e84-429e-8613-52457a48a10a.vmdk. 
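Every "Task: {...} progress is N%" / "completed successfully" pair in this log, such as the MoveVirtualDisk_Task that just finished after 2.469s, comes from the same polling loop over a vSphere TaskInfo object. A minimal sketch in the spirit of oslo_vmware.api's task waiter, again assuming a session.get_property helper:

    import time

    def wait_for_task(session, task_ref, poll_interval=0.5):
        while True:
            info = session.get_property(task_ref, 'info')  # assumed helper
            if info.state == 'success':
                return info.result  # logged with 'duration_secs' on completion
            if info.state == 'error':
                raise RuntimeError(info.error)
            # state is 'queued' or 'running': report progress and keep polling
            print("Task: {'id': %s} progress is %s%%" % (info.key, info.progress or 0))
            time.sleep(poll_interval)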
[ 938.571464] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-bf3bcffb-6c34-4e16-a353-b3f853008852 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: 694a5d4b-3673-406b-a24a-d37fad33e549] Cleaning up location [datastore1] OSTACK_IMG_b68353e4-17e4-4ba9-bdda-d6cd4c813b11 {{(pid=61839) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 938.571821] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-bf3bcffb-6c34-4e16-a353-b3f853008852 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_b68353e4-17e4-4ba9-bdda-d6cd4c813b11 {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 938.572254] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5395c990-a94f-437b-81e3-6f3577ea2c33 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.583597] env[61839]: DEBUG oslo_vmware.api [None req-bf3bcffb-6c34-4e16-a353-b3f853008852 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Waiting for the task: (returnval){ [ 938.583597] env[61839]: value = "task-1314772" [ 938.583597] env[61839]: _type = "Task" [ 938.583597] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 938.596460] env[61839]: DEBUG oslo_vmware.api [None req-bf3bcffb-6c34-4e16-a353-b3f853008852 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Task: {'id': task-1314772, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 938.646522] env[61839]: DEBUG nova.network.neutron [None req-232c9fff-1e94-49d3-8695-e77d90e281a0 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] [instance: d4a8c153-7585-4c78-8aa4-56077e0a7af6] Instance cache missing network info. 
{{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 938.691038] env[61839]: DEBUG nova.scheduler.client.report [None req-fd9d3f07-46d8-42a7-a204-834fbf9f5be1 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 938.852738] env[61839]: DEBUG nova.network.neutron [None req-232c9fff-1e94-49d3-8695-e77d90e281a0 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] [instance: d4a8c153-7585-4c78-8aa4-56077e0a7af6] Updating instance_info_cache with network_info: [{"id": "648944d5-7ed5-40cb-8a22-8ea3244538d7", "address": "fa:16:3e:12:04:b4", "network": {"id": "9ad4a7c4-51fa-42e2-927a-24d25b423b8b", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1594396457-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9d28bf7713204dfb9682d9c002cb5449", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aef08290-001a-4ae8-aff0-1889e2211389", "external-id": "nsx-vlan-transportzone-389", "segmentation_id": 389, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap648944d5-7e", "ovs_interfaceid": "648944d5-7ed5-40cb-8a22-8ea3244538d7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 938.971044] env[61839]: DEBUG oslo_vmware.api [None req-cbc82b7c-e238-4c12-a25e-cd1b247d627b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': task-1314770, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.389045} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 938.971044] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-cbc82b7c-e238-4c12-a25e-cd1b247d627b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk to [datastore1] d718d866-dd6c-4332-b63a-be6850a5a785/d718d866-dd6c-4332-b63a-be6850a5a785.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 938.971044] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-cbc82b7c-e238-4c12-a25e-cd1b247d627b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: d718d866-dd6c-4332-b63a-be6850a5a785] Extending root virtual disk to 1048576 {{(pid=61839) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 938.971044] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a87c4d9b-9df8-43fc-801f-0301c9a24044 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.977969] env[61839]: DEBUG oslo_vmware.api [None req-cbc82b7c-e238-4c12-a25e-cd1b247d627b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Waiting for the task: (returnval){ [ 938.977969] env[61839]: value = "task-1314773" [ 938.977969] env[61839]: _type = "Task" [ 938.977969] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 938.988912] env[61839]: DEBUG oslo_vmware.api [None req-cbc82b7c-e238-4c12-a25e-cd1b247d627b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': task-1314773, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 938.996836] env[61839]: DEBUG oslo_vmware.api [None req-1e94be6d-33e1-4c83-9b5e-75a1fbfe5edf tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Task: {'id': task-1314771, 'name': RemoveSnapshot_Task, 'duration_secs': 0.462497} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 938.997910] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-1e94be6d-33e1-4c83-9b5e-75a1fbfe5edf tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] [instance: 86525ea7-af75-4b10-85a1-c0fbab73ea5f] Deleted Snapshot of the VM instance {{(pid=61839) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 938.998431] env[61839]: DEBUG nova.compute.manager [None req-1e94be6d-33e1-4c83-9b5e-75a1fbfe5edf tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] [instance: 86525ea7-af75-4b10-85a1-c0fbab73ea5f] Checking state {{(pid=61839) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 938.999229] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-736d97a3-5509-43ae-984a-4808e9832a93 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.094605] env[61839]: DEBUG oslo_vmware.api [None req-bf3bcffb-6c34-4e16-a353-b3f853008852 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Task: {'id': task-1314772, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.194893} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 939.094945] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-bf3bcffb-6c34-4e16-a353-b3f853008852 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Deleted the datastore file {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 939.095157] env[61839]: DEBUG oslo_concurrency.lockutils [None req-bf3bcffb-6c34-4e16-a353-b3f853008852 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d630a97f-7e84-429e-8613-52457a48a10a/d630a97f-7e84-429e-8613-52457a48a10a.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 939.095409] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf3bcffb-6c34-4e16-a353-b3f853008852 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d630a97f-7e84-429e-8613-52457a48a10a/d630a97f-7e84-429e-8613-52457a48a10a.vmdk to [datastore1] 694a5d4b-3673-406b-a24a-d37fad33e549/694a5d4b-3673-406b-a24a-d37fad33e549.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 939.095682] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-04d275f9-b520-40b1-84d9-7e7e617c16ea {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.103978] env[61839]: DEBUG oslo_vmware.api [None req-bf3bcffb-6c34-4e16-a353-b3f853008852 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Waiting for the task: (returnval){ [ 939.103978] env[61839]: value = "task-1314774" [ 939.103978] env[61839]: _type = "Task" [ 939.103978] env[61839]: } to complete. 
{{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 939.113456] env[61839]: DEBUG oslo_vmware.api [None req-bf3bcffb-6c34-4e16-a353-b3f853008852 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Task: {'id': task-1314774, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.211732] env[61839]: DEBUG oslo_concurrency.lockutils [None req-fd9d3f07-46d8-42a7-a204-834fbf9f5be1 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.407s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 939.212307] env[61839]: DEBUG nova.compute.manager [None req-fd9d3f07-46d8-42a7-a204-834fbf9f5be1 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: 21e1c5b2-9865-457b-87c8-ce56c3c7b8f9] Start building networks asynchronously for instance. {{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 939.218022] env[61839]: DEBUG oslo_concurrency.lockutils [None req-294e9c99-ffcc-4188-96c2-7bc59baf0651 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 12.450s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 939.218022] env[61839]: DEBUG nova.objects.instance [None req-294e9c99-ffcc-4188-96c2-7bc59baf0651 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Lazy-loading 'resources' on Instance uuid ef49a6f5-27c3-4595-af65-d6a5aa47d4e4 {{(pid=61839) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 939.359078] env[61839]: DEBUG oslo_concurrency.lockutils [None req-232c9fff-1e94-49d3-8695-e77d90e281a0 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Releasing lock "refresh_cache-d4a8c153-7585-4c78-8aa4-56077e0a7af6" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 939.360107] env[61839]: DEBUG nova.compute.manager [None req-232c9fff-1e94-49d3-8695-e77d90e281a0 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] [instance: d4a8c153-7585-4c78-8aa4-56077e0a7af6] Instance network_info: |[{"id": "648944d5-7ed5-40cb-8a22-8ea3244538d7", "address": "fa:16:3e:12:04:b4", "network": {"id": "9ad4a7c4-51fa-42e2-927a-24d25b423b8b", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1594396457-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9d28bf7713204dfb9682d9c002cb5449", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aef08290-001a-4ae8-aff0-1889e2211389", "external-id": "nsx-vlan-transportzone-389", "segmentation_id": 
389, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap648944d5-7e", "ovs_interfaceid": "648944d5-7ed5-40cb-8a22-8ea3244538d7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 939.361029] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-232c9fff-1e94-49d3-8695-e77d90e281a0 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] [instance: d4a8c153-7585-4c78-8aa4-56077e0a7af6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:12:04:b4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'aef08290-001a-4ae8-aff0-1889e2211389', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '648944d5-7ed5-40cb-8a22-8ea3244538d7', 'vif_model': 'vmxnet3'}] {{(pid=61839) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 939.369524] env[61839]: DEBUG oslo.service.loopingcall [None req-232c9fff-1e94-49d3-8695-e77d90e281a0 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 939.369784] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d4a8c153-7585-4c78-8aa4-56077e0a7af6] Creating VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 939.370020] env[61839]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7a07545d-5db2-45ce-81d7-3baa8690412b {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.396864] env[61839]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 939.396864] env[61839]: value = "task-1314775" [ 939.396864] env[61839]: _type = "Task" [ 939.396864] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 939.403985] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314775, 'name': CreateVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.485967] env[61839]: DEBUG nova.compute.manager [req-ccb88773-b569-45b5-9ec9-91cb366f69f7 req-836ad154-c21c-44df-a41b-075912d716b7 service nova] [instance: d4a8c153-7585-4c78-8aa4-56077e0a7af6] Received event network-changed-648944d5-7ed5-40cb-8a22-8ea3244538d7 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 939.486379] env[61839]: DEBUG nova.compute.manager [req-ccb88773-b569-45b5-9ec9-91cb366f69f7 req-836ad154-c21c-44df-a41b-075912d716b7 service nova] [instance: d4a8c153-7585-4c78-8aa4-56077e0a7af6] Refreshing instance network info cache due to event network-changed-648944d5-7ed5-40cb-8a22-8ea3244538d7. 
{{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 939.486649] env[61839]: DEBUG oslo_concurrency.lockutils [req-ccb88773-b569-45b5-9ec9-91cb366f69f7 req-836ad154-c21c-44df-a41b-075912d716b7 service nova] Acquiring lock "refresh_cache-d4a8c153-7585-4c78-8aa4-56077e0a7af6" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 939.486846] env[61839]: DEBUG oslo_concurrency.lockutils [req-ccb88773-b569-45b5-9ec9-91cb366f69f7 req-836ad154-c21c-44df-a41b-075912d716b7 service nova] Acquired lock "refresh_cache-d4a8c153-7585-4c78-8aa4-56077e0a7af6" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 939.487143] env[61839]: DEBUG nova.network.neutron [req-ccb88773-b569-45b5-9ec9-91cb366f69f7 req-836ad154-c21c-44df-a41b-075912d716b7 service nova] [instance: d4a8c153-7585-4c78-8aa4-56077e0a7af6] Refreshing network info cache for port 648944d5-7ed5-40cb-8a22-8ea3244538d7 {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 939.498323] env[61839]: DEBUG oslo_vmware.api [None req-cbc82b7c-e238-4c12-a25e-cd1b247d627b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': task-1314773, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.199997} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 939.498641] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-cbc82b7c-e238-4c12-a25e-cd1b247d627b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: d718d866-dd6c-4332-b63a-be6850a5a785] Extended root virtual disk {{(pid=61839) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 939.499609] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-808eb767-d626-44c5-94a2-b9c73aeee787 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.533603] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-cbc82b7c-e238-4c12-a25e-cd1b247d627b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: d718d866-dd6c-4332-b63a-be6850a5a785] Reconfiguring VM instance instance-00000059 to attach disk [datastore1] d718d866-dd6c-4332-b63a-be6850a5a785/d718d866-dd6c-4332-b63a-be6850a5a785.vmdk or device None with type sparse {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 939.533603] env[61839]: INFO nova.compute.manager [None req-1e94be6d-33e1-4c83-9b5e-75a1fbfe5edf tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] [instance: 86525ea7-af75-4b10-85a1-c0fbab73ea5f] Shelve offloading [ 939.535227] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-05914300-a884-4bdb-8831-c8eaf503f035 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.550186] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-1e94be6d-33e1-4c83-9b5e-75a1fbfe5edf tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] [instance: 86525ea7-af75-4b10-85a1-c0fbab73ea5f] Powering off the VM {{(pid=61839) power_off_instance 
/opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 939.551253] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-71996632-20e6-45da-ac04-cef41ffc5858 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.560416] env[61839]: DEBUG oslo_vmware.api [None req-cbc82b7c-e238-4c12-a25e-cd1b247d627b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Waiting for the task: (returnval){ [ 939.560416] env[61839]: value = "task-1314777" [ 939.560416] env[61839]: _type = "Task" [ 939.560416] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 939.562428] env[61839]: DEBUG oslo_vmware.api [None req-1e94be6d-33e1-4c83-9b5e-75a1fbfe5edf tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Waiting for the task: (returnval){ [ 939.562428] env[61839]: value = "task-1314776" [ 939.562428] env[61839]: _type = "Task" [ 939.562428] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 939.575499] env[61839]: DEBUG oslo_vmware.api [None req-cbc82b7c-e238-4c12-a25e-cd1b247d627b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': task-1314777, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.579587] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-1e94be6d-33e1-4c83-9b5e-75a1fbfe5edf tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] [instance: 86525ea7-af75-4b10-85a1-c0fbab73ea5f] VM already powered off {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 939.579799] env[61839]: DEBUG nova.compute.manager [None req-1e94be6d-33e1-4c83-9b5e-75a1fbfe5edf tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] [instance: 86525ea7-af75-4b10-85a1-c0fbab73ea5f] Checking state {{(pid=61839) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 939.581035] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e84045a8-fb9b-4616-af88-90515218243b {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.589392] env[61839]: DEBUG oslo_concurrency.lockutils [None req-1e94be6d-33e1-4c83-9b5e-75a1fbfe5edf tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Acquiring lock "refresh_cache-86525ea7-af75-4b10-85a1-c0fbab73ea5f" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 939.589624] env[61839]: DEBUG oslo_concurrency.lockutils [None req-1e94be6d-33e1-4c83-9b5e-75a1fbfe5edf tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Acquired lock "refresh_cache-86525ea7-af75-4b10-85a1-c0fbab73ea5f" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 939.589814] env[61839]: DEBUG nova.network.neutron [None req-1e94be6d-33e1-4c83-9b5e-75a1fbfe5edf tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] 
[instance: 86525ea7-af75-4b10-85a1-c0fbab73ea5f] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 939.617029] env[61839]: DEBUG oslo_vmware.api [None req-bf3bcffb-6c34-4e16-a353-b3f853008852 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Task: {'id': task-1314774, 'name': CopyVirtualDisk_Task} progress is 12%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.719413] env[61839]: DEBUG nova.compute.utils [None req-fd9d3f07-46d8-42a7-a204-834fbf9f5be1 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Using /dev/sd instead of None {{(pid=61839) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 939.725524] env[61839]: DEBUG nova.compute.manager [None req-fd9d3f07-46d8-42a7-a204-834fbf9f5be1 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: 21e1c5b2-9865-457b-87c8-ce56c3c7b8f9] Allocating IP information in the background. {{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 939.725744] env[61839]: DEBUG nova.network.neutron [None req-fd9d3f07-46d8-42a7-a204-834fbf9f5be1 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: 21e1c5b2-9865-457b-87c8-ce56c3c7b8f9] allocate_for_instance() {{(pid=61839) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 939.770092] env[61839]: DEBUG nova.policy [None req-fd9d3f07-46d8-42a7-a204-834fbf9f5be1 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'de344d8cc13340d7affed971d75f486d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '251b0d2531ba4f14a2eb6ea75382c418', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61839) authorize /opt/stack/nova/nova/policy.py:201}} [ 939.914424] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314775, 'name': CreateVM_Task} progress is 25%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.055594] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd8c4199-bc6c-465b-9328-3d6bde287a1a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.076334] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9eff7722-d24a-4681-8d46-afb81fec6029 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.118479] env[61839]: DEBUG nova.network.neutron [None req-fd9d3f07-46d8-42a7-a204-834fbf9f5be1 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: 21e1c5b2-9865-457b-87c8-ce56c3c7b8f9] Successfully created port: 5dc2a4d8-1803-4434-b9af-037ce20523af {{(pid=61839) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 940.130715] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f00e793-75f6-4297-ab6c-83745763129d {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.133484] env[61839]: DEBUG oslo_vmware.api [None req-cbc82b7c-e238-4c12-a25e-cd1b247d627b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': task-1314777, 'name': ReconfigVM_Task, 'duration_secs': 0.346203} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 940.134762] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-cbc82b7c-e238-4c12-a25e-cd1b247d627b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: d718d866-dd6c-4332-b63a-be6850a5a785] Reconfigured VM instance instance-00000059 to attach disk [datastore1] d718d866-dd6c-4332-b63a-be6850a5a785/d718d866-dd6c-4332-b63a-be6850a5a785.vmdk or device None with type sparse {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 940.136253] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3583998c-aecc-476b-aec6-72d5ff52da5b {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.144565] env[61839]: DEBUG oslo_vmware.api [None req-bf3bcffb-6c34-4e16-a353-b3f853008852 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Task: {'id': task-1314774, 'name': CopyVirtualDisk_Task} progress is 35%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.148096] env[61839]: DEBUG oslo_vmware.api [None req-cbc82b7c-e238-4c12-a25e-cd1b247d627b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Waiting for the task: (returnval){ [ 940.148096] env[61839]: value = "task-1314778" [ 940.148096] env[61839]: _type = "Task" [ 940.148096] env[61839]: } to complete. 
{{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 940.149858] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a564e1f1-f044-48f2-a1d3-6d0a97a6dd26 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.168037] env[61839]: DEBUG oslo_vmware.api [None req-cbc82b7c-e238-4c12-a25e-cd1b247d627b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': task-1314778, 'name': Rename_Task} progress is 6%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.181441] env[61839]: DEBUG nova.compute.provider_tree [None req-294e9c99-ffcc-4188-96c2-7bc59baf0651 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 940.226411] env[61839]: DEBUG nova.compute.manager [None req-fd9d3f07-46d8-42a7-a204-834fbf9f5be1 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: 21e1c5b2-9865-457b-87c8-ce56c3c7b8f9] Start building block device mappings for instance. {{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 940.412845] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314775, 'name': CreateVM_Task, 'duration_secs': 0.851541} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 940.413038] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d4a8c153-7585-4c78-8aa4-56077e0a7af6] Created VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 940.413957] env[61839]: DEBUG oslo_concurrency.lockutils [None req-232c9fff-1e94-49d3-8695-e77d90e281a0 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 940.414651] env[61839]: DEBUG oslo_concurrency.lockutils [None req-232c9fff-1e94-49d3-8695-e77d90e281a0 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 940.415039] env[61839]: DEBUG oslo_concurrency.lockutils [None req-232c9fff-1e94-49d3-8695-e77d90e281a0 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 940.415342] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2e5b632a-1020-42f4-985a-4ce90e402629 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.423744] env[61839]: DEBUG oslo_vmware.api [None req-232c9fff-1e94-49d3-8695-e77d90e281a0 
tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Waiting for the task: (returnval){ [ 940.423744] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52bf47c2-eaaf-647e-e191-1f324c801f59" [ 940.423744] env[61839]: _type = "Task" [ 940.423744] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 940.439535] env[61839]: DEBUG oslo_vmware.api [None req-232c9fff-1e94-49d3-8695-e77d90e281a0 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52bf47c2-eaaf-647e-e191-1f324c801f59, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.638035] env[61839]: DEBUG oslo_vmware.api [None req-bf3bcffb-6c34-4e16-a353-b3f853008852 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Task: {'id': task-1314774, 'name': CopyVirtualDisk_Task} progress is 54%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.653284] env[61839]: DEBUG nova.network.neutron [req-ccb88773-b569-45b5-9ec9-91cb366f69f7 req-836ad154-c21c-44df-a41b-075912d716b7 service nova] [instance: d4a8c153-7585-4c78-8aa4-56077e0a7af6] Updated VIF entry in instance network info cache for port 648944d5-7ed5-40cb-8a22-8ea3244538d7. {{(pid=61839) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 940.654033] env[61839]: DEBUG nova.network.neutron [req-ccb88773-b569-45b5-9ec9-91cb366f69f7 req-836ad154-c21c-44df-a41b-075912d716b7 service nova] [instance: d4a8c153-7585-4c78-8aa4-56077e0a7af6] Updating instance_info_cache with network_info: [{"id": "648944d5-7ed5-40cb-8a22-8ea3244538d7", "address": "fa:16:3e:12:04:b4", "network": {"id": "9ad4a7c4-51fa-42e2-927a-24d25b423b8b", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1594396457-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9d28bf7713204dfb9682d9c002cb5449", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aef08290-001a-4ae8-aff0-1889e2211389", "external-id": "nsx-vlan-transportzone-389", "segmentation_id": 389, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap648944d5-7e", "ovs_interfaceid": "648944d5-7ed5-40cb-8a22-8ea3244538d7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 940.665041] env[61839]: DEBUG oslo_vmware.api [None req-cbc82b7c-e238-4c12-a25e-cd1b247d627b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': task-1314778, 'name': Rename_Task, 'duration_secs': 0.211946} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 940.665345] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-cbc82b7c-e238-4c12-a25e-cd1b247d627b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: d718d866-dd6c-4332-b63a-be6850a5a785] Powering on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 940.665630] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0134a54f-98d6-4b40-924f-7f2899d10913 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.677446] env[61839]: DEBUG oslo_vmware.api [None req-cbc82b7c-e238-4c12-a25e-cd1b247d627b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Waiting for the task: (returnval){ [ 940.677446] env[61839]: value = "task-1314779" [ 940.677446] env[61839]: _type = "Task" [ 940.677446] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 940.692886] env[61839]: DEBUG oslo_vmware.api [None req-cbc82b7c-e238-4c12-a25e-cd1b247d627b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': task-1314779, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.695100] env[61839]: DEBUG nova.scheduler.client.report [None req-294e9c99-ffcc-4188-96c2-7bc59baf0651 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 940.710794] env[61839]: DEBUG nova.network.neutron [None req-1e94be6d-33e1-4c83-9b5e-75a1fbfe5edf tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] [instance: 86525ea7-af75-4b10-85a1-c0fbab73ea5f] Updating instance_info_cache with network_info: [{"id": "ef8176cf-7494-44f4-a600-7dedff162419", "address": "fa:16:3e:c1:30:b3", "network": {"id": "8bda8ac4-b34c-4577-ae5e-07845e9e7428", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-74282814-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c7a300fe2748456bb4a522a4d7c0d0f4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d1e1e320-ec56-4fcc-b6e9-30aa210d3b36", "external-id": "nsx-vlan-transportzone-447", "segmentation_id": 447, "bound_drivers": {"0": "nsxv3"}}, "devname": 
"tapef8176cf-74", "ovs_interfaceid": "ef8176cf-7494-44f4-a600-7dedff162419", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 940.937080] env[61839]: DEBUG oslo_vmware.api [None req-232c9fff-1e94-49d3-8695-e77d90e281a0 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52bf47c2-eaaf-647e-e191-1f324c801f59, 'name': SearchDatastore_Task, 'duration_secs': 0.086809} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 940.937538] env[61839]: DEBUG oslo_concurrency.lockutils [None req-232c9fff-1e94-49d3-8695-e77d90e281a0 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 940.937863] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-232c9fff-1e94-49d3-8695-e77d90e281a0 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] [instance: d4a8c153-7585-4c78-8aa4-56077e0a7af6] Processing image e497cc62-282a-4a70-9770-22d80d8a1013 {{(pid=61839) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 940.938121] env[61839]: DEBUG oslo_concurrency.lockutils [None req-232c9fff-1e94-49d3-8695-e77d90e281a0 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 940.938487] env[61839]: DEBUG oslo_concurrency.lockutils [None req-232c9fff-1e94-49d3-8695-e77d90e281a0 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 940.938728] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-232c9fff-1e94-49d3-8695-e77d90e281a0 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 940.939118] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a168ff4b-2755-40fd-bfd4-baca3a500acd {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.963316] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-232c9fff-1e94-49d3-8695-e77d90e281a0 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 940.963316] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None 
req-232c9fff-1e94-49d3-8695-e77d90e281a0 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61839) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 940.964090] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-161ccc07-0df4-4cab-b509-1d62fc5103a4 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.977243] env[61839]: DEBUG oslo_vmware.api [None req-232c9fff-1e94-49d3-8695-e77d90e281a0 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Waiting for the task: (returnval){ [ 940.977243] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]521097c5-1e8c-6e04-9940-1f85f2213f7a" [ 940.977243] env[61839]: _type = "Task" [ 940.977243] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 940.995134] env[61839]: DEBUG oslo_vmware.api [None req-232c9fff-1e94-49d3-8695-e77d90e281a0 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]521097c5-1e8c-6e04-9940-1f85f2213f7a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.132449] env[61839]: DEBUG oslo_vmware.api [None req-bf3bcffb-6c34-4e16-a353-b3f853008852 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Task: {'id': task-1314774, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.157692] env[61839]: DEBUG oslo_concurrency.lockutils [req-ccb88773-b569-45b5-9ec9-91cb366f69f7 req-836ad154-c21c-44df-a41b-075912d716b7 service nova] Releasing lock "refresh_cache-d4a8c153-7585-4c78-8aa4-56077e0a7af6" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 941.197633] env[61839]: DEBUG oslo_vmware.api [None req-cbc82b7c-e238-4c12-a25e-cd1b247d627b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': task-1314779, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.200707] env[61839]: DEBUG oslo_concurrency.lockutils [None req-294e9c99-ffcc-4188-96c2-7bc59baf0651 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.985s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 941.205098] env[61839]: DEBUG oslo_concurrency.lockutils [None req-ac991200-7ce6-466a-b5fd-f91f890fd8c6 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.423s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 941.205206] env[61839]: INFO nova.compute.claims [None req-ac991200-7ce6-466a-b5fd-f91f890fd8c6 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 23ee24d5-bccd-497d-a53f-b9723fd9c707] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 941.214811] env[61839]: DEBUG oslo_concurrency.lockutils [None req-1e94be6d-33e1-4c83-9b5e-75a1fbfe5edf tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Releasing lock "refresh_cache-86525ea7-af75-4b10-85a1-c0fbab73ea5f" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 941.234705] env[61839]: INFO nova.scheduler.client.report [None req-294e9c99-ffcc-4188-96c2-7bc59baf0651 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Deleted allocations for instance ef49a6f5-27c3-4595-af65-d6a5aa47d4e4 [ 941.241689] env[61839]: DEBUG nova.compute.manager [None req-fd9d3f07-46d8-42a7-a204-834fbf9f5be1 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: 21e1c5b2-9865-457b-87c8-ce56c3c7b8f9] Start spawning the instance on the hypervisor. 
{{(pid=61839) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 941.273536] env[61839]: DEBUG nova.virt.hardware [None req-fd9d3f07-46d8-42a7-a204-834fbf9f5be1 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-18T16:51:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-18T16:50:57Z,direct_url=,disk_format='vmdk',id=e497cc62-282a-4a70-9770-22d80d8a1013,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a98e034276e44534a9feace637762da7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-18T16:50:58Z,virtual_size=,visibility=), allow threads: False {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 941.273751] env[61839]: DEBUG nova.virt.hardware [None req-fd9d3f07-46d8-42a7-a204-834fbf9f5be1 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Flavor limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 941.273842] env[61839]: DEBUG nova.virt.hardware [None req-fd9d3f07-46d8-42a7-a204-834fbf9f5be1 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Image limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 941.274112] env[61839]: DEBUG nova.virt.hardware [None req-fd9d3f07-46d8-42a7-a204-834fbf9f5be1 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Flavor pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 941.274351] env[61839]: DEBUG nova.virt.hardware [None req-fd9d3f07-46d8-42a7-a204-834fbf9f5be1 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Image pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 941.275177] env[61839]: DEBUG nova.virt.hardware [None req-fd9d3f07-46d8-42a7-a204-834fbf9f5be1 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 941.275177] env[61839]: DEBUG nova.virt.hardware [None req-fd9d3f07-46d8-42a7-a204-834fbf9f5be1 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 941.275177] env[61839]: DEBUG nova.virt.hardware [None req-fd9d3f07-46d8-42a7-a204-834fbf9f5be1 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 941.275453] env[61839]: DEBUG nova.virt.hardware [None req-fd9d3f07-46d8-42a7-a204-834fbf9f5be1 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Got 1 possible 
topologies {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 941.275517] env[61839]: DEBUG nova.virt.hardware [None req-fd9d3f07-46d8-42a7-a204-834fbf9f5be1 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 941.275657] env[61839]: DEBUG nova.virt.hardware [None req-fd9d3f07-46d8-42a7-a204-834fbf9f5be1 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 941.277788] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58e90706-bc86-4d2a-b7d0-ec98ab324e04 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.291179] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddee42ed-7310-4c86-8d1f-312fdb015451 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.492976] env[61839]: DEBUG oslo_vmware.api [None req-232c9fff-1e94-49d3-8695-e77d90e281a0 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]521097c5-1e8c-6e04-9940-1f85f2213f7a, 'name': SearchDatastore_Task, 'duration_secs': 0.088433} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 941.493976] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7437be4f-6964-48ce-86e9-0a33ae4143ed {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.502927] env[61839]: DEBUG oslo_vmware.api [None req-232c9fff-1e94-49d3-8695-e77d90e281a0 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Waiting for the task: (returnval){ [ 941.502927] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]5288dad7-d91f-9d84-6cf1-428917c3b694" [ 941.502927] env[61839]: _type = "Task" [ 941.502927] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 941.515280] env[61839]: DEBUG oslo_vmware.api [None req-232c9fff-1e94-49d3-8695-e77d90e281a0 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]5288dad7-d91f-9d84-6cf1-428917c3b694, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.632875] env[61839]: DEBUG oslo_vmware.api [None req-bf3bcffb-6c34-4e16-a353-b3f853008852 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Task: {'id': task-1314774, 'name': CopyVirtualDisk_Task} progress is 97%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.684506] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-1e94be6d-33e1-4c83-9b5e-75a1fbfe5edf tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] [instance: 86525ea7-af75-4b10-85a1-c0fbab73ea5f] Destroying instance {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 941.685199] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cccdb63-6dcb-4153-931a-d50d4811a745 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.696672] env[61839]: DEBUG oslo_vmware.api [None req-cbc82b7c-e238-4c12-a25e-cd1b247d627b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': task-1314779, 'name': PowerOnVM_Task, 'duration_secs': 0.632546} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 941.699142] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-cbc82b7c-e238-4c12-a25e-cd1b247d627b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: d718d866-dd6c-4332-b63a-be6850a5a785] Powered on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 941.699413] env[61839]: INFO nova.compute.manager [None req-cbc82b7c-e238-4c12-a25e-cd1b247d627b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: d718d866-dd6c-4332-b63a-be6850a5a785] Took 9.31 seconds to spawn the instance on the hypervisor. 
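
The nova.virt.hardware entries above enumerate CPU topologies for the 1-vCPU m1.nano flavor: with flavor and image limits and preferences all 0 (meaning unset), the ceilings default to 65536 and the only valid split of one vCPU is sockets=1, cores=1, threads=1. A condensed sketch of that enumeration in plain Python (illustrative only; the real logic lives in nova/virt/hardware.py and handles many more constraints):

    # Simplified sketch of the topology walk logged above: 0 means "no
    # preference", so every (sockets, cores, threads) factorisation of the
    # vCPU count under the 65536 ceilings is a candidate.
    MAX = 65536  # default ceiling when neither flavor nor image sets a limit

    def possible_topologies(vcpus, max_sockets=MAX, max_cores=MAX,
                            max_threads=MAX):
        for s in range(1, min(vcpus, max_sockets) + 1):
            for c in range(1, min(vcpus, max_cores) + 1):
                for t in range(1, min(vcpus, max_threads) + 1):
                    if s * c * t == vcpus:
                        yield (s, c, t)

    print(list(possible_topologies(1)))  # -> [(1, 1, 1)], as in the log

For one vCPU the search space collapses immediately, which is why the log reports exactly one possible topology and the same value as the sorted/desired result.
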
[ 941.699569] env[61839]: DEBUG nova.compute.manager [None req-cbc82b7c-e238-4c12-a25e-cd1b247d627b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: d718d866-dd6c-4332-b63a-be6850a5a785] Checking state {{(pid=61839) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 941.699821] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-1e94be6d-33e1-4c83-9b5e-75a1fbfe5edf tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] [instance: 86525ea7-af75-4b10-85a1-c0fbab73ea5f] Unregistering the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 941.700607] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13cc0ef4-ad21-403e-9bf6-b1c34a8326ed {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.703274] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f3d76953-7205-49d3-9a42-72b29d4b396b {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.744626] env[61839]: DEBUG nova.compute.manager [req-6d4847fa-db7c-4686-823d-ca20cfbe7ac2 req-a6558f4b-c537-454b-9da3-d16d6bebdc58 service nova] [instance: 86525ea7-af75-4b10-85a1-c0fbab73ea5f] Received event network-vif-unplugged-ef8176cf-7494-44f4-a600-7dedff162419 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 941.745269] env[61839]: DEBUG oslo_concurrency.lockutils [req-6d4847fa-db7c-4686-823d-ca20cfbe7ac2 req-a6558f4b-c537-454b-9da3-d16d6bebdc58 service nova] Acquiring lock "86525ea7-af75-4b10-85a1-c0fbab73ea5f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 941.745269] env[61839]: DEBUG oslo_concurrency.lockutils [req-6d4847fa-db7c-4686-823d-ca20cfbe7ac2 req-a6558f4b-c537-454b-9da3-d16d6bebdc58 service nova] Lock "86525ea7-af75-4b10-85a1-c0fbab73ea5f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 941.745386] env[61839]: DEBUG oslo_concurrency.lockutils [req-6d4847fa-db7c-4686-823d-ca20cfbe7ac2 req-a6558f4b-c537-454b-9da3-d16d6bebdc58 service nova] Lock "86525ea7-af75-4b10-85a1-c0fbab73ea5f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 941.745496] env[61839]: DEBUG nova.compute.manager [req-6d4847fa-db7c-4686-823d-ca20cfbe7ac2 req-a6558f4b-c537-454b-9da3-d16d6bebdc58 service nova] [instance: 86525ea7-af75-4b10-85a1-c0fbab73ea5f] No waiting events found dispatching network-vif-unplugged-ef8176cf-7494-44f4-a600-7dedff162419 {{(pid=61839) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 941.745799] env[61839]: WARNING nova.compute.manager [req-6d4847fa-db7c-4686-823d-ca20cfbe7ac2 req-a6558f4b-c537-454b-9da3-d16d6bebdc58 service nova] [instance: 86525ea7-af75-4b10-85a1-c0fbab73ea5f] Received unexpected event network-vif-unplugged-ef8176cf-7494-44f4-a600-7dedff162419 for instance with vm_state shelved and task_state shelving_offloading. 
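
The service-nova entries above show the external-event handshake: a Neutron notification such as network-vif-unplugged-<port> arrives, the per-instance "<uuid>-events" lock is taken, and because no waiter registered for that event it is dispatched as unexpected (hence the WARNING for the shelved instance). A minimal latch along these lines, assuming only the stdlib; the names are illustrative, not Nova's InstanceEvents API:

    import threading

    class InstanceEvents:
        def __init__(self):
            self._lock = threading.Lock()
            self._waiters = {}  # (instance_uuid, event_name) -> Event

        def prepare(self, instance_uuid, event_name):
            # A spawner registers the event it intends to block on.
            latch = threading.Event()
            with self._lock:  # mirrors the "<uuid>-events" lock in the log
                self._waiters[(instance_uuid, event_name)] = latch
            return latch

        def pop_event(self, instance_uuid, event_name):
            # The notification path: release a registered waiter, or report
            # the event as unexpected, as the WARNING entries above do.
            with self._lock:
                latch = self._waiters.pop((instance_uuid, event_name), None)
            if latch is None:
                print(f"WARNING: unexpected event {event_name} "
                      f"for instance {instance_uuid}")
                return False
            latch.set()  # wakes anyone blocked on latch.wait()
            return True

    # e.g.:
    # latch = events.prepare(uuid, "network-vif-plugged-<port>")
    # ... start the plug, then: latch.wait(timeout=300)

Shelve-offload deliberately does not wait on unplug events, which is why the unplug notification above lands as "unexpected" rather than as an error.
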
[ 941.749374] env[61839]: DEBUG oslo_concurrency.lockutils [None req-294e9c99-ffcc-4188-96c2-7bc59baf0651 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Lock "ef49a6f5-27c3-4595-af65-d6a5aa47d4e4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 18.049s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 941.771835] env[61839]: DEBUG nova.compute.manager [req-a11073cf-fa1c-45ea-94ff-e7439aabfc9a req-30fe1444-60c2-4a13-9b2d-b0a018e7a5bb service nova] [instance: 21e1c5b2-9865-457b-87c8-ce56c3c7b8f9] Received event network-vif-plugged-5dc2a4d8-1803-4434-b9af-037ce20523af {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 941.772371] env[61839]: DEBUG oslo_concurrency.lockutils [req-a11073cf-fa1c-45ea-94ff-e7439aabfc9a req-30fe1444-60c2-4a13-9b2d-b0a018e7a5bb service nova] Acquiring lock "21e1c5b2-9865-457b-87c8-ce56c3c7b8f9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 941.772666] env[61839]: DEBUG oslo_concurrency.lockutils [req-a11073cf-fa1c-45ea-94ff-e7439aabfc9a req-30fe1444-60c2-4a13-9b2d-b0a018e7a5bb service nova] Lock "21e1c5b2-9865-457b-87c8-ce56c3c7b8f9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 941.772870] env[61839]: DEBUG oslo_concurrency.lockutils [req-a11073cf-fa1c-45ea-94ff-e7439aabfc9a req-30fe1444-60c2-4a13-9b2d-b0a018e7a5bb service nova] Lock "21e1c5b2-9865-457b-87c8-ce56c3c7b8f9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 941.773118] env[61839]: DEBUG nova.compute.manager [req-a11073cf-fa1c-45ea-94ff-e7439aabfc9a req-30fe1444-60c2-4a13-9b2d-b0a018e7a5bb service nova] [instance: 21e1c5b2-9865-457b-87c8-ce56c3c7b8f9] No waiting events found dispatching network-vif-plugged-5dc2a4d8-1803-4434-b9af-037ce20523af {{(pid=61839) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 941.773254] env[61839]: WARNING nova.compute.manager [req-a11073cf-fa1c-45ea-94ff-e7439aabfc9a req-30fe1444-60c2-4a13-9b2d-b0a018e7a5bb service nova] [instance: 21e1c5b2-9865-457b-87c8-ce56c3c7b8f9] Received unexpected event network-vif-plugged-5dc2a4d8-1803-4434-b9af-037ce20523af for instance with vm_state building and task_state spawning. 
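
Throughout this section, oslo.concurrency's lock wrapper reports how long a caller blocked ("waited 14.423s") and how long the critical section ran ("held 18.049s"); large waited values on "compute_resources" indicate contention on the resource tracker's single lock. A stdlib-only sketch of that accounting, assuming one process-local lock per name:

    import threading
    import time
    from contextlib import contextmanager

    _locks = {}
    _registry_lock = threading.Lock()

    @contextmanager
    def timed_lock(name, by):
        with _registry_lock:
            lock = _locks.setdefault(name, threading.Lock())
        t0 = time.monotonic()
        lock.acquire()  # time spent here is the "waited" figure
        print(f'Lock "{name}" acquired by "{by}" :: waited '
              f'{time.monotonic() - t0:.3f}s')
        t1 = time.monotonic()
        try:
            yield
        finally:
            lock.release()  # time inside the body is the "held" figure
            print(f'Lock "{name}" "released" by "{by}" :: held '
                  f'{time.monotonic() - t1:.3f}s')

    # e.g.:
    # with timed_lock("compute_resources",
    #                 by="ResourceTracker.instance_claim"):
    #     ...  # claim resources

Because every claim, usage update, and deletion on this host serialises on the same named lock, one slow holder (the 18-second terminate above) shows up as long "waited" times in every concurrent tempest worker.
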
[ 941.801953] env[61839]: DEBUG nova.network.neutron [None req-fd9d3f07-46d8-42a7-a204-834fbf9f5be1 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: 21e1c5b2-9865-457b-87c8-ce56c3c7b8f9] Successfully updated port: 5dc2a4d8-1803-4434-b9af-037ce20523af {{(pid=61839) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 941.970678] env[61839]: DEBUG oslo_concurrency.lockutils [None req-61f94066-337d-4ee6-ac13-e4b101f6af4a tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Acquiring lock "75344275-bdf2-4526-a101-e62ec270dd72" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 941.970915] env[61839]: DEBUG oslo_concurrency.lockutils [None req-61f94066-337d-4ee6-ac13-e4b101f6af4a tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Lock "75344275-bdf2-4526-a101-e62ec270dd72" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 942.018038] env[61839]: DEBUG oslo_vmware.api [None req-232c9fff-1e94-49d3-8695-e77d90e281a0 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]5288dad7-d91f-9d84-6cf1-428917c3b694, 'name': SearchDatastore_Task, 'duration_secs': 0.084459} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 942.019533] env[61839]: DEBUG oslo_concurrency.lockutils [None req-232c9fff-1e94-49d3-8695-e77d90e281a0 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 942.019827] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-232c9fff-1e94-49d3-8695-e77d90e281a0 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk to [datastore1] d4a8c153-7585-4c78-8aa4-56077e0a7af6/d4a8c153-7585-4c78-8aa4-56077e0a7af6.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 942.020144] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-1e94be6d-33e1-4c83-9b5e-75a1fbfe5edf tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] [instance: 86525ea7-af75-4b10-85a1-c0fbab73ea5f] Unregistered the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 942.020333] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-1e94be6d-33e1-4c83-9b5e-75a1fbfe5edf tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] [instance: 86525ea7-af75-4b10-85a1-c0fbab73ea5f] Deleting contents of the VM from datastore datastore2 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 942.020542] env[61839]: DEBUG nova.virt.vmwareapi.ds_util 
[None req-1e94be6d-33e1-4c83-9b5e-75a1fbfe5edf tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Deleting the datastore file [datastore2] 86525ea7-af75-4b10-85a1-c0fbab73ea5f {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 942.020740] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8d05c742-d663-4b5c-9658-a1a3f47dfbf1 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.023178] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f89f71e5-f9cd-4472-a86b-6fc44d4ca511 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.030689] env[61839]: DEBUG oslo_vmware.api [None req-1e94be6d-33e1-4c83-9b5e-75a1fbfe5edf tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Waiting for the task: (returnval){ [ 942.030689] env[61839]: value = "task-1314782" [ 942.030689] env[61839]: _type = "Task" [ 942.030689] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 942.032049] env[61839]: DEBUG oslo_vmware.api [None req-232c9fff-1e94-49d3-8695-e77d90e281a0 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Waiting for the task: (returnval){ [ 942.032049] env[61839]: value = "task-1314781" [ 942.032049] env[61839]: _type = "Task" [ 942.032049] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 942.043766] env[61839]: DEBUG oslo_vmware.api [None req-1e94be6d-33e1-4c83-9b5e-75a1fbfe5edf tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Task: {'id': task-1314782, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.047056] env[61839]: DEBUG oslo_vmware.api [None req-232c9fff-1e94-49d3-8695-e77d90e281a0 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Task: {'id': task-1314781, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.130627] env[61839]: DEBUG oslo_vmware.api [None req-bf3bcffb-6c34-4e16-a353-b3f853008852 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Task: {'id': task-1314774, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.580989} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 942.130938] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf3bcffb-6c34-4e16-a353-b3f853008852 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d630a97f-7e84-429e-8613-52457a48a10a/d630a97f-7e84-429e-8613-52457a48a10a.vmdk to [datastore1] 694a5d4b-3673-406b-a24a-d37fad33e549/694a5d4b-3673-406b-a24a-d37fad33e549.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 942.131795] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdae833c-107a-4229-a8bf-3a7cb1a41e56 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.155643] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-bf3bcffb-6c34-4e16-a353-b3f853008852 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: 694a5d4b-3673-406b-a24a-d37fad33e549] Reconfiguring VM instance instance-00000040 to attach disk [datastore1] 694a5d4b-3673-406b-a24a-d37fad33e549/694a5d4b-3673-406b-a24a-d37fad33e549.vmdk or device None with type streamOptimized {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 942.155978] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ed21a91e-1b78-46c9-958a-32e0cb227898 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.178508] env[61839]: DEBUG oslo_vmware.api [None req-bf3bcffb-6c34-4e16-a353-b3f853008852 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Waiting for the task: (returnval){ [ 942.178508] env[61839]: value = "task-1314783" [ 942.178508] env[61839]: _type = "Task" [ 942.178508] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 942.187610] env[61839]: DEBUG oslo_vmware.api [None req-bf3bcffb-6c34-4e16-a353-b3f853008852 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Task: {'id': task-1314783, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.229062] env[61839]: INFO nova.compute.manager [None req-cbc82b7c-e238-4c12-a25e-cd1b247d627b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: d718d866-dd6c-4332-b63a-be6850a5a785] Took 24.49 seconds to build instance. 
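
The task names in the preceding entries trace the spawn path: the image is looked up in devstack-image-cache_base (SearchDatastore_Task), copied into the instance directory (CopyVirtualDisk_Task), attached by reconfiguring the VM (ReconfigVM_Task), and the VM is powered on (PowerOnVM_Task). A toy facade that replays that ordering without touching vCenter — all names here are hypothetical stand-ins; the real flow lives in nova.virt.vmwareapi.vmops and vm_util:

    from dataclasses import dataclass, field

    @dataclass
    class FakeVMwareOps:
        # Records calls only; each method maps to one vSphere task name
        # seen in the log.
        calls: list = field(default_factory=list)
        cache: set = field(default_factory=set)

        def datastore_search(self, path):
            self.calls.append(("SearchDatastore_Task", path))
            return path in self.cache

        def copy_virtual_disk(self, src, dst):
            self.calls.append(("CopyVirtualDisk_Task", src, dst))

        def reconfig_attach_disk(self, vmdk):
            self.calls.append(("ReconfigVM_Task", vmdk))

        def power_on(self):
            self.calls.append(("PowerOnVM_Task",))

    def spawn(ops, datastore, image_id, instance_uuid, fetch_image):
        cached = (f"[{datastore}] devstack-image-cache_base/"
                  f"{image_id}/{image_id}.vmdk")
        target = f"[{datastore}] {instance_uuid}/{instance_uuid}.vmdk"
        if not ops.datastore_search(cached):  # cache miss: fetch once
            fetch_image(cached)
            ops.cache.add(cached)
        ops.copy_virtual_disk(cached, target)  # per-instance working copy
        ops.reconfig_attach_disk(target)       # attach disk to the new VM
        ops.power_on()

The shared image cache is why concurrent tempest spawns of the same cirros image above all copy from the one e497cc62-… directory instead of re-downloading from Glance.
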
[ 942.307919] env[61839]: DEBUG oslo_concurrency.lockutils [None req-fd9d3f07-46d8-42a7-a204-834fbf9f5be1 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Acquiring lock "refresh_cache-21e1c5b2-9865-457b-87c8-ce56c3c7b8f9" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 942.308101] env[61839]: DEBUG oslo_concurrency.lockutils [None req-fd9d3f07-46d8-42a7-a204-834fbf9f5be1 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Acquired lock "refresh_cache-21e1c5b2-9865-457b-87c8-ce56c3c7b8f9" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 942.308287] env[61839]: DEBUG nova.network.neutron [None req-fd9d3f07-46d8-42a7-a204-834fbf9f5be1 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: 21e1c5b2-9865-457b-87c8-ce56c3c7b8f9] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 942.337133] env[61839]: DEBUG oslo_concurrency.lockutils [None req-4ba7cb8a-9f51-4c96-8bb5-515a7d4d8be8 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Acquiring lock "a262845a-0ae2-4e0e-9040-01f0ed37c95c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 942.337133] env[61839]: DEBUG oslo_concurrency.lockutils [None req-4ba7cb8a-9f51-4c96-8bb5-515a7d4d8be8 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Lock "a262845a-0ae2-4e0e-9040-01f0ed37c95c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 942.473708] env[61839]: DEBUG nova.compute.manager [None req-61f94066-337d-4ee6-ac13-e4b101f6af4a tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] [instance: 75344275-bdf2-4526-a101-e62ec270dd72] Starting instance... {{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 942.529949] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54845948-445d-44d9-9e2a-e36e0ba94b0a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.550132] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2358daf8-cff2-4405-9141-51a02fa2afef {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.553704] env[61839]: DEBUG oslo_vmware.api [None req-1e94be6d-33e1-4c83-9b5e-75a1fbfe5edf tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Task: {'id': task-1314782, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.469415} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 942.559650] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-1e94be6d-33e1-4c83-9b5e-75a1fbfe5edf tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Deleted the datastore file {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 942.559902] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-1e94be6d-33e1-4c83-9b5e-75a1fbfe5edf tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] [instance: 86525ea7-af75-4b10-85a1-c0fbab73ea5f] Deleted contents of the VM from datastore datastore2 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 942.560134] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-1e94be6d-33e1-4c83-9b5e-75a1fbfe5edf tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] [instance: 86525ea7-af75-4b10-85a1-c0fbab73ea5f] Instance destroyed {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 942.562299] env[61839]: DEBUG oslo_vmware.api [None req-232c9fff-1e94-49d3-8695-e77d90e281a0 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Task: {'id': task-1314781, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.595856] env[61839]: INFO nova.scheduler.client.report [None req-1e94be6d-33e1-4c83-9b5e-75a1fbfe5edf tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Deleted allocations for instance 86525ea7-af75-4b10-85a1-c0fbab73ea5f [ 942.600075] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77bf108a-259d-4e19-813f-d92f88ff9704 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.612582] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4d5bd41-264c-41c1-82a3-0998013b2b61 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.632310] env[61839]: DEBUG nova.compute.provider_tree [None req-ac991200-7ce6-466a-b5fd-f91f890fd8c6 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 942.693077] env[61839]: DEBUG oslo_vmware.api [None req-bf3bcffb-6c34-4e16-a353-b3f853008852 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Task: {'id': task-1314783, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.731965] env[61839]: DEBUG oslo_concurrency.lockutils [None req-cbc82b7c-e238-4c12-a25e-cd1b247d627b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Lock "d718d866-dd6c-4332-b63a-be6850a5a785" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 26.011s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 942.840051] env[61839]: DEBUG nova.compute.manager [None req-4ba7cb8a-9f51-4c96-8bb5-515a7d4d8be8 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] [instance: a262845a-0ae2-4e0e-9040-01f0ed37c95c] Starting instance... {{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 942.852407] env[61839]: DEBUG nova.network.neutron [None req-fd9d3f07-46d8-42a7-a204-834fbf9f5be1 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: 21e1c5b2-9865-457b-87c8-ce56c3c7b8f9] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 943.001334] env[61839]: DEBUG oslo_concurrency.lockutils [None req-61f94066-337d-4ee6-ac13-e4b101f6af4a tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 943.024608] env[61839]: DEBUG nova.network.neutron [None req-fd9d3f07-46d8-42a7-a204-834fbf9f5be1 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: 21e1c5b2-9865-457b-87c8-ce56c3c7b8f9] Updating instance_info_cache with network_info: [{"id": "5dc2a4d8-1803-4434-b9af-037ce20523af", "address": "fa:16:3e:1b:ac:a8", "network": {"id": "8c9e6595-896e-44e0-996a-ff86bae5ad8e", "bridge": "br-int", "label": "tempest-ImagesTestJSON-788251660-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "251b0d2531ba4f14a2eb6ea75382c418", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ce17e10e-2fb0-4191-afee-e2b89fa15074", "external-id": "nsx-vlan-transportzone-352", "segmentation_id": 352, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5dc2a4d8-18", "ovs_interfaceid": "5dc2a4d8-1803-4434-b9af-037ce20523af", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 943.050751] env[61839]: DEBUG oslo_vmware.api [None req-232c9fff-1e94-49d3-8695-e77d90e281a0 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Task: {'id': task-1314781, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.811195} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 943.051067] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-232c9fff-1e94-49d3-8695-e77d90e281a0 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk to [datastore1] d4a8c153-7585-4c78-8aa4-56077e0a7af6/d4a8c153-7585-4c78-8aa4-56077e0a7af6.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 943.051318] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-232c9fff-1e94-49d3-8695-e77d90e281a0 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] [instance: d4a8c153-7585-4c78-8aa4-56077e0a7af6] Extending root virtual disk to 1048576 {{(pid=61839) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 943.051598] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-81e0dfd8-244b-49ae-b59d-0e84c205572f {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.059517] env[61839]: DEBUG oslo_vmware.api [None req-232c9fff-1e94-49d3-8695-e77d90e281a0 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Waiting for the task: (returnval){ [ 943.059517] env[61839]: value = "task-1314784" [ 943.059517] env[61839]: _type = "Task" [ 943.059517] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 943.070651] env[61839]: DEBUG oslo_vmware.api [None req-232c9fff-1e94-49d3-8695-e77d90e281a0 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Task: {'id': task-1314784, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.101444] env[61839]: DEBUG oslo_concurrency.lockutils [None req-1e94be6d-33e1-4c83-9b5e-75a1fbfe5edf tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 943.135853] env[61839]: DEBUG nova.scheduler.client.report [None req-ac991200-7ce6-466a-b5fd-f91f890fd8c6 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 943.193513] env[61839]: DEBUG oslo_vmware.api [None req-bf3bcffb-6c34-4e16-a353-b3f853008852 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Task: {'id': task-1314783, 'name': ReconfigVM_Task, 'duration_secs': 0.597165} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 943.193922] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-bf3bcffb-6c34-4e16-a353-b3f853008852 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: 694a5d4b-3673-406b-a24a-d37fad33e549] Reconfigured VM instance instance-00000040 to attach disk [datastore1] 694a5d4b-3673-406b-a24a-d37fad33e549/694a5d4b-3673-406b-a24a-d37fad33e549.vmdk or device None with type streamOptimized {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 943.195460] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-bf3bcffb-6c34-4e16-a353-b3f853008852 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: 694a5d4b-3673-406b-a24a-d37fad33e549] Block device information present: {'root_device_name': '/dev/sda', 'image': [{'size': 0, 'disk_bus': None, 'device_type': 'disk', 'guest_format': None, 'encrypted': False, 'boot_index': 0, 'encryption_format': None, 'encryption_options': None, 'encryption_secret_uuid': None, 'device_name': '/dev/sda', 'image_id': 'e497cc62-282a-4a70-9770-22d80d8a1013'}], 'ephemerals': [], 'block_device_mapping': [{'disk_bus': None, 'device_type': None, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-281409', 'volume_id': '6d6015d8-0b91-403e-ae0f-19b4e71ccee2', 'name': 'volume-6d6015d8-0b91-403e-ae0f-19b4e71ccee2', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attached', 'instance': '694a5d4b-3673-406b-a24a-d37fad33e549', 'attached_at': '', 'detached_at': '', 'volume_id': '6d6015d8-0b91-403e-ae0f-19b4e71ccee2', 'serial': '6d6015d8-0b91-403e-ae0f-19b4e71ccee2'}, 'guest_format': None, 'attachment_id': 'be33680c-46ec-4df7-b2e6-47f44b8600ce', 'mount_device': '/dev/sdb', 
'delete_on_termination': False, 'boot_index': None, 'volume_type': None}], 'swap': None} {{(pid=61839) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 943.196558] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-bf3bcffb-6c34-4e16-a353-b3f853008852 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: 694a5d4b-3673-406b-a24a-d37fad33e549] Volume attach. Driver type: vmdk {{(pid=61839) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 943.197084] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-bf3bcffb-6c34-4e16-a353-b3f853008852 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: 694a5d4b-3673-406b-a24a-d37fad33e549] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-281409', 'volume_id': '6d6015d8-0b91-403e-ae0f-19b4e71ccee2', 'name': 'volume-6d6015d8-0b91-403e-ae0f-19b4e71ccee2', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attached', 'instance': '694a5d4b-3673-406b-a24a-d37fad33e549', 'attached_at': '', 'detached_at': '', 'volume_id': '6d6015d8-0b91-403e-ae0f-19b4e71ccee2', 'serial': '6d6015d8-0b91-403e-ae0f-19b4e71ccee2'} {{(pid=61839) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 943.197989] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fee3a41d-07e5-43fc-a447-110c0f322934 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.216798] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9efcf221-d812-43db-b537-fe4720574d5d {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.246266] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-bf3bcffb-6c34-4e16-a353-b3f853008852 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: 694a5d4b-3673-406b-a24a-d37fad33e549] Reconfiguring VM instance instance-00000040 to attach disk [datastore1] volume-6d6015d8-0b91-403e-ae0f-19b4e71ccee2/volume-6d6015d8-0b91-403e-ae0f-19b4e71ccee2.vmdk or device None with type thin {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 943.246654] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2551e93b-3c31-494b-8469-250d18249a13 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.277945] env[61839]: DEBUG oslo_vmware.api [None req-bf3bcffb-6c34-4e16-a353-b3f853008852 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Waiting for the task: (returnval){ [ 943.277945] env[61839]: value = "task-1314785" [ 943.277945] env[61839]: _type = "Task" [ 943.277945] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 943.287657] env[61839]: DEBUG oslo_vmware.api [None req-bf3bcffb-6c34-4e16-a353-b3f853008852 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Task: {'id': task-1314785, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.361429] env[61839]: DEBUG oslo_concurrency.lockutils [None req-4ba7cb8a-9f51-4c96-8bb5-515a7d4d8be8 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 943.527460] env[61839]: DEBUG oslo_concurrency.lockutils [None req-fd9d3f07-46d8-42a7-a204-834fbf9f5be1 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Releasing lock "refresh_cache-21e1c5b2-9865-457b-87c8-ce56c3c7b8f9" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 943.527980] env[61839]: DEBUG nova.compute.manager [None req-fd9d3f07-46d8-42a7-a204-834fbf9f5be1 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: 21e1c5b2-9865-457b-87c8-ce56c3c7b8f9] Instance network_info: |[{"id": "5dc2a4d8-1803-4434-b9af-037ce20523af", "address": "fa:16:3e:1b:ac:a8", "network": {"id": "8c9e6595-896e-44e0-996a-ff86bae5ad8e", "bridge": "br-int", "label": "tempest-ImagesTestJSON-788251660-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "251b0d2531ba4f14a2eb6ea75382c418", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ce17e10e-2fb0-4191-afee-e2b89fa15074", "external-id": "nsx-vlan-transportzone-352", "segmentation_id": 352, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5dc2a4d8-18", "ovs_interfaceid": "5dc2a4d8-1803-4434-b9af-037ce20523af", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 943.528394] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-fd9d3f07-46d8-42a7-a204-834fbf9f5be1 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: 21e1c5b2-9865-457b-87c8-ce56c3c7b8f9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1b:ac:a8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ce17e10e-2fb0-4191-afee-e2b89fa15074', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5dc2a4d8-1803-4434-b9af-037ce20523af', 'vif_model': 'vmxnet3'}] {{(pid=61839) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 943.537015] env[61839]: DEBUG oslo.service.loopingcall [None req-fd9d3f07-46d8-42a7-a204-834fbf9f5be1 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 943.537264] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 21e1c5b2-9865-457b-87c8-ce56c3c7b8f9] Creating VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 943.537528] env[61839]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8fe651fb-2e58-4250-addd-6748d453d7ec {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.558212] env[61839]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 943.558212] env[61839]: value = "task-1314786" [ 943.558212] env[61839]: _type = "Task" [ 943.558212] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 943.572343] env[61839]: DEBUG oslo_vmware.api [None req-232c9fff-1e94-49d3-8695-e77d90e281a0 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Task: {'id': task-1314784, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.133234} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 943.575680] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-232c9fff-1e94-49d3-8695-e77d90e281a0 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] [instance: d4a8c153-7585-4c78-8aa4-56077e0a7af6] Extended root virtual disk {{(pid=61839) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 943.575968] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314786, 'name': CreateVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.576700] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c01762b2-4f8b-4f2d-9ef4-bf461e33fec6 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.599757] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-232c9fff-1e94-49d3-8695-e77d90e281a0 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] [instance: d4a8c153-7585-4c78-8aa4-56077e0a7af6] Reconfiguring VM instance instance-0000005a to attach disk [datastore1] d4a8c153-7585-4c78-8aa4-56077e0a7af6/d4a8c153-7585-4c78-8aa4-56077e0a7af6.vmdk or device None with type sparse {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 943.600158] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-09ae3ead-e7e1-4c38-8fa9-0a71aabc574d {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.620970] env[61839]: DEBUG oslo_vmware.api [None req-232c9fff-1e94-49d3-8695-e77d90e281a0 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Waiting for the task: (returnval){ [ 943.620970] env[61839]: value = "task-1314787" [ 943.620970] env[61839]: _type = "Task" [ 943.620970] env[61839]: } to complete. 
{{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 943.630452] env[61839]: DEBUG oslo_vmware.api [None req-232c9fff-1e94-49d3-8695-e77d90e281a0 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Task: {'id': task-1314787, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.641522] env[61839]: DEBUG oslo_concurrency.lockutils [None req-ac991200-7ce6-466a-b5fd-f91f890fd8c6 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.438s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 943.642170] env[61839]: DEBUG nova.compute.manager [None req-ac991200-7ce6-466a-b5fd-f91f890fd8c6 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 23ee24d5-bccd-497d-a53f-b9723fd9c707] Start building networks asynchronously for instance. {{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 943.645034] env[61839]: DEBUG oslo_concurrency.lockutils [None req-87a2dd96-4fb1-4151-85f6-7df76dfda5a7 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 15.530s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 943.645269] env[61839]: DEBUG oslo_concurrency.lockutils [None req-87a2dd96-4fb1-4151-85f6-7df76dfda5a7 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 943.647473] env[61839]: DEBUG oslo_concurrency.lockutils [None req-7ff34d1a-453a-49ff-af63-371c12d0a7c5 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.341s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 943.649087] env[61839]: INFO nova.compute.claims [None req-7ff34d1a-453a-49ff-af63-371c12d0a7c5 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 3e27bc4a-a4f3-4929-931a-0c3ecaf10e65] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 943.672054] env[61839]: INFO nova.scheduler.client.report [None req-87a2dd96-4fb1-4151-85f6-7df76dfda5a7 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Deleted allocations for instance fa8a2265-291e-4424-bea1-72574e495a72 [ 943.789489] env[61839]: DEBUG oslo_vmware.api [None req-bf3bcffb-6c34-4e16-a353-b3f853008852 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Task: {'id': task-1314785, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.851529] env[61839]: DEBUG nova.compute.manager [req-845854f2-e6df-46f2-aa1b-ffd7b3e4cb31 req-75978ce6-6d3f-4403-8589-20334c696fde service nova] [instance: 86525ea7-af75-4b10-85a1-c0fbab73ea5f] Received event network-changed-ef8176cf-7494-44f4-a600-7dedff162419 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 943.851886] env[61839]: DEBUG nova.compute.manager [req-845854f2-e6df-46f2-aa1b-ffd7b3e4cb31 req-75978ce6-6d3f-4403-8589-20334c696fde service nova] [instance: 86525ea7-af75-4b10-85a1-c0fbab73ea5f] Refreshing instance network info cache due to event network-changed-ef8176cf-7494-44f4-a600-7dedff162419. {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 943.852244] env[61839]: DEBUG oslo_concurrency.lockutils [req-845854f2-e6df-46f2-aa1b-ffd7b3e4cb31 req-75978ce6-6d3f-4403-8589-20334c696fde service nova] Acquiring lock "refresh_cache-86525ea7-af75-4b10-85a1-c0fbab73ea5f" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 943.852487] env[61839]: DEBUG oslo_concurrency.lockutils [req-845854f2-e6df-46f2-aa1b-ffd7b3e4cb31 req-75978ce6-6d3f-4403-8589-20334c696fde service nova] Acquired lock "refresh_cache-86525ea7-af75-4b10-85a1-c0fbab73ea5f" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 943.852746] env[61839]: DEBUG nova.network.neutron [req-845854f2-e6df-46f2-aa1b-ffd7b3e4cb31 req-75978ce6-6d3f-4403-8589-20334c696fde service nova] [instance: 86525ea7-af75-4b10-85a1-c0fbab73ea5f] Refreshing network info cache for port ef8176cf-7494-44f4-a600-7dedff162419 {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 943.855392] env[61839]: DEBUG nova.compute.manager [req-db674259-3183-462f-8093-029774c7460b req-3207f398-76ca-4879-9380-0843b10c3a51 service nova] [instance: 21e1c5b2-9865-457b-87c8-ce56c3c7b8f9] Received event network-changed-5dc2a4d8-1803-4434-b9af-037ce20523af {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 943.855661] env[61839]: DEBUG nova.compute.manager [req-db674259-3183-462f-8093-029774c7460b req-3207f398-76ca-4879-9380-0843b10c3a51 service nova] [instance: 21e1c5b2-9865-457b-87c8-ce56c3c7b8f9] Refreshing instance network info cache due to event network-changed-5dc2a4d8-1803-4434-b9af-037ce20523af. 
{{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 943.856431] env[61839]: DEBUG oslo_concurrency.lockutils [req-db674259-3183-462f-8093-029774c7460b req-3207f398-76ca-4879-9380-0843b10c3a51 service nova] Acquiring lock "refresh_cache-21e1c5b2-9865-457b-87c8-ce56c3c7b8f9" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 943.856431] env[61839]: DEBUG oslo_concurrency.lockutils [req-db674259-3183-462f-8093-029774c7460b req-3207f398-76ca-4879-9380-0843b10c3a51 service nova] Acquired lock "refresh_cache-21e1c5b2-9865-457b-87c8-ce56c3c7b8f9" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 943.856431] env[61839]: DEBUG nova.network.neutron [req-db674259-3183-462f-8093-029774c7460b req-3207f398-76ca-4879-9380-0843b10c3a51 service nova] [instance: 21e1c5b2-9865-457b-87c8-ce56c3c7b8f9] Refreshing network info cache for port 5dc2a4d8-1803-4434-b9af-037ce20523af {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 944.069732] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314786, 'name': CreateVM_Task, 'duration_secs': 0.447872} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 944.069923] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 21e1c5b2-9865-457b-87c8-ce56c3c7b8f9] Created VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 944.070676] env[61839]: DEBUG oslo_concurrency.lockutils [None req-fd9d3f07-46d8-42a7-a204-834fbf9f5be1 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 944.070868] env[61839]: DEBUG oslo_concurrency.lockutils [None req-fd9d3f07-46d8-42a7-a204-834fbf9f5be1 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 944.071239] env[61839]: DEBUG oslo_concurrency.lockutils [None req-fd9d3f07-46d8-42a7-a204-834fbf9f5be1 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 944.071642] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-367f772c-8c81-4910-a975-c2a4b253e888 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.077355] env[61839]: DEBUG oslo_vmware.api [None req-fd9d3f07-46d8-42a7-a204-834fbf9f5be1 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Waiting for the task: (returnval){ [ 944.077355] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52b080f8-15bc-4874-a71d-bd3d1ebd1d13" [ 944.077355] env[61839]: _type = "Task" [ 944.077355] env[61839]: } to complete. 
{{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 944.086050] env[61839]: DEBUG oslo_vmware.api [None req-fd9d3f07-46d8-42a7-a204-834fbf9f5be1 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52b080f8-15bc-4874-a71d-bd3d1ebd1d13, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.132391] env[61839]: DEBUG oslo_vmware.api [None req-232c9fff-1e94-49d3-8695-e77d90e281a0 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Task: {'id': task-1314787, 'name': ReconfigVM_Task, 'duration_secs': 0.396726} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 944.132754] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-232c9fff-1e94-49d3-8695-e77d90e281a0 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] [instance: d4a8c153-7585-4c78-8aa4-56077e0a7af6] Reconfigured VM instance instance-0000005a to attach disk [datastore1] d4a8c153-7585-4c78-8aa4-56077e0a7af6/d4a8c153-7585-4c78-8aa4-56077e0a7af6.vmdk or device None with type sparse {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 944.133456] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e793cdc7-befe-4298-b34f-2ed4003be952 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.142394] env[61839]: DEBUG oslo_vmware.api [None req-232c9fff-1e94-49d3-8695-e77d90e281a0 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Waiting for the task: (returnval){ [ 944.142394] env[61839]: value = "task-1314788" [ 944.142394] env[61839]: _type = "Task" [ 944.142394] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 944.151431] env[61839]: DEBUG oslo_vmware.api [None req-232c9fff-1e94-49d3-8695-e77d90e281a0 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Task: {'id': task-1314788, 'name': Rename_Task} progress is 5%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.154295] env[61839]: DEBUG nova.compute.utils [None req-ac991200-7ce6-466a-b5fd-f91f890fd8c6 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Using /dev/sd instead of None {{(pid=61839) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 944.158069] env[61839]: DEBUG nova.compute.manager [None req-ac991200-7ce6-466a-b5fd-f91f890fd8c6 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 23ee24d5-bccd-497d-a53f-b9723fd9c707] Allocating IP information in the background. 
{{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 944.158069] env[61839]: DEBUG nova.network.neutron [None req-ac991200-7ce6-466a-b5fd-f91f890fd8c6 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 23ee24d5-bccd-497d-a53f-b9723fd9c707] allocate_for_instance() {{(pid=61839) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 944.179490] env[61839]: DEBUG oslo_concurrency.lockutils [None req-87a2dd96-4fb1-4151-85f6-7df76dfda5a7 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Lock "fa8a2265-291e-4424-bea1-72574e495a72" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 19.536s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 944.229394] env[61839]: DEBUG nova.policy [None req-ac991200-7ce6-466a-b5fd-f91f890fd8c6 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8677a31386a54087b2328734c2eadeb3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8935bcc7ee644cb7a2a33557a708189c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61839) authorize /opt/stack/nova/nova/policy.py:201}} [ 944.293020] env[61839]: DEBUG oslo_vmware.api [None req-bf3bcffb-6c34-4e16-a353-b3f853008852 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Task: {'id': task-1314785, 'name': ReconfigVM_Task, 'duration_secs': 0.591174} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 944.293020] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-bf3bcffb-6c34-4e16-a353-b3f853008852 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: 694a5d4b-3673-406b-a24a-d37fad33e549] Reconfigured VM instance instance-00000040 to attach disk [datastore1] volume-6d6015d8-0b91-403e-ae0f-19b4e71ccee2/volume-6d6015d8-0b91-403e-ae0f-19b4e71ccee2.vmdk or device None with type thin {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 944.295662] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5df67217-2cbc-4a57-ab59-4d3c054c5907 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.315839] env[61839]: DEBUG oslo_vmware.api [None req-bf3bcffb-6c34-4e16-a353-b3f853008852 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Waiting for the task: (returnval){ [ 944.315839] env[61839]: value = "task-1314789" [ 944.315839] env[61839]: _type = "Task" [ 944.315839] env[61839]: } to complete.
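The nova.policy record above is a deny decision: network:attach_external_network defaults to admin-only, and the request credentials carry only the member and reader roles. A toy illustration of that check follows; the rule table is invented for the example, the real engine is oslo.policy.

```python
# Minimal sketch of the policy decision logged above; the rule is illustrative.
POLICY = {
    "network:attach_external_network": lambda creds: creds.get("is_admin", False),
}

def authorize(action: str, creds: dict) -> bool:
    allowed = POLICY[action](creds)
    if not allowed:
        print(f"Policy check for {action} failed with credentials {creds}")
    return allowed

authorize("network:attach_external_network",
          {"is_admin": False, "roles": ["member", "reader"]})   # -> False
```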
{{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 944.328347] env[61839]: DEBUG oslo_vmware.api [None req-bf3bcffb-6c34-4e16-a353-b3f853008852 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Task: {'id': task-1314789, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.548127] env[61839]: DEBUG nova.network.neutron [None req-ac991200-7ce6-466a-b5fd-f91f890fd8c6 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 23ee24d5-bccd-497d-a53f-b9723fd9c707] Successfully created port: 56222a14-6e55-4e9b-a963-5d868763ad21 {{(pid=61839) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 944.589262] env[61839]: DEBUG oslo_vmware.api [None req-fd9d3f07-46d8-42a7-a204-834fbf9f5be1 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52b080f8-15bc-4874-a71d-bd3d1ebd1d13, 'name': SearchDatastore_Task, 'duration_secs': 0.011723} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 944.589731] env[61839]: DEBUG oslo_concurrency.lockutils [None req-fd9d3f07-46d8-42a7-a204-834fbf9f5be1 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 944.589960] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-fd9d3f07-46d8-42a7-a204-834fbf9f5be1 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: 21e1c5b2-9865-457b-87c8-ce56c3c7b8f9] Processing image e497cc62-282a-4a70-9770-22d80d8a1013 {{(pid=61839) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 944.590097] env[61839]: DEBUG oslo_concurrency.lockutils [None req-fd9d3f07-46d8-42a7-a204-834fbf9f5be1 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 944.590250] env[61839]: DEBUG oslo_concurrency.lockutils [None req-fd9d3f07-46d8-42a7-a204-834fbf9f5be1 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 944.590437] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-fd9d3f07-46d8-42a7-a204-834fbf9f5be1 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 944.590799] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6ca63866-3f95-4204-9a86-e870074b443b {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.600326] 
env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-fd9d3f07-46d8-42a7-a204-834fbf9f5be1 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 944.600547] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-fd9d3f07-46d8-42a7-a204-834fbf9f5be1 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61839) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 944.601321] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2af4bf2f-cbe7-4dd7-82c7-02342d0698b2 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.607843] env[61839]: DEBUG oslo_vmware.api [None req-fd9d3f07-46d8-42a7-a204-834fbf9f5be1 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Waiting for the task: (returnval){ [ 944.607843] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52ba393c-b67d-9fb1-abf8-a9cc0f8ee69b" [ 944.607843] env[61839]: _type = "Task" [ 944.607843] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 944.616634] env[61839]: DEBUG oslo_vmware.api [None req-fd9d3f07-46d8-42a7-a204-834fbf9f5be1 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52ba393c-b67d-9fb1-abf8-a9cc0f8ee69b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.652333] env[61839]: DEBUG oslo_vmware.api [None req-232c9fff-1e94-49d3-8695-e77d90e281a0 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Task: {'id': task-1314788, 'name': Rename_Task} progress is 99%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.663283] env[61839]: DEBUG nova.compute.manager [None req-ac991200-7ce6-466a-b5fd-f91f890fd8c6 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 23ee24d5-bccd-497d-a53f-b9723fd9c707] Start building block device mappings for instance. {{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 944.829486] env[61839]: DEBUG oslo_vmware.api [None req-bf3bcffb-6c34-4e16-a353-b3f853008852 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Task: {'id': task-1314789, 'name': ReconfigVM_Task, 'duration_secs': 0.248932} completed successfully. 
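The preceding records create the devstack-image-cache_base folder (MakeDirectory) and then run SearchDatastore_Task to see whether the image is already cached. A local-filesystem analogue of that check-then-create step, with illustrative paths and an invented helper name:

```python
# Local analogue of the image-cache lookup; the real driver uses
# MakeDirectory and SearchDatastore_Task against the datastore.
from pathlib import Path

def lookup_cached_image(datastore_root: str, image_id: str):
    """Return the cached VMDK path, creating the cache folder if missing."""
    cache_dir = Path(datastore_root) / "devstack-image-cache_base"
    cache_dir.mkdir(parents=True, exist_ok=True)  # "Created directory with path ..."
    vmdk = cache_dir / image_id / f"{image_id}.vmdk"
    return vmdk if vmdk.exists() else None        # None -> image must be fetched first
```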
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 944.829812] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-bf3bcffb-6c34-4e16-a353-b3f853008852 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: 694a5d4b-3673-406b-a24a-d37fad33e549] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-281409', 'volume_id': '6d6015d8-0b91-403e-ae0f-19b4e71ccee2', 'name': 'volume-6d6015d8-0b91-403e-ae0f-19b4e71ccee2', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attached', 'instance': '694a5d4b-3673-406b-a24a-d37fad33e549', 'attached_at': '', 'detached_at': '', 'volume_id': '6d6015d8-0b91-403e-ae0f-19b4e71ccee2', 'serial': '6d6015d8-0b91-403e-ae0f-19b4e71ccee2'} {{(pid=61839) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 944.830445] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f38fbf1d-52a6-4528-918d-9129c3adffaa {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.841586] env[61839]: DEBUG oslo_vmware.api [None req-bf3bcffb-6c34-4e16-a353-b3f853008852 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Waiting for the task: (returnval){ [ 944.841586] env[61839]: value = "task-1314790" [ 944.841586] env[61839]: _type = "Task" [ 944.841586] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 944.852815] env[61839]: DEBUG oslo_vmware.api [None req-bf3bcffb-6c34-4e16-a353-b3f853008852 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Task: {'id': task-1314790, 'name': Rename_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.980356] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02469e39-9c34-41dc-9366-81483c59d228 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.988712] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f283a4fd-44e9-41d1-a185-afdca8f53bbf {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.022498] env[61839]: DEBUG nova.network.neutron [req-845854f2-e6df-46f2-aa1b-ffd7b3e4cb31 req-75978ce6-6d3f-4403-8589-20334c696fde service nova] [instance: 86525ea7-af75-4b10-85a1-c0fbab73ea5f] Updated VIF entry in instance network info cache for port ef8176cf-7494-44f4-a600-7dedff162419. 
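The "Attached VMDK" record above prints the attachment result as a plain dict. A small sketch of pulling the commonly needed fields back out of that structure; the dict layout is copied from the log line, the helper is invented:

```python
# Reading back the attach result shown in the log record above.
attach_info = {
    "driver_volume_type": "vmdk",
    "data": {"volume": "vm-281409",
             "volume_id": "6d6015d8-0b91-403e-ae0f-19b4e71ccee2",
             "access_mode": "rw", "encrypted": False},
    "status": "attached",
    "serial": "6d6015d8-0b91-403e-ae0f-19b4e71ccee2",
}

def describe_attachment(info: dict) -> str:
    data = info["data"]
    return f"{info['driver_volume_type']} volume {data['volume_id']} ({data['access_mode']})"

assert describe_attachment(attach_info) == (
    "vmdk volume 6d6015d8-0b91-403e-ae0f-19b4e71ccee2 (rw)")
```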
{{(pid=61839) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 945.022970] env[61839]: DEBUG nova.network.neutron [req-845854f2-e6df-46f2-aa1b-ffd7b3e4cb31 req-75978ce6-6d3f-4403-8589-20334c696fde service nova] [instance: 86525ea7-af75-4b10-85a1-c0fbab73ea5f] Updating instance_info_cache with network_info: [{"id": "ef8176cf-7494-44f4-a600-7dedff162419", "address": "fa:16:3e:c1:30:b3", "network": {"id": "8bda8ac4-b34c-4577-ae5e-07845e9e7428", "bridge": null, "label": "tempest-ServersNegativeTestJSON-74282814-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c7a300fe2748456bb4a522a4d7c0d0f4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tapef8176cf-74", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 945.024898] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6a4fd63-e929-496e-ba54-3ec962dee703 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.029681] env[61839]: DEBUG nova.network.neutron [req-db674259-3183-462f-8093-029774c7460b req-3207f398-76ca-4879-9380-0843b10c3a51 service nova] [instance: 21e1c5b2-9865-457b-87c8-ce56c3c7b8f9] Updated VIF entry in instance network info cache for port 5dc2a4d8-1803-4434-b9af-037ce20523af. 
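The instance_info_cache entries above are lists of VIF dicts, each wrapping a network with subnets, fixed IPs and DHCP metadata. A sketch of walking that structure; the shape is taken from the log, the helper is invented:

```python
# Collect every fixed IP across all VIFs in a cache entry like the one above.
def fixed_ips(network_info: list) -> list:
    ips = []
    for vif in network_info:
        for subnet in vif["network"]["subnets"]:
            ips += [ip["address"] for ip in subnet["ips"] if ip["type"] == "fixed"]
    return ips

cache_entry = [{"id": "ef8176cf-7494-44f4-a600-7dedff162419",
                "network": {"subnets": [{"cidr": "192.168.128.0/28",
                                         "ips": [{"address": "192.168.128.14",
                                                  "type": "fixed"}]}]}}]
assert fixed_ips(cache_entry) == ["192.168.128.14"]
```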
{{(pid=61839) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 945.030104] env[61839]: DEBUG nova.network.neutron [req-db674259-3183-462f-8093-029774c7460b req-3207f398-76ca-4879-9380-0843b10c3a51 service nova] [instance: 21e1c5b2-9865-457b-87c8-ce56c3c7b8f9] Updating instance_info_cache with network_info: [{"id": "5dc2a4d8-1803-4434-b9af-037ce20523af", "address": "fa:16:3e:1b:ac:a8", "network": {"id": "8c9e6595-896e-44e0-996a-ff86bae5ad8e", "bridge": "br-int", "label": "tempest-ImagesTestJSON-788251660-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "251b0d2531ba4f14a2eb6ea75382c418", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ce17e10e-2fb0-4191-afee-e2b89fa15074", "external-id": "nsx-vlan-transportzone-352", "segmentation_id": 352, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5dc2a4d8-18", "ovs_interfaceid": "5dc2a4d8-1803-4434-b9af-037ce20523af", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 945.034788] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-701ba994-bc11-4de3-a7bb-84ee5330f1e4 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.050695] env[61839]: DEBUG nova.compute.provider_tree [None req-7ff34d1a-453a-49ff-af63-371c12d0a7c5 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 945.121143] env[61839]: DEBUG oslo_vmware.api [None req-fd9d3f07-46d8-42a7-a204-834fbf9f5be1 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52ba393c-b67d-9fb1-abf8-a9cc0f8ee69b, 'name': SearchDatastore_Task, 'duration_secs': 0.011404} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 945.121401] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4c37581a-aa2c-4bff-a20a-1fa2bae21888 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.127603] env[61839]: DEBUG oslo_vmware.api [None req-fd9d3f07-46d8-42a7-a204-834fbf9f5be1 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Waiting for the task: (returnval){ [ 945.127603] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]521b528c-d63d-b8ff-3ed8-10914acb6431" [ 945.127603] env[61839]: _type = "Task" [ 945.127603] env[61839]: } to complete. 
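The "Inventory has not changed in ProviderTree" records reflect a cache comparison: the resource tracker only reports inventory to Placement when the dict differs from what it last saw. A loose sketch of that decision, not the actual provider-tree code:

```python
# Report inventory only when it differs from the cached copy per provider.
_provider_inventories: dict[str, dict] = {}

def update_inventory(provider_id: str, inventory: dict) -> bool:
    if _provider_inventories.get(provider_id) == inventory:
        print(f"Inventory has not changed in ProviderTree for provider: {provider_id}")
        return False
    _provider_inventories[provider_id] = inventory
    return True  # a real tracker would now push the inventory to Placement

update_inventory("cef329e6-1ccd-42a8-bbc4-109a06d1c908",
                 {"VCPU": {"total": 48, "allocation_ratio": 4.0}})  # True on first sight
```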
{{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 945.136495] env[61839]: DEBUG oslo_vmware.api [None req-fd9d3f07-46d8-42a7-a204-834fbf9f5be1 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]521b528c-d63d-b8ff-3ed8-10914acb6431, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.153984] env[61839]: DEBUG oslo_vmware.api [None req-232c9fff-1e94-49d3-8695-e77d90e281a0 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Task: {'id': task-1314788, 'name': Rename_Task} progress is 99%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.281052] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3eb64946-db7f-4575-9761-49a11077e028 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Acquiring lock "86525ea7-af75-4b10-85a1-c0fbab73ea5f" by "nova.compute.manager.ComputeManager.unshelve_instance.<locals>.do_unshelve_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 945.351554] env[61839]: DEBUG oslo_vmware.api [None req-bf3bcffb-6c34-4e16-a353-b3f853008852 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Task: {'id': task-1314790, 'name': Rename_Task, 'duration_secs': 0.231927} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 945.351838] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf3bcffb-6c34-4e16-a353-b3f853008852 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: 694a5d4b-3673-406b-a24a-d37fad33e549] Powering on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 945.352133] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-79136d20-2278-4b0f-888e-476c983bd0c8 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.359466] env[61839]: DEBUG oslo_vmware.api [None req-bf3bcffb-6c34-4e16-a353-b3f853008852 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Waiting for the task: (returnval){ [ 945.359466] env[61839]: value = "task-1314791" [ 945.359466] env[61839]: _type = "Task" [ 945.359466] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 945.368415] env[61839]: DEBUG oslo_vmware.api [None req-bf3bcffb-6c34-4e16-a353-b3f853008852 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Task: {'id': task-1314791, 'name': PowerOnVM_Task} progress is 0%.
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.529037] env[61839]: DEBUG oslo_concurrency.lockutils [req-845854f2-e6df-46f2-aa1b-ffd7b3e4cb31 req-75978ce6-6d3f-4403-8589-20334c696fde service nova] Releasing lock "refresh_cache-86525ea7-af75-4b10-85a1-c0fbab73ea5f" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 945.529387] env[61839]: DEBUG nova.compute.manager [req-845854f2-e6df-46f2-aa1b-ffd7b3e4cb31 req-75978ce6-6d3f-4403-8589-20334c696fde service nova] [instance: d718d866-dd6c-4332-b63a-be6850a5a785] Received event network-changed-5d041d96-4a6e-44d5-a31a-e597194524e0 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 945.529597] env[61839]: DEBUG nova.compute.manager [req-845854f2-e6df-46f2-aa1b-ffd7b3e4cb31 req-75978ce6-6d3f-4403-8589-20334c696fde service nova] [instance: d718d866-dd6c-4332-b63a-be6850a5a785] Refreshing instance network info cache due to event network-changed-5d041d96-4a6e-44d5-a31a-e597194524e0. {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 945.529828] env[61839]: DEBUG oslo_concurrency.lockutils [req-845854f2-e6df-46f2-aa1b-ffd7b3e4cb31 req-75978ce6-6d3f-4403-8589-20334c696fde service nova] Acquiring lock "refresh_cache-d718d866-dd6c-4332-b63a-be6850a5a785" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 945.529982] env[61839]: DEBUG oslo_concurrency.lockutils [req-845854f2-e6df-46f2-aa1b-ffd7b3e4cb31 req-75978ce6-6d3f-4403-8589-20334c696fde service nova] Acquired lock "refresh_cache-d718d866-dd6c-4332-b63a-be6850a5a785" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 945.530236] env[61839]: DEBUG nova.network.neutron [req-845854f2-e6df-46f2-aa1b-ffd7b3e4cb31 req-75978ce6-6d3f-4403-8589-20334c696fde service nova] [instance: d718d866-dd6c-4332-b63a-be6850a5a785] Refreshing network info cache for port 5d041d96-4a6e-44d5-a31a-e597194524e0 {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 945.534447] env[61839]: DEBUG oslo_concurrency.lockutils [req-db674259-3183-462f-8093-029774c7460b req-3207f398-76ca-4879-9380-0843b10c3a51 service nova] Releasing lock "refresh_cache-21e1c5b2-9865-457b-87c8-ce56c3c7b8f9" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 945.553868] env[61839]: DEBUG nova.scheduler.client.report [None req-7ff34d1a-453a-49ff-af63-371c12d0a7c5 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 945.639550] env[61839]: DEBUG oslo_vmware.api [None req-fd9d3f07-46d8-42a7-a204-834fbf9f5be1 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]521b528c-d63d-b8ff-3ed8-10914acb6431, 'name': 
SearchDatastore_Task, 'duration_secs': 0.011052} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 945.639550] env[61839]: DEBUG oslo_concurrency.lockutils [None req-fd9d3f07-46d8-42a7-a204-834fbf9f5be1 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 945.639792] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd9d3f07-46d8-42a7-a204-834fbf9f5be1 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk to [datastore2] 21e1c5b2-9865-457b-87c8-ce56c3c7b8f9/21e1c5b2-9865-457b-87c8-ce56c3c7b8f9.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 945.640019] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c596c4ae-f504-4b08-8e17-3de5f7dcf942 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.649152] env[61839]: DEBUG oslo_vmware.api [None req-fd9d3f07-46d8-42a7-a204-834fbf9f5be1 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Waiting for the task: (returnval){ [ 945.649152] env[61839]: value = "task-1314792" [ 945.649152] env[61839]: _type = "Task" [ 945.649152] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 945.652491] env[61839]: DEBUG oslo_vmware.api [None req-232c9fff-1e94-49d3-8695-e77d90e281a0 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Task: {'id': task-1314788, 'name': Rename_Task, 'duration_secs': 1.172924} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 945.657863] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-232c9fff-1e94-49d3-8695-e77d90e281a0 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] [instance: d4a8c153-7585-4c78-8aa4-56077e0a7af6] Powering on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 945.658250] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f3fb0675-5e41-4edb-8c78-48100ddd89ef {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.666186] env[61839]: DEBUG oslo_vmware.api [None req-fd9d3f07-46d8-42a7-a204-834fbf9f5be1 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Task: {'id': task-1314792, 'name': CopyVirtualDisk_Task} progress is 0%. 
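The CopyVirtualDisk_Task above clones the cached base image into the instance's own directory before first boot. A local-filesystem analogue of that step; the real copy runs server-side on the datastore, and the paths are illustrative:

```python
# Clone the cached base VMDK into the instance directory, as the
# CopyVirtualDisk_Task record does on the datastore.
import shutil
from pathlib import Path

def clone_root_disk(cached_vmdk: Path, instance_uuid: str, datastore_root: Path) -> Path:
    dest = datastore_root / instance_uuid / f"{instance_uuid}.vmdk"
    dest.parent.mkdir(parents=True, exist_ok=True)
    shutil.copyfile(cached_vmdk, dest)
    return dest
```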
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.667779] env[61839]: DEBUG oslo_vmware.api [None req-232c9fff-1e94-49d3-8695-e77d90e281a0 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Waiting for the task: (returnval){ [ 945.667779] env[61839]: value = "task-1314793" [ 945.667779] env[61839]: _type = "Task" [ 945.667779] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 945.673427] env[61839]: DEBUG nova.compute.manager [None req-ac991200-7ce6-466a-b5fd-f91f890fd8c6 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 23ee24d5-bccd-497d-a53f-b9723fd9c707] Start spawning the instance on the hypervisor. {{(pid=61839) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 945.679059] env[61839]: DEBUG oslo_vmware.api [None req-232c9fff-1e94-49d3-8695-e77d90e281a0 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Task: {'id': task-1314793, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.703253] env[61839]: DEBUG nova.virt.hardware [None req-ac991200-7ce6-466a-b5fd-f91f890fd8c6 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-18T16:51:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-18T16:50:57Z,direct_url=,disk_format='vmdk',id=e497cc62-282a-4a70-9770-22d80d8a1013,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a98e034276e44534a9feace637762da7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-18T16:50:58Z,virtual_size=,visibility=), allow threads: False {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 945.703564] env[61839]: DEBUG nova.virt.hardware [None req-ac991200-7ce6-466a-b5fd-f91f890fd8c6 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Flavor limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 945.703731] env[61839]: DEBUG nova.virt.hardware [None req-ac991200-7ce6-466a-b5fd-f91f890fd8c6 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Image limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 945.703935] env[61839]: DEBUG nova.virt.hardware [None req-ac991200-7ce6-466a-b5fd-f91f890fd8c6 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Flavor pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 945.704091] env[61839]: DEBUG nova.virt.hardware [None req-ac991200-7ce6-466a-b5fd-f91f890fd8c6 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Image pref 0:0:0 {{(pid=61839) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 945.704250] env[61839]: DEBUG nova.virt.hardware [None req-ac991200-7ce6-466a-b5fd-f91f890fd8c6 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 945.704513] env[61839]: DEBUG nova.virt.hardware [None req-ac991200-7ce6-466a-b5fd-f91f890fd8c6 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 945.704735] env[61839]: DEBUG nova.virt.hardware [None req-ac991200-7ce6-466a-b5fd-f91f890fd8c6 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 945.704941] env[61839]: DEBUG nova.virt.hardware [None req-ac991200-7ce6-466a-b5fd-f91f890fd8c6 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Got 1 possible topologies {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 945.705224] env[61839]: DEBUG nova.virt.hardware [None req-ac991200-7ce6-466a-b5fd-f91f890fd8c6 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 945.705472] env[61839]: DEBUG nova.virt.hardware [None req-ac991200-7ce6-466a-b5fd-f91f890fd8c6 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 945.706708] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54cbc892-e27e-4ae4-95a4-91c73a89a48b {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.716168] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-763f4dcc-1d9e-4e64-b308-8b736f827f7b {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.873643] env[61839]: DEBUG oslo_vmware.api [None req-bf3bcffb-6c34-4e16-a353-b3f853008852 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Task: {'id': task-1314791, 'name': PowerOnVM_Task} progress is 89%. 
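The hardware records above enumerate CPU topologies: with no flavor or image limits set (0:0:0, so the maxima default to 65536), one vCPU yields the single topology 1:1:1. A sketch of that enumeration, which reproduces the logged result for vcpus=1:

```python
# Factor the vCPU count into sockets*cores*threads combinations under limits.
def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    topos = []
    for s in range(1, min(vcpus, max_sockets) + 1):
        if vcpus % s:
            continue
        for c in range(1, min(vcpus // s, max_cores) + 1):
            if (vcpus // s) % c:
                continue
            t = vcpus // (s * c)
            if t <= max_threads:
                topos.append((s, c, t))
    return topos

assert possible_topologies(1) == [(1, 1, 1)]   # matches "Got 1 possible topologies"
```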
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.025128] env[61839]: DEBUG nova.compute.manager [req-bf5c4c36-a95e-4699-9ee4-3e764e1d08b0 req-983dd7b3-03fb-4105-8ddb-040ac2aabbb7 service nova] [instance: 23ee24d5-bccd-497d-a53f-b9723fd9c707] Received event network-vif-plugged-56222a14-6e55-4e9b-a963-5d868763ad21 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 946.025244] env[61839]: DEBUG oslo_concurrency.lockutils [req-bf5c4c36-a95e-4699-9ee4-3e764e1d08b0 req-983dd7b3-03fb-4105-8ddb-040ac2aabbb7 service nova] Acquiring lock "23ee24d5-bccd-497d-a53f-b9723fd9c707-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 946.025561] env[61839]: DEBUG oslo_concurrency.lockutils [req-bf5c4c36-a95e-4699-9ee4-3e764e1d08b0 req-983dd7b3-03fb-4105-8ddb-040ac2aabbb7 service nova] Lock "23ee24d5-bccd-497d-a53f-b9723fd9c707-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 946.025782] env[61839]: DEBUG oslo_concurrency.lockutils [req-bf5c4c36-a95e-4699-9ee4-3e764e1d08b0 req-983dd7b3-03fb-4105-8ddb-040ac2aabbb7 service nova] Lock "23ee24d5-bccd-497d-a53f-b9723fd9c707-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 946.026058] env[61839]: DEBUG nova.compute.manager [req-bf5c4c36-a95e-4699-9ee4-3e764e1d08b0 req-983dd7b3-03fb-4105-8ddb-040ac2aabbb7 service nova] [instance: 23ee24d5-bccd-497d-a53f-b9723fd9c707] No waiting events found dispatching network-vif-plugged-56222a14-6e55-4e9b-a963-5d868763ad21 {{(pid=61839) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 946.026261] env[61839]: WARNING nova.compute.manager [req-bf5c4c36-a95e-4699-9ee4-3e764e1d08b0 req-983dd7b3-03fb-4105-8ddb-040ac2aabbb7 service nova] [instance: 23ee24d5-bccd-497d-a53f-b9723fd9c707] Received unexpected event network-vif-plugged-56222a14-6e55-4e9b-a963-5d868763ad21 for instance with vm_state building and task_state spawning. [ 946.059245] env[61839]: DEBUG oslo_concurrency.lockutils [None req-7ff34d1a-453a-49ff-af63-371c12d0a7c5 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.412s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 946.059899] env[61839]: DEBUG nova.compute.manager [None req-7ff34d1a-453a-49ff-af63-371c12d0a7c5 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 3e27bc4a-a4f3-4929-931a-0c3ecaf10e65] Start building networks asynchronously for instance.
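The event records above show the handshake between spawn and Neutron notifications: a waiter is registered per (instance, event) pair, pop_instance_event wakes it, and an event with no registered waiter is logged as unexpected, exactly as in the WARNING above. A stdlib sketch of that pattern, with invented helper names:

```python
# Waiter registry emulating the instance-event handshake in the log.
import threading

_waiters: dict[tuple[str, str], threading.Event] = {}

def prepare_for_event(instance_uuid: str, event_name: str) -> threading.Event:
    """Called by the spawning thread before it blocks on the event."""
    ev = threading.Event()
    _waiters[(instance_uuid, event_name)] = ev
    return ev

def pop_instance_event(instance_uuid: str, event_name: str) -> None:
    """Called when the external (Neutron) notification arrives."""
    ev = _waiters.pop((instance_uuid, event_name), None)
    if ev is None:
        # "No waiting events found dispatching ..." / unexpected-event WARNING
        print(f"Received unexpected event {event_name} for instance {instance_uuid}")
        return
    ev.set()  # wakes the thread blocked in ev.wait(timeout)
```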
{{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 946.062993] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0c8f58da-95bd-460c-b9d5-27f4db5a2eae tempest-ServerActionsV293TestJSON-151230769 tempest-ServerActionsV293TestJSON-151230769-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 16.803s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 946.166313] env[61839]: DEBUG oslo_vmware.api [None req-fd9d3f07-46d8-42a7-a204-834fbf9f5be1 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Task: {'id': task-1314792, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.180833] env[61839]: DEBUG oslo_vmware.api [None req-232c9fff-1e94-49d3-8695-e77d90e281a0 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Task: {'id': task-1314793, 'name': PowerOnVM_Task} progress is 89%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.305340] env[61839]: DEBUG nova.network.neutron [None req-ac991200-7ce6-466a-b5fd-f91f890fd8c6 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 23ee24d5-bccd-497d-a53f-b9723fd9c707] Successfully updated port: 56222a14-6e55-4e9b-a963-5d868763ad21 {{(pid=61839) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 946.358198] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-007f421c-5169-4096-92fd-5b7b64b38497 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.369812] env[61839]: DEBUG oslo_vmware.api [None req-bf3bcffb-6c34-4e16-a353-b3f853008852 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Task: {'id': task-1314791, 'name': PowerOnVM_Task, 'duration_secs': 0.62393} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 946.371723] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf3bcffb-6c34-4e16-a353-b3f853008852 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: 694a5d4b-3673-406b-a24a-d37fad33e549] Powered on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 946.374571] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3127475-c2f7-483c-b225-1fa9f0c920fd {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.418597] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffe9d15d-73d4-4b2d-acfb-f88b248ecff9 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.427956] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-482c97b1-6d50-4b7b-b49b-3278dea790db {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.445433] env[61839]: DEBUG nova.compute.provider_tree [None req-0c8f58da-95bd-460c-b9d5-27f4db5a2eae tempest-ServerActionsV293TestJSON-151230769 tempest-ServerActionsV293TestJSON-151230769-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 946.457856] env[61839]: DEBUG nova.network.neutron [req-845854f2-e6df-46f2-aa1b-ffd7b3e4cb31 req-75978ce6-6d3f-4403-8589-20334c696fde service nova] [instance: d718d866-dd6c-4332-b63a-be6850a5a785] Updated VIF entry in instance network info cache for port 5d041d96-4a6e-44d5-a31a-e597194524e0. 
{{(pid=61839) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 946.458404] env[61839]: DEBUG nova.network.neutron [req-845854f2-e6df-46f2-aa1b-ffd7b3e4cb31 req-75978ce6-6d3f-4403-8589-20334c696fde service nova] [instance: d718d866-dd6c-4332-b63a-be6850a5a785] Updating instance_info_cache with network_info: [{"id": "5d041d96-4a6e-44d5-a31a-e597194524e0", "address": "fa:16:3e:a9:bd:d2", "network": {"id": "41c98894-de91-45eb-a390-6217e0f9dca5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1040806357-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.245", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7e03467b7fba46a9aac1562a1cb8368e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "572b7281-aad3-45fa-9cb2-fc1c70569948", "external-id": "nsx-vlan-transportzone-722", "segmentation_id": 722, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5d041d96-4a", "ovs_interfaceid": "5d041d96-4a6e-44d5-a31a-e597194524e0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 946.486683] env[61839]: DEBUG nova.compute.manager [None req-bf3bcffb-6c34-4e16-a353-b3f853008852 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: 694a5d4b-3673-406b-a24a-d37fad33e549] Checking state {{(pid=61839) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 946.487472] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25be31f3-5049-4af7-9c2d-3f10582ac7c8 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.572845] env[61839]: DEBUG nova.compute.utils [None req-7ff34d1a-453a-49ff-af63-371c12d0a7c5 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Using /dev/sd instead of None {{(pid=61839) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 946.574203] env[61839]: DEBUG nova.compute.manager [None req-7ff34d1a-453a-49ff-af63-371c12d0a7c5 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 3e27bc4a-a4f3-4929-931a-0c3ecaf10e65] Allocating IP information in the background. 
{{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 946.574414] env[61839]: DEBUG nova.network.neutron [None req-7ff34d1a-453a-49ff-af63-371c12d0a7c5 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 3e27bc4a-a4f3-4929-931a-0c3ecaf10e65] allocate_for_instance() {{(pid=61839) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 946.615443] env[61839]: DEBUG nova.policy [None req-7ff34d1a-453a-49ff-af63-371c12d0a7c5 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7b9ca67c278b4cb9a83ec3c6ce42af5d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5aba1e066cb4400dbbacc92f393962e6', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61839) authorize /opt/stack/nova/nova/policy.py:201}} [ 946.663552] env[61839]: DEBUG oslo_vmware.api [None req-fd9d3f07-46d8-42a7-a204-834fbf9f5be1 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Task: {'id': task-1314792, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.543548} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 946.663880] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd9d3f07-46d8-42a7-a204-834fbf9f5be1 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk to [datastore2] 21e1c5b2-9865-457b-87c8-ce56c3c7b8f9/21e1c5b2-9865-457b-87c8-ce56c3c7b8f9.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 946.664126] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-fd9d3f07-46d8-42a7-a204-834fbf9f5be1 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: 21e1c5b2-9865-457b-87c8-ce56c3c7b8f9] Extending root virtual disk to 1048576 {{(pid=61839) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 946.664387] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-845568a8-5738-4334-a87d-b82ae6362b1f {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.673907] env[61839]: DEBUG oslo_vmware.api [None req-fd9d3f07-46d8-42a7-a204-834fbf9f5be1 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Waiting for the task: (returnval){ [ 946.673907] env[61839]: value = "task-1314794" [ 946.673907] env[61839]: _type = "Task" [ 946.673907] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 946.680758] env[61839]: DEBUG oslo_vmware.api [None req-232c9fff-1e94-49d3-8695-e77d90e281a0 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Task: {'id': task-1314793, 'name': PowerOnVM_Task, 'duration_secs': 0.559901} completed successfully. 
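The earlier "Extending root virtual disk to 1048576" record is consistent with the flavor's root_gb converted to kilobytes: the m1.nano flavor in this log has root_gb=1, and 1 GiB is 1048576 KiB. In sketch form:

```python
# Unit arithmetic behind the logged extend size (GiB -> KiB).
def root_disk_kb(root_gb: int) -> int:
    return root_gb * 1024 * 1024

assert root_disk_kb(1) == 1048576  # matches the log line for m1.nano (root_gb=1)
```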
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 946.681488] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-232c9fff-1e94-49d3-8695-e77d90e281a0 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] [instance: d4a8c153-7585-4c78-8aa4-56077e0a7af6] Powered on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 946.681765] env[61839]: INFO nova.compute.manager [None req-232c9fff-1e94-49d3-8695-e77d90e281a0 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] [instance: d4a8c153-7585-4c78-8aa4-56077e0a7af6] Took 9.82 seconds to spawn the instance on the hypervisor. [ 946.682016] env[61839]: DEBUG nova.compute.manager [None req-232c9fff-1e94-49d3-8695-e77d90e281a0 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] [instance: d4a8c153-7585-4c78-8aa4-56077e0a7af6] Checking state {{(pid=61839) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 946.682913] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e81651a-2dec-4796-b9a1-2602ed84df00 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.688349] env[61839]: DEBUG oslo_vmware.api [None req-fd9d3f07-46d8-42a7-a204-834fbf9f5be1 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Task: {'id': task-1314794, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.810729] env[61839]: DEBUG oslo_concurrency.lockutils [None req-ac991200-7ce6-466a-b5fd-f91f890fd8c6 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Acquiring lock "refresh_cache-23ee24d5-bccd-497d-a53f-b9723fd9c707" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 946.810729] env[61839]: DEBUG oslo_concurrency.lockutils [None req-ac991200-7ce6-466a-b5fd-f91f890fd8c6 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Acquired lock "refresh_cache-23ee24d5-bccd-497d-a53f-b9723fd9c707" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 946.810729] env[61839]: DEBUG nova.network.neutron [None req-ac991200-7ce6-466a-b5fd-f91f890fd8c6 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 23ee24d5-bccd-497d-a53f-b9723fd9c707] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 946.948975] env[61839]: DEBUG nova.scheduler.client.report [None req-0c8f58da-95bd-460c-b9d5-27f4db5a2eae tempest-ServerActionsV293TestJSON-151230769 tempest-ServerActionsV293TestJSON-151230769-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) 
set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 946.961731] env[61839]: DEBUG oslo_concurrency.lockutils [req-845854f2-e6df-46f2-aa1b-ffd7b3e4cb31 req-75978ce6-6d3f-4403-8589-20334c696fde service nova] Releasing lock "refresh_cache-d718d866-dd6c-4332-b63a-be6850a5a785" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 947.013665] env[61839]: DEBUG oslo_concurrency.lockutils [None req-bf3bcffb-6c34-4e16-a353-b3f853008852 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Lock "694a5d4b-3673-406b-a24a-d37fad33e549" "released" by "nova.compute.manager.ComputeManager.unshelve_instance.<locals>.do_unshelve_instance" :: held 37.604s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 947.015214] env[61839]: DEBUG nova.network.neutron [None req-7ff34d1a-453a-49ff-af63-371c12d0a7c5 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 3e27bc4a-a4f3-4929-931a-0c3ecaf10e65] Successfully created port: d227bf3a-c4a3-4ba4-9df9-4c7ab460feb2 {{(pid=61839) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 947.039180] env[61839]: DEBUG oslo_vmware.rw_handles [None req-3cb5a8d7-2dab-4fe1-9636-3e19d5eeb851 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52dce99c-be22-a363-c85a-9ffa60cbfbc6/disk-0.vmdk. {{(pid=61839) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 947.040495] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9331391d-5205-411a-ab88-13905de3ebbf {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.047872] env[61839]: DEBUG oslo_vmware.rw_handles [None req-3cb5a8d7-2dab-4fe1-9636-3e19d5eeb851 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52dce99c-be22-a363-c85a-9ffa60cbfbc6/disk-0.vmdk is in state: ready. {{(pid=61839) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 947.048078] env[61839]: ERROR oslo_vmware.rw_handles [None req-3cb5a8d7-2dab-4fe1-9636-3e19d5eeb851 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52dce99c-be22-a363-c85a-9ffa60cbfbc6/disk-0.vmdk due to incomplete transfer. [ 947.048326] env[61839]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-8d5a4df5-dbe7-41a3-8704-d52132aae18e {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.058133] env[61839]: DEBUG oslo_vmware.rw_handles [None req-3cb5a8d7-2dab-4fe1-9636-3e19d5eeb851 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52dce99c-be22-a363-c85a-9ffa60cbfbc6/disk-0.vmdk.
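The rw_handles records above show the NFC lease teardown: on close, a lease that is still "ready" with an unfinished transfer is aborted (HttpNfcLeaseAbort) rather than completed. A toy model of that close path, not the oslo.vmware class:

```python
# Close/abort behaviour sketched from the lease records above.
class VmdkReadHandle:
    def __init__(self, total_bytes: int):
        self._total = total_bytes
        self._read = 0
        self.lease_state = "ready"

    def read(self, n: int) -> None:
        self._read = min(self._total, self._read + n)

    def close(self) -> None:
        if self._read < self._total:          # incomplete transfer
            print("Aborting lease due to incomplete transfer.")
            self.lease_state = "aborted"      # HttpNfcLeaseAbort in the log
        else:
            self.lease_state = "complete"
        print("Closed VMDK read handle.")

h = VmdkReadHandle(total_bytes=21318656)  # image size from this log's ImageMeta
h.read(4096)
h.close()  # aborts: only 4 KiB of the stream was consumed
```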
{{(pid=61839) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 947.058338] env[61839]: DEBUG nova.virt.vmwareapi.images [None req-3cb5a8d7-2dab-4fe1-9636-3e19d5eeb851 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: e265dcd3-6ab5-44b1-85be-bad934ebdb79] Uploaded image 6ef5e78d-0a80-467c-a598-32955806b305 to the Glance image server {{(pid=61839) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 947.060809] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-3cb5a8d7-2dab-4fe1-9636-3e19d5eeb851 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: e265dcd3-6ab5-44b1-85be-bad934ebdb79] Destroying the VM {{(pid=61839) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 947.061172] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-39931b2a-f86a-4bac-af3f-042144fcb799 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.067695] env[61839]: DEBUG oslo_vmware.api [None req-3cb5a8d7-2dab-4fe1-9636-3e19d5eeb851 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Waiting for the task: (returnval){ [ 947.067695] env[61839]: value = "task-1314795" [ 947.067695] env[61839]: _type = "Task" [ 947.067695] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 947.077992] env[61839]: DEBUG nova.compute.manager [None req-7ff34d1a-453a-49ff-af63-371c12d0a7c5 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 3e27bc4a-a4f3-4929-931a-0c3ecaf10e65] Start building block device mappings for instance. {{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 947.080331] env[61839]: DEBUG oslo_vmware.api [None req-3cb5a8d7-2dab-4fe1-9636-3e19d5eeb851 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': task-1314795, 'name': Destroy_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.185259] env[61839]: DEBUG oslo_vmware.api [None req-fd9d3f07-46d8-42a7-a204-834fbf9f5be1 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Task: {'id': task-1314794, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.187354} completed successfully. 
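
The Task/wait_for_task entries that recur through this log (progress 0% -> ... -> "completed successfully" with a duration_secs) come from a poll loop. A bare sketch of the pattern, assuming a caller-supplied poll() that returns the task state and progress; the real loop lives in oslo_vmware.api:

    import time

    def wait_for_task(poll, interval=0.5):
        # poll() -> (state, progress); states modeled on the vSphere task
        # lifecycle seen above: running -> success or error.
        start = time.monotonic()
        while True:
            state, progress = poll()
            if state == "success":
                return time.monotonic() - start   # the duration_secs in the log
            if state == "error":
                raise RuntimeError("task failed")
            print(f"progress is {progress}%")
            time.sleep(interval)
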
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 947.185610] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-fd9d3f07-46d8-42a7-a204-834fbf9f5be1 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: 21e1c5b2-9865-457b-87c8-ce56c3c7b8f9] Extended root virtual disk {{(pid=61839) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 947.186500] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da82794a-81e2-47e4-b8a1-ed45707e4da5 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.214812] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-fd9d3f07-46d8-42a7-a204-834fbf9f5be1 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: 21e1c5b2-9865-457b-87c8-ce56c3c7b8f9] Reconfiguring VM instance instance-0000005b to attach disk [datastore2] 21e1c5b2-9865-457b-87c8-ce56c3c7b8f9/21e1c5b2-9865-457b-87c8-ce56c3c7b8f9.vmdk or device None with type sparse {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 947.216971] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-38327b0d-9e70-468d-841a-36a09ab6f462 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.231217] env[61839]: INFO nova.compute.manager [None req-232c9fff-1e94-49d3-8695-e77d90e281a0 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] [instance: d4a8c153-7585-4c78-8aa4-56077e0a7af6] Took 26.10 seconds to build instance. [ 947.242329] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 947.242792] env[61839]: DEBUG oslo_vmware.api [None req-fd9d3f07-46d8-42a7-a204-834fbf9f5be1 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Waiting for the task: (returnval){ [ 947.242792] env[61839]: value = "task-1314796" [ 947.242792] env[61839]: _type = "Task" [ 947.242792] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 947.243013] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 947.243221] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Starting heal instance info cache {{(pid=61839) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 947.257404] env[61839]: DEBUG oslo_vmware.api [None req-fd9d3f07-46d8-42a7-a204-834fbf9f5be1 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Task: {'id': task-1314796, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.346457] env[61839]: DEBUG nova.network.neutron [None req-ac991200-7ce6-466a-b5fd-f91f890fd8c6 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 23ee24d5-bccd-497d-a53f-b9723fd9c707] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 947.455734] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0c8f58da-95bd-460c-b9d5-27f4db5a2eae tempest-ServerActionsV293TestJSON-151230769 tempest-ServerActionsV293TestJSON-151230769-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.393s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 947.455951] env[61839]: INFO nova.compute.manager [None req-0c8f58da-95bd-460c-b9d5-27f4db5a2eae tempest-ServerActionsV293TestJSON-151230769 tempest-ServerActionsV293TestJSON-151230769-project-member] [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af] Successfully reverted task state from rebuilding on failure for instance. [ 947.461061] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3cb92deb-2f40-434a-9700-7a733b835e57 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 17.559s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 947.579968] env[61839]: DEBUG oslo_vmware.api [None req-3cb5a8d7-2dab-4fe1-9636-3e19d5eeb851 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': task-1314795, 'name': Destroy_Task} progress is 0%. 
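
The lockutils lines above ("acquired ... waited 17.559s", "released ... held 1.393s") record two timings per lock: how long the caller waited to get it and how long it was held. A small stand-in with the same bookkeeping, using plain threading rather than oslo_concurrency:

    import threading
    import time
    from contextlib import contextmanager

    _locks = {}  # name -> Lock; lockutils keeps a similar registry internally

    @contextmanager
    def timed_lock(name, by):
        lock = _locks.setdefault(name, threading.Lock())
        t0 = time.monotonic()
        lock.acquire()
        t1 = time.monotonic()
        print(f'Lock "{name}" acquired by "{by}" :: waited {t1 - t0:.3f}s')
        try:
            yield
        finally:
            lock.release()
            print(f'Lock "{name}" "released" by "{by}" :: '
                  f'held {time.monotonic() - t1:.3f}s')

    with timed_lock("compute_resources", "resize_claim"):
        pass  # claim/usage work happens under the lock
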
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.592394] env[61839]: DEBUG nova.network.neutron [None req-ac991200-7ce6-466a-b5fd-f91f890fd8c6 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 23ee24d5-bccd-497d-a53f-b9723fd9c707] Updating instance_info_cache with network_info: [{"id": "56222a14-6e55-4e9b-a963-5d868763ad21", "address": "fa:16:3e:5e:f1:1e", "network": {"id": "2334f355-8fd7-4df2-ba1c-a28b5fde97f2", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-202913707-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8935bcc7ee644cb7a2a33557a708189c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "37434b93-dfdc-4a3f-bf5a-9f2cbe25a754", "external-id": "nsx-vlan-transportzone-676", "segmentation_id": 676, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap56222a14-6e", "ovs_interfaceid": "56222a14-6e55-4e9b-a963-5d868763ad21", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 947.737112] env[61839]: DEBUG oslo_concurrency.lockutils [None req-232c9fff-1e94-49d3-8695-e77d90e281a0 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Lock "d4a8c153-7585-4c78-8aa4-56077e0a7af6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 27.615s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 947.760086] env[61839]: DEBUG oslo_vmware.api [None req-fd9d3f07-46d8-42a7-a204-834fbf9f5be1 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Task: {'id': task-1314796, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.794252] env[61839]: DEBUG oslo_concurrency.lockutils [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Acquiring lock "refresh_cache-603191b6-a4b0-451b-b98b-f3dbfb684300" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 947.794447] env[61839]: DEBUG oslo_concurrency.lockutils [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Acquired lock "refresh_cache-603191b6-a4b0-451b-b98b-f3dbfb684300" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 947.794614] env[61839]: DEBUG nova.network.neutron [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] [instance: 603191b6-a4b0-451b-b98b-f3dbfb684300] Forcefully refreshing network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 947.970398] env[61839]: INFO nova.compute.claims [None req-3cb92deb-2f40-434a-9700-7a733b835e57 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 948.080955] env[61839]: DEBUG oslo_vmware.api [None req-3cb5a8d7-2dab-4fe1-9636-3e19d5eeb851 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': task-1314795, 'name': Destroy_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.087618] env[61839]: DEBUG nova.compute.manager [None req-7ff34d1a-453a-49ff-af63-371c12d0a7c5 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 3e27bc4a-a4f3-4929-931a-0c3ecaf10e65] Start spawning the instance on the hypervisor. 
{{(pid=61839) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 948.098337] env[61839]: DEBUG oslo_concurrency.lockutils [None req-ac991200-7ce6-466a-b5fd-f91f890fd8c6 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Releasing lock "refresh_cache-23ee24d5-bccd-497d-a53f-b9723fd9c707" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 948.099880] env[61839]: DEBUG nova.compute.manager [None req-ac991200-7ce6-466a-b5fd-f91f890fd8c6 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 23ee24d5-bccd-497d-a53f-b9723fd9c707] Instance network_info: |[{"id": "56222a14-6e55-4e9b-a963-5d868763ad21", "address": "fa:16:3e:5e:f1:1e", "network": {"id": "2334f355-8fd7-4df2-ba1c-a28b5fde97f2", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-202913707-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8935bcc7ee644cb7a2a33557a708189c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "37434b93-dfdc-4a3f-bf5a-9f2cbe25a754", "external-id": "nsx-vlan-transportzone-676", "segmentation_id": 676, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap56222a14-6e", "ovs_interfaceid": "56222a14-6e55-4e9b-a963-5d868763ad21", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 948.099880] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-ac991200-7ce6-466a-b5fd-f91f890fd8c6 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 23ee24d5-bccd-497d-a53f-b9723fd9c707] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5e:f1:1e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '37434b93-dfdc-4a3f-bf5a-9f2cbe25a754', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '56222a14-6e55-4e9b-a963-5d868763ad21', 'vif_model': 'vmxnet3'}] {{(pid=61839) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 948.113924] env[61839]: DEBUG oslo.service.loopingcall [None req-ac991200-7ce6-466a-b5fd-f91f890fd8c6 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
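
The two entries above show the translation the driver performs: the Neutron network_info blob is reduced to a compact VIF info dict (bridge name, MAC, an OpaqueNetwork reference built from the nsx-logical-switch-id, the port id, and the vmxnet3 model). A sketch of just that mapping, with field choices taken from the log rather than the full nova.virt.vmwareapi logic:

    def vif_info_from_network_info(vif):
        details = vif["details"]
        return {
            "network_name": vif["network"]["bridge"],   # "br-int"
            "mac_address": vif["address"],              # "fa:16:3e:5e:f1:1e"
            "network_ref": {
                "type": "OpaqueNetwork",
                "network-id": details["nsx-logical-switch-id"],
                "network-type": "nsx.LogicalSwitch",
                "use-external-id": True,
            },
            "iface_id": vif["id"],                      # the Neutron port UUID
            "vif_model": "vmxnet3",
        }
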
{{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 948.118725] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 23ee24d5-bccd-497d-a53f-b9723fd9c707] Creating VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 948.119063] env[61839]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b941059a-51eb-4b8c-8770-0f486640c806 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.149460] env[61839]: DEBUG nova.virt.hardware [None req-7ff34d1a-453a-49ff-af63-371c12d0a7c5 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-18T16:51:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-18T16:50:57Z,direct_url=,disk_format='vmdk',id=e497cc62-282a-4a70-9770-22d80d8a1013,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a98e034276e44534a9feace637762da7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-18T16:50:58Z,virtual_size=,visibility=), allow threads: False {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 948.149928] env[61839]: DEBUG nova.virt.hardware [None req-7ff34d1a-453a-49ff-af63-371c12d0a7c5 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Flavor limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 948.150199] env[61839]: DEBUG nova.virt.hardware [None req-7ff34d1a-453a-49ff-af63-371c12d0a7c5 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Image limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 948.150483] env[61839]: DEBUG nova.virt.hardware [None req-7ff34d1a-453a-49ff-af63-371c12d0a7c5 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Flavor pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 948.151522] env[61839]: DEBUG nova.virt.hardware [None req-7ff34d1a-453a-49ff-af63-371c12d0a7c5 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Image pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 948.151522] env[61839]: DEBUG nova.virt.hardware [None req-7ff34d1a-453a-49ff-af63-371c12d0a7c5 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 948.151522] env[61839]: DEBUG nova.virt.hardware [None req-7ff34d1a-453a-49ff-af63-371c12d0a7c5 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61839) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:569}} [ 948.151733] env[61839]: DEBUG nova.virt.hardware [None req-7ff34d1a-453a-49ff-af63-371c12d0a7c5 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 948.151777] env[61839]: DEBUG nova.virt.hardware [None req-7ff34d1a-453a-49ff-af63-371c12d0a7c5 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Got 1 possible topologies {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 948.152019] env[61839]: DEBUG nova.virt.hardware [None req-7ff34d1a-453a-49ff-af63-371c12d0a7c5 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 948.152282] env[61839]: DEBUG nova.virt.hardware [None req-7ff34d1a-453a-49ff-af63-371c12d0a7c5 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 948.153766] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cac605c-daeb-489f-9899-b34bc93098e6 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.169030] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22898853-f363-4434-844a-42952dff3a1b {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.174841] env[61839]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 948.174841] env[61839]: value = "task-1314797" [ 948.174841] env[61839]: _type = "Task" [ 948.174841] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 948.196602] env[61839]: DEBUG nova.compute.manager [req-6bd5924c-dfc9-44a7-a094-0c1f75577465 req-3077fcfc-d6ee-4b36-97a9-e28705caacd2 service nova] [instance: 23ee24d5-bccd-497d-a53f-b9723fd9c707] Received event network-changed-56222a14-6e55-4e9b-a963-5d868763ad21 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 948.196919] env[61839]: DEBUG nova.compute.manager [req-6bd5924c-dfc9-44a7-a094-0c1f75577465 req-3077fcfc-d6ee-4b36-97a9-e28705caacd2 service nova] [instance: 23ee24d5-bccd-497d-a53f-b9723fd9c707] Refreshing instance network info cache due to event network-changed-56222a14-6e55-4e9b-a963-5d868763ad21. 
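
The hardware.py entries above walk the topology search: with no flavor or image limits (0:0:0) the maxima default to 65536, and for 1 vCPU the only topology with sockets x cores x threads == 1 is 1:1:1. A brute-force sketch of that enumeration (Nova's real search also weighs preferences; this only reproduces the "possible topologies" step):

    from itertools import product

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
        limit = lambda m: range(1, min(vcpus, m) + 1)
        return [(s, c, t)
                for s, c, t in product(limit(max_sockets), limit(max_cores),
                                       limit(max_threads))
                if s * c * t == vcpus]

    print(possible_topologies(1))   # [(1, 1, 1)], as logged above
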
{{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 948.197281] env[61839]: DEBUG oslo_concurrency.lockutils [req-6bd5924c-dfc9-44a7-a094-0c1f75577465 req-3077fcfc-d6ee-4b36-97a9-e28705caacd2 service nova] Acquiring lock "refresh_cache-23ee24d5-bccd-497d-a53f-b9723fd9c707" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 948.197532] env[61839]: DEBUG oslo_concurrency.lockutils [req-6bd5924c-dfc9-44a7-a094-0c1f75577465 req-3077fcfc-d6ee-4b36-97a9-e28705caacd2 service nova] Acquired lock "refresh_cache-23ee24d5-bccd-497d-a53f-b9723fd9c707" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 948.197809] env[61839]: DEBUG nova.network.neutron [req-6bd5924c-dfc9-44a7-a094-0c1f75577465 req-3077fcfc-d6ee-4b36-97a9-e28705caacd2 service nova] [instance: 23ee24d5-bccd-497d-a53f-b9723fd9c707] Refreshing network info cache for port 56222a14-6e55-4e9b-a963-5d868763ad21 {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 948.204392] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314797, 'name': CreateVM_Task} progress is 10%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.259335] env[61839]: DEBUG oslo_vmware.api [None req-fd9d3f07-46d8-42a7-a204-834fbf9f5be1 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Task: {'id': task-1314796, 'name': ReconfigVM_Task, 'duration_secs': 0.760374} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 948.259762] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-fd9d3f07-46d8-42a7-a204-834fbf9f5be1 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: 21e1c5b2-9865-457b-87c8-ce56c3c7b8f9] Reconfigured VM instance instance-0000005b to attach disk [datastore2] 21e1c5b2-9865-457b-87c8-ce56c3c7b8f9/21e1c5b2-9865-457b-87c8-ce56c3c7b8f9.vmdk or device None with type sparse {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 948.260641] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-99bf2d18-082d-45bb-a4b7-23d6c6c5313d {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.277009] env[61839]: DEBUG oslo_vmware.api [None req-fd9d3f07-46d8-42a7-a204-834fbf9f5be1 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Waiting for the task: (returnval){ [ 948.277009] env[61839]: value = "task-1314798" [ 948.277009] env[61839]: _type = "Task" [ 948.277009] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 948.289535] env[61839]: DEBUG oslo_vmware.api [None req-fd9d3f07-46d8-42a7-a204-834fbf9f5be1 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Task: {'id': task-1314798, 'name': Rename_Task} progress is 5%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.478876] env[61839]: INFO nova.compute.resource_tracker [None req-3cb92deb-2f40-434a-9700-7a733b835e57 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4] Updating resource usage from migration ede2e43c-56ba-4f49-9646-56aeac32d3ea [ 948.583164] env[61839]: DEBUG oslo_vmware.api [None req-3cb5a8d7-2dab-4fe1-9636-3e19d5eeb851 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': task-1314795, 'name': Destroy_Task, 'duration_secs': 1.309507} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 948.583164] env[61839]: INFO nova.virt.vmwareapi.vm_util [None req-3cb5a8d7-2dab-4fe1-9636-3e19d5eeb851 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: e265dcd3-6ab5-44b1-85be-bad934ebdb79] Destroyed the VM [ 948.583164] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-3cb5a8d7-2dab-4fe1-9636-3e19d5eeb851 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: e265dcd3-6ab5-44b1-85be-bad934ebdb79] Deleting Snapshot of the VM instance {{(pid=61839) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 948.583164] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-441bb4b3-4eaf-45ea-b4c7-188742fa8bb5 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.590024] env[61839]: DEBUG oslo_vmware.api [None req-3cb5a8d7-2dab-4fe1-9636-3e19d5eeb851 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Waiting for the task: (returnval){ [ 948.590024] env[61839]: value = "task-1314799" [ 948.590024] env[61839]: _type = "Task" [ 948.590024] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 948.606158] env[61839]: DEBUG oslo_vmware.api [None req-3cb5a8d7-2dab-4fe1-9636-3e19d5eeb851 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': task-1314799, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.678593] env[61839]: DEBUG nova.compute.manager [req-2f220a05-7ed9-4b57-abc0-d68dfa4365b0 req-2659ddba-c813-47e8-b0a9-ba9aeb97d8d1 service nova] [instance: 3e27bc4a-a4f3-4929-931a-0c3ecaf10e65] Received event network-vif-plugged-d227bf3a-c4a3-4ba4-9df9-4c7ab460feb2 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 948.678593] env[61839]: DEBUG oslo_concurrency.lockutils [req-2f220a05-7ed9-4b57-abc0-d68dfa4365b0 req-2659ddba-c813-47e8-b0a9-ba9aeb97d8d1 service nova] Acquiring lock "3e27bc4a-a4f3-4929-931a-0c3ecaf10e65-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 948.678593] env[61839]: DEBUG oslo_concurrency.lockutils [req-2f220a05-7ed9-4b57-abc0-d68dfa4365b0 req-2659ddba-c813-47e8-b0a9-ba9aeb97d8d1 service nova] Lock "3e27bc4a-a4f3-4929-931a-0c3ecaf10e65-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 948.678593] env[61839]: DEBUG oslo_concurrency.lockutils [req-2f220a05-7ed9-4b57-abc0-d68dfa4365b0 req-2659ddba-c813-47e8-b0a9-ba9aeb97d8d1 service nova] Lock "3e27bc4a-a4f3-4929-931a-0c3ecaf10e65-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 948.678593] env[61839]: DEBUG nova.compute.manager [req-2f220a05-7ed9-4b57-abc0-d68dfa4365b0 req-2659ddba-c813-47e8-b0a9-ba9aeb97d8d1 service nova] [instance: 3e27bc4a-a4f3-4929-931a-0c3ecaf10e65] No waiting events found dispatching network-vif-plugged-d227bf3a-c4a3-4ba4-9df9-4c7ab460feb2 {{(pid=61839) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 948.678593] env[61839]: WARNING nova.compute.manager [req-2f220a05-7ed9-4b57-abc0-d68dfa4365b0 req-2659ddba-c813-47e8-b0a9-ba9aeb97d8d1 service nova] [instance: 3e27bc4a-a4f3-4929-931a-0c3ecaf10e65] Received unexpected event network-vif-plugged-d227bf3a-c4a3-4ba4-9df9-4c7ab460feb2 for instance with vm_state building and task_state spawning. [ 948.687909] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314797, 'name': CreateVM_Task, 'duration_secs': 0.366488} completed successfully. 
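
The event sequence above is the compute manager's external-event handshake: under the per-instance "-events" lock it pops any registered waiter for network-vif-plugged, and when nothing is waiting (the VM is still building) it logs the WARNING about an unexpected event. A threading-based sketch of that dispatch:

    import threading

    _waiters = {}                    # (instance_uuid, event) -> threading.Event
    _events_lock = threading.Lock()  # stands in for the "<uuid>-events" lock

    def pop_instance_event(instance_uuid, event):
        with _events_lock:
            return _waiters.pop((instance_uuid, event), None)

    def external_instance_event(instance_uuid, event):
        waiter = pop_instance_event(instance_uuid, event)
        if waiter is None:
            # The branch taken above: nothing registered yet, so the event
            # is logged as unexpected and dropped.
            print(f"Received unexpected event {event} for {instance_uuid}")
        else:
            waiter.set()   # unblocks a spawn waiting on network-vif-plugged
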
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 948.691058] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 23ee24d5-bccd-497d-a53f-b9723fd9c707] Created VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 948.691953] env[61839]: DEBUG oslo_concurrency.lockutils [None req-ac991200-7ce6-466a-b5fd-f91f890fd8c6 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 948.692160] env[61839]: DEBUG oslo_concurrency.lockutils [None req-ac991200-7ce6-466a-b5fd-f91f890fd8c6 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 948.692521] env[61839]: DEBUG oslo_concurrency.lockutils [None req-ac991200-7ce6-466a-b5fd-f91f890fd8c6 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 948.692872] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-892f1399-1dc0-4853-afc5-8db84c14f95d {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.700133] env[61839]: DEBUG oslo_vmware.api [None req-ac991200-7ce6-466a-b5fd-f91f890fd8c6 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Waiting for the task: (returnval){ [ 948.700133] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]5207cf57-6bfa-034f-3e18-699eeeeb2306" [ 948.700133] env[61839]: _type = "Task" [ 948.700133] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 948.711251] env[61839]: DEBUG oslo_vmware.api [None req-ac991200-7ce6-466a-b5fd-f91f890fd8c6 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]5207cf57-6bfa-034f-3e18-699eeeeb2306, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.787911] env[61839]: DEBUG oslo_vmware.api [None req-fd9d3f07-46d8-42a7-a204-834fbf9f5be1 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Task: {'id': task-1314798, 'name': Rename_Task, 'duration_secs': 0.158658} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 948.792828] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd9d3f07-46d8-42a7-a204-834fbf9f5be1 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: 21e1c5b2-9865-457b-87c8-ce56c3c7b8f9] Powering on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 948.793620] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f1add1ff-ac06-40fa-835f-9ce5257d460c {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.803960] env[61839]: DEBUG oslo_vmware.api [None req-fd9d3f07-46d8-42a7-a204-834fbf9f5be1 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Waiting for the task: (returnval){ [ 948.803960] env[61839]: value = "task-1314800" [ 948.803960] env[61839]: _type = "Task" [ 948.803960] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 948.805425] env[61839]: DEBUG nova.network.neutron [None req-7ff34d1a-453a-49ff-af63-371c12d0a7c5 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 3e27bc4a-a4f3-4929-931a-0c3ecaf10e65] Successfully updated port: d227bf3a-c4a3-4ba4-9df9-4c7ab460feb2 {{(pid=61839) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 948.812138] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57d6b4fe-449f-4d0b-8209-4362774ff4cb {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.821679] env[61839]: DEBUG oslo_concurrency.lockutils [None req-7ff34d1a-453a-49ff-af63-371c12d0a7c5 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Acquiring lock "refresh_cache-3e27bc4a-a4f3-4929-931a-0c3ecaf10e65" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 948.821834] env[61839]: DEBUG oslo_concurrency.lockutils [None req-7ff34d1a-453a-49ff-af63-371c12d0a7c5 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Acquired lock "refresh_cache-3e27bc4a-a4f3-4929-931a-0c3ecaf10e65" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 948.821993] env[61839]: DEBUG nova.network.neutron [None req-7ff34d1a-453a-49ff-af63-371c12d0a7c5 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 3e27bc4a-a4f3-4929-931a-0c3ecaf10e65] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 948.830310] env[61839]: DEBUG oslo_vmware.api [None req-fd9d3f07-46d8-42a7-a204-834fbf9f5be1 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Task: {'id': task-1314800, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.836403] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccdaa1d2-c93d-4232-8f02-f8fab96bb84b {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.877948] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4046f0fe-1d2e-4bdf-b141-901b03e1e5c6 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.891764] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35ee5875-d366-48b5-ab8c-3d720cc3a980 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.912230] env[61839]: DEBUG nova.compute.provider_tree [None req-3cb92deb-2f40-434a-9700-7a733b835e57 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 948.996055] env[61839]: DEBUG nova.network.neutron [req-6bd5924c-dfc9-44a7-a094-0c1f75577465 req-3077fcfc-d6ee-4b36-97a9-e28705caacd2 service nova] [instance: 23ee24d5-bccd-497d-a53f-b9723fd9c707] Updated VIF entry in instance network info cache for port 56222a14-6e55-4e9b-a963-5d868763ad21. {{(pid=61839) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 948.996485] env[61839]: DEBUG nova.network.neutron [req-6bd5924c-dfc9-44a7-a094-0c1f75577465 req-3077fcfc-d6ee-4b36-97a9-e28705caacd2 service nova] [instance: 23ee24d5-bccd-497d-a53f-b9723fd9c707] Updating instance_info_cache with network_info: [{"id": "56222a14-6e55-4e9b-a963-5d868763ad21", "address": "fa:16:3e:5e:f1:1e", "network": {"id": "2334f355-8fd7-4df2-ba1c-a28b5fde97f2", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-202913707-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8935bcc7ee644cb7a2a33557a708189c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "37434b93-dfdc-4a3f-bf5a-9f2cbe25a754", "external-id": "nsx-vlan-transportzone-676", "segmentation_id": 676, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap56222a14-6e", "ovs_interfaceid": "56222a14-6e55-4e9b-a963-5d868763ad21", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 949.040187] env[61839]: DEBUG nova.network.neutron [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] [instance: 603191b6-a4b0-451b-b98b-f3dbfb684300] Updating instance_info_cache with network_info: [{"id": "fc601f8f-cb33-41b2-9f00-9476cd3cbf01", "address": "fa:16:3e:31:8e:b6", "network": {"id": "6f1d4bda-c333-4556-9530-df719c058a5b", 
"bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1248001983-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "223d94c193814f649b5d1f35e3756071", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2180b40f-2bb0-47da-ba80-c2fbe7f98af0", "external-id": "nsx-vlan-transportzone-970", "segmentation_id": 970, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfc601f8f-cb", "ovs_interfaceid": "fc601f8f-cb33-41b2-9f00-9476cd3cbf01", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 949.103578] env[61839]: DEBUG oslo_vmware.api [None req-3cb5a8d7-2dab-4fe1-9636-3e19d5eeb851 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': task-1314799, 'name': RemoveSnapshot_Task, 'duration_secs': 0.46193} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 949.103909] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-3cb5a8d7-2dab-4fe1-9636-3e19d5eeb851 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: e265dcd3-6ab5-44b1-85be-bad934ebdb79] Deleted Snapshot of the VM instance {{(pid=61839) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 949.104221] env[61839]: DEBUG nova.compute.manager [None req-3cb5a8d7-2dab-4fe1-9636-3e19d5eeb851 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: e265dcd3-6ab5-44b1-85be-bad934ebdb79] Checking state {{(pid=61839) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 949.105020] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af053b59-dbe6-4df8-a6cb-50a3d3c9110b {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.211496] env[61839]: DEBUG oslo_vmware.api [None req-ac991200-7ce6-466a-b5fd-f91f890fd8c6 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]5207cf57-6bfa-034f-3e18-699eeeeb2306, 'name': SearchDatastore_Task, 'duration_secs': 0.010952} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 949.211815] env[61839]: DEBUG oslo_concurrency.lockutils [None req-ac991200-7ce6-466a-b5fd-f91f890fd8c6 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 949.212084] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-ac991200-7ce6-466a-b5fd-f91f890fd8c6 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 23ee24d5-bccd-497d-a53f-b9723fd9c707] Processing image e497cc62-282a-4a70-9770-22d80d8a1013 {{(pid=61839) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 949.212333] env[61839]: DEBUG oslo_concurrency.lockutils [None req-ac991200-7ce6-466a-b5fd-f91f890fd8c6 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 949.212486] env[61839]: DEBUG oslo_concurrency.lockutils [None req-ac991200-7ce6-466a-b5fd-f91f890fd8c6 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 949.212682] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-ac991200-7ce6-466a-b5fd-f91f890fd8c6 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 949.212979] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3278416c-6ea7-4e1e-966f-c05ad86cbd66 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.225440] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-ac991200-7ce6-466a-b5fd-f91f890fd8c6 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 949.225645] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-ac991200-7ce6-466a-b5fd-f91f890fd8c6 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Folder [datastore2] devstack-image-cache_base created. 
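
The sequence above is the image-cache check: take the locks on the cached VMDK, search the datastore for it, and create the devstack-image-cache_base folder when missing before any download. A filesystem sketch of the same fetch-if-missing shape (locking elided; download is a stand-in for the Glance-to-datastore copy):

    import os

    def fetch_image_if_missing(cache_dir, image_id, download):
        path = os.path.join(cache_dir, image_id, f"{image_id}.vmdk")
        os.makedirs(os.path.dirname(path), exist_ok=True)  # MakeDirectory above
        if not os.path.exists(path):    # the SearchDatastore_Task probe
            download(path)              # only on a cache miss
        return path
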
{{(pid=61839) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 949.226522] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c6d56fce-1090-46e0-a4c8-abdf0860a528 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.232601] env[61839]: DEBUG oslo_vmware.api [None req-ac991200-7ce6-466a-b5fd-f91f890fd8c6 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Waiting for the task: (returnval){ [ 949.232601] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]528cb61f-f480-2553-3716-4cb7bd54dc73" [ 949.232601] env[61839]: _type = "Task" [ 949.232601] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 949.241411] env[61839]: DEBUG oslo_vmware.api [None req-ac991200-7ce6-466a-b5fd-f91f890fd8c6 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]528cb61f-f480-2553-3716-4cb7bd54dc73, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.317918] env[61839]: DEBUG oslo_vmware.api [None req-fd9d3f07-46d8-42a7-a204-834fbf9f5be1 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Task: {'id': task-1314800, 'name': PowerOnVM_Task} progress is 88%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.371701] env[61839]: DEBUG nova.network.neutron [None req-7ff34d1a-453a-49ff-af63-371c12d0a7c5 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 3e27bc4a-a4f3-4929-931a-0c3ecaf10e65] Instance cache missing network info. 
{{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 949.417189] env[61839]: DEBUG nova.scheduler.client.report [None req-3cb92deb-2f40-434a-9700-7a733b835e57 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 949.499654] env[61839]: DEBUG oslo_concurrency.lockutils [req-6bd5924c-dfc9-44a7-a094-0c1f75577465 req-3077fcfc-d6ee-4b36-97a9-e28705caacd2 service nova] Releasing lock "refresh_cache-23ee24d5-bccd-497d-a53f-b9723fd9c707" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 949.500087] env[61839]: DEBUG nova.compute.manager [req-6bd5924c-dfc9-44a7-a094-0c1f75577465 req-3077fcfc-d6ee-4b36-97a9-e28705caacd2 service nova] [instance: d4a8c153-7585-4c78-8aa4-56077e0a7af6] Received event network-changed-648944d5-7ed5-40cb-8a22-8ea3244538d7 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 949.500343] env[61839]: DEBUG nova.compute.manager [req-6bd5924c-dfc9-44a7-a094-0c1f75577465 req-3077fcfc-d6ee-4b36-97a9-e28705caacd2 service nova] [instance: d4a8c153-7585-4c78-8aa4-56077e0a7af6] Refreshing instance network info cache due to event network-changed-648944d5-7ed5-40cb-8a22-8ea3244538d7. 
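
"Inventory has not changed for provider ..." above is the no-op branch of the report client: the freshly computed inventory dict equals the cached one, so nothing is sent to placement. The comparison itself is just dict equality over the resource classes:

    cached = {"VCPU": {"total": 48, "reserved": 0, "min_unit": 1, "max_unit": 16,
                       "step_size": 1, "allocation_ratio": 4.0}}

    def inventory_changed(cached_inv, new_inv):
        # A changed total, ratio, or any other field would trigger an update
        # to placement; identical dicts are skipped, as logged above.
        return cached_inv != new_inv

    print(inventory_changed(cached, {"VCPU": dict(cached["VCPU"])}))  # False
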
{{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 949.501154] env[61839]: DEBUG oslo_concurrency.lockutils [req-6bd5924c-dfc9-44a7-a094-0c1f75577465 req-3077fcfc-d6ee-4b36-97a9-e28705caacd2 service nova] Acquiring lock "refresh_cache-d4a8c153-7585-4c78-8aa4-56077e0a7af6" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 949.501154] env[61839]: DEBUG oslo_concurrency.lockutils [req-6bd5924c-dfc9-44a7-a094-0c1f75577465 req-3077fcfc-d6ee-4b36-97a9-e28705caacd2 service nova] Acquired lock "refresh_cache-d4a8c153-7585-4c78-8aa4-56077e0a7af6" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 949.501154] env[61839]: DEBUG nova.network.neutron [req-6bd5924c-dfc9-44a7-a094-0c1f75577465 req-3077fcfc-d6ee-4b36-97a9-e28705caacd2 service nova] [instance: d4a8c153-7585-4c78-8aa4-56077e0a7af6] Refreshing network info cache for port 648944d5-7ed5-40cb-8a22-8ea3244538d7 {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 949.543829] env[61839]: DEBUG oslo_concurrency.lockutils [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Releasing lock "refresh_cache-603191b6-a4b0-451b-b98b-f3dbfb684300" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 949.544034] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] [instance: 603191b6-a4b0-451b-b98b-f3dbfb684300] Updated the network info_cache for instance {{(pid=61839) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9999}} [ 949.544576] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 949.544911] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 949.545141] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 949.545349] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 949.545541] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 949.545694] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 949.545854] env[61839]: DEBUG nova.compute.manager [None 
req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61839) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 949.546010] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 949.565886] env[61839]: DEBUG nova.network.neutron [None req-7ff34d1a-453a-49ff-af63-371c12d0a7c5 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 3e27bc4a-a4f3-4929-931a-0c3ecaf10e65] Updating instance_info_cache with network_info: [{"id": "d227bf3a-c4a3-4ba4-9df9-4c7ab460feb2", "address": "fa:16:3e:ae:c7:50", "network": {"id": "100bb090-83a3-4de8-bd2b-f65900cd5a3e", "bridge": "br-int", "label": "tempest-ServersTestJSON-901620943-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5aba1e066cb4400dbbacc92f393962e6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3b67e519-46cf-44ce-b670-4ba4c0c5b658", "external-id": "nsx-vlan-transportzone-110", "segmentation_id": 110, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd227bf3a-c4", "ovs_interfaceid": "d227bf3a-c4a3-4ba4-9df9-4c7ab460feb2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 949.618594] env[61839]: INFO nova.compute.manager [None req-3cb5a8d7-2dab-4fe1-9636-3e19d5eeb851 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: e265dcd3-6ab5-44b1-85be-bad934ebdb79] Shelve offloading [ 949.620622] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-3cb5a8d7-2dab-4fe1-9636-3e19d5eeb851 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: e265dcd3-6ab5-44b1-85be-bad934ebdb79] Powering off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 949.620895] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7d21cb0d-6b78-4fe9-addc-bd98d048c4fa {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.629912] env[61839]: DEBUG oslo_vmware.api [None req-3cb5a8d7-2dab-4fe1-9636-3e19d5eeb851 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Waiting for the task: (returnval){ [ 949.629912] env[61839]: value = "task-1314801" [ 949.629912] env[61839]: _type = "Task" [ 949.629912] env[61839]: } to complete. 
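
The periodic-task run above (_poll_rebooting_instances through update_available_resource) is driven by a dispatcher that simply invokes each registered task; the "CONF.reclaim_instance_interval <= 0, skipping..." line shows the guard living inside the task body itself. A compact model of that order:

    def reclaim_queued_deletes(reclaim_instance_interval=0):
        # The skip above happens inside the task, after it is dispatched.
        if reclaim_instance_interval <= 0:
            print("CONF.reclaim_instance_interval <= 0, skipping...")
            return
        # ... real work would reclaim soft-deleted instances here ...

    def run_periodic_tasks(tasks):
        for name, fn in tasks.items():
            print(f"Running periodic task {name}")
            fn()

    run_periodic_tasks({"ComputeManager._reclaim_queued_deletes":
                        reclaim_queued_deletes})
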
{{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 949.639222] env[61839]: DEBUG oslo_vmware.api [None req-3cb5a8d7-2dab-4fe1-9636-3e19d5eeb851 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': task-1314801, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.743604] env[61839]: DEBUG oslo_vmware.api [None req-ac991200-7ce6-466a-b5fd-f91f890fd8c6 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]528cb61f-f480-2553-3716-4cb7bd54dc73, 'name': SearchDatastore_Task, 'duration_secs': 0.0135} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 949.744429] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-70bd2a3a-3499-479f-aa76-c19faae99728 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.750115] env[61839]: DEBUG oslo_vmware.api [None req-ac991200-7ce6-466a-b5fd-f91f890fd8c6 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Waiting for the task: (returnval){ [ 949.750115] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]5283aba4-43b3-a4b6-57b9-e20a6bbfbffa" [ 949.750115] env[61839]: _type = "Task" [ 949.750115] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 949.758542] env[61839]: DEBUG oslo_vmware.api [None req-ac991200-7ce6-466a-b5fd-f91f890fd8c6 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]5283aba4-43b3-a4b6-57b9-e20a6bbfbffa, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.817552] env[61839]: DEBUG oslo_vmware.api [None req-fd9d3f07-46d8-42a7-a204-834fbf9f5be1 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Task: {'id': task-1314800, 'name': PowerOnVM_Task, 'duration_secs': 0.717999} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 949.817798] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd9d3f07-46d8-42a7-a204-834fbf9f5be1 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: 21e1c5b2-9865-457b-87c8-ce56c3c7b8f9] Powered on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 949.817986] env[61839]: INFO nova.compute.manager [None req-fd9d3f07-46d8-42a7-a204-834fbf9f5be1 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: 21e1c5b2-9865-457b-87c8-ce56c3c7b8f9] Took 8.58 seconds to spawn the instance on the hypervisor. 
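
"Took 8.58 seconds to spawn the instance on the hypervisor" above pairs the PowerOnVM completion with a timer started when the build began. The accounting amounts to no more than this sketch:

    import time

    def timed_spawn(spawn):
        # A bare model of the "Took N.NN seconds to spawn" line; Nova times
        # the driver spawn call and logs the delta on success.
        start = time.monotonic()
        spawn()
        print(f"Took {time.monotonic() - start:.2f} seconds to spawn the "
              "instance on the hypervisor.")
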
[ 949.818193] env[61839]: DEBUG nova.compute.manager [None req-fd9d3f07-46d8-42a7-a204-834fbf9f5be1 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: 21e1c5b2-9865-457b-87c8-ce56c3c7b8f9] Checking state {{(pid=61839) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 949.818968] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b965ed7d-92d7-4198-b1d7-9e0d7698e83b {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.923996] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3cb92deb-2f40-434a-9700-7a733b835e57 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.463s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 949.924261] env[61839]: INFO nova.compute.manager [None req-3cb92deb-2f40-434a-9700-7a733b835e57 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4] Migrating [ 949.934417] env[61839]: DEBUG oslo_concurrency.lockutils [None req-dba42a3e-32cb-4660-b49f-d5351f0075ef tempest-ServerAddressesNegativeTestJSON-1352160384 tempest-ServerAddressesNegativeTestJSON-1352160384-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 19.608s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 949.934651] env[61839]: DEBUG nova.objects.instance [None req-dba42a3e-32cb-4660-b49f-d5351f0075ef tempest-ServerAddressesNegativeTestJSON-1352160384 tempest-ServerAddressesNegativeTestJSON-1352160384-project-member] Lazy-loading 'resources' on Instance uuid 12087baa-e700-4977-b2df-3aa2c56cc2f6 {{(pid=61839) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 950.048939] env[61839]: DEBUG oslo_concurrency.lockutils [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 950.068838] env[61839]: DEBUG oslo_concurrency.lockutils [None req-7ff34d1a-453a-49ff-af63-371c12d0a7c5 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Releasing lock "refresh_cache-3e27bc4a-a4f3-4929-931a-0c3ecaf10e65" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 950.069066] env[61839]: DEBUG nova.compute.manager [None req-7ff34d1a-453a-49ff-af63-371c12d0a7c5 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 3e27bc4a-a4f3-4929-931a-0c3ecaf10e65] Instance network_info: |[{"id": "d227bf3a-c4a3-4ba4-9df9-4c7ab460feb2", "address": "fa:16:3e:ae:c7:50", "network": {"id": "100bb090-83a3-4de8-bd2b-f65900cd5a3e", "bridge": "br-int", "label": "tempest-ServersTestJSON-901620943-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": 
{"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5aba1e066cb4400dbbacc92f393962e6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3b67e519-46cf-44ce-b670-4ba4c0c5b658", "external-id": "nsx-vlan-transportzone-110", "segmentation_id": 110, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd227bf3a-c4", "ovs_interfaceid": "d227bf3a-c4a3-4ba4-9df9-4c7ab460feb2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 950.072100] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-7ff34d1a-453a-49ff-af63-371c12d0a7c5 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 3e27bc4a-a4f3-4929-931a-0c3ecaf10e65] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ae:c7:50', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3b67e519-46cf-44ce-b670-4ba4c0c5b658', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd227bf3a-c4a3-4ba4-9df9-4c7ab460feb2', 'vif_model': 'vmxnet3'}] {{(pid=61839) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 950.079664] env[61839]: DEBUG oslo.service.loopingcall [None req-7ff34d1a-453a-49ff-af63-371c12d0a7c5 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 950.079899] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3e27bc4a-a4f3-4929-931a-0c3ecaf10e65] Creating VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 950.080959] env[61839]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e14eeea5-1d71-45e2-8e3d-8ce6a651f8b3 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.102712] env[61839]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 950.102712] env[61839]: value = "task-1314802" [ 950.102712] env[61839]: _type = "Task" [ 950.102712] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 950.114710] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314802, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.142084] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-3cb5a8d7-2dab-4fe1-9636-3e19d5eeb851 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: e265dcd3-6ab5-44b1-85be-bad934ebdb79] VM already powered off {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 950.142329] env[61839]: DEBUG nova.compute.manager [None req-3cb5a8d7-2dab-4fe1-9636-3e19d5eeb851 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: e265dcd3-6ab5-44b1-85be-bad934ebdb79] Checking state {{(pid=61839) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 950.143148] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97b25c99-8dc2-4e7f-8668-a0e0bff7da36 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.149153] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3cb5a8d7-2dab-4fe1-9636-3e19d5eeb851 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Acquiring lock "refresh_cache-e265dcd3-6ab5-44b1-85be-bad934ebdb79" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 950.149328] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3cb5a8d7-2dab-4fe1-9636-3e19d5eeb851 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Acquired lock "refresh_cache-e265dcd3-6ab5-44b1-85be-bad934ebdb79" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 950.149504] env[61839]: DEBUG nova.network.neutron [None req-3cb5a8d7-2dab-4fe1-9636-3e19d5eeb851 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: e265dcd3-6ab5-44b1-85be-bad934ebdb79] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 950.237856] env[61839]: DEBUG nova.network.neutron [req-6bd5924c-dfc9-44a7-a094-0c1f75577465 req-3077fcfc-d6ee-4b36-97a9-e28705caacd2 service nova] [instance: d4a8c153-7585-4c78-8aa4-56077e0a7af6] Updated VIF entry in instance network info cache for port 648944d5-7ed5-40cb-8a22-8ea3244538d7. 
{{(pid=61839) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 950.238084] env[61839]: DEBUG nova.network.neutron [req-6bd5924c-dfc9-44a7-a094-0c1f75577465 req-3077fcfc-d6ee-4b36-97a9-e28705caacd2 service nova] [instance: d4a8c153-7585-4c78-8aa4-56077e0a7af6] Updating instance_info_cache with network_info: [{"id": "648944d5-7ed5-40cb-8a22-8ea3244538d7", "address": "fa:16:3e:12:04:b4", "network": {"id": "9ad4a7c4-51fa-42e2-927a-24d25b423b8b", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1594396457-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.229", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9d28bf7713204dfb9682d9c002cb5449", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aef08290-001a-4ae8-aff0-1889e2211389", "external-id": "nsx-vlan-transportzone-389", "segmentation_id": 389, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap648944d5-7e", "ovs_interfaceid": "648944d5-7ed5-40cb-8a22-8ea3244538d7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 950.277964] env[61839]: DEBUG oslo_vmware.api [None req-ac991200-7ce6-466a-b5fd-f91f890fd8c6 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]5283aba4-43b3-a4b6-57b9-e20a6bbfbffa, 'name': SearchDatastore_Task, 'duration_secs': 0.010201} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 950.278332] env[61839]: DEBUG oslo_concurrency.lockutils [None req-ac991200-7ce6-466a-b5fd-f91f890fd8c6 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 950.278656] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-ac991200-7ce6-466a-b5fd-f91f890fd8c6 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk to [datastore2] 23ee24d5-bccd-497d-a53f-b9723fd9c707/23ee24d5-bccd-497d-a53f-b9723fd9c707.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 950.278977] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-aeff1538-d0d5-4365-bd6d-27264da60e80 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.287712] env[61839]: DEBUG oslo_vmware.api [None req-ac991200-7ce6-466a-b5fd-f91f890fd8c6 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Waiting for the task: (returnval){ [ 950.287712] env[61839]: value = "task-1314803" [ 950.287712] env[61839]: _type = "Task" [ 950.287712] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 950.296974] env[61839]: DEBUG oslo_vmware.api [None req-ac991200-7ce6-466a-b5fd-f91f890fd8c6 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Task: {'id': task-1314803, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.337947] env[61839]: INFO nova.compute.manager [None req-fd9d3f07-46d8-42a7-a204-834fbf9f5be1 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: 21e1c5b2-9865-457b-87c8-ce56c3c7b8f9] Took 24.53 seconds to build instance. 
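The instance_info_cache blobs above are JSON-serializable network_info lists: one entry per VIF, each carrying the port id, MAC address, devname, and a nested network/subnets/ips tree. A short sketch that pulls the fixed IP, device name, and MTU out of one such entry, trimmed to the fields actually read (the structure and values mirror the d227bf3a-c4a3-4ba4-9df9-4c7ab460feb2 record above):

    import json

    # One VIF entry shaped like the instance_info_cache records in this
    # log, trimmed to the fields this sketch actually reads.
    vif = json.loads("""{
      "id": "d227bf3a-c4a3-4ba4-9df9-4c7ab460feb2",
      "address": "fa:16:3e:ae:c7:50",
      "devname": "tapd227bf3a-c4",
      "network": {
        "label": "tempest-ServersTestJSON-901620943-network",
        "meta": {"mtu": 8950},
        "subnets": [{
          "cidr": "192.168.128.0/28",
          "ips": [{"address": "192.168.128.13", "type": "fixed"}]
        }]
      }
    }""")

    fixed_ips = [ip["address"]
                 for subnet in vif["network"]["subnets"]
                 for ip in subnet["ips"]
                 if ip["type"] == "fixed"]
    print(vif["devname"], vif["address"], fixed_ips,
          vif["network"]["meta"]["mtu"])
    # -> tapd227bf3a-c4 fa:16:3e:ae:c7:50 ['192.168.128.13'] 8950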
[ 950.440646] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3cb92deb-2f40-434a-9700-7a733b835e57 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Acquiring lock "refresh_cache-fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 950.440646] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3cb92deb-2f40-434a-9700-7a733b835e57 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Acquired lock "refresh_cache-fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 950.440646] env[61839]: DEBUG nova.network.neutron [None req-3cb92deb-2f40-434a-9700-7a733b835e57 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 950.613259] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314802, 'name': CreateVM_Task, 'duration_secs': 0.411524} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 950.616088] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3e27bc4a-a4f3-4929-931a-0c3ecaf10e65] Created VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 950.617096] env[61839]: DEBUG oslo_concurrency.lockutils [None req-7ff34d1a-453a-49ff-af63-371c12d0a7c5 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 950.617385] env[61839]: DEBUG oslo_concurrency.lockutils [None req-7ff34d1a-453a-49ff-af63-371c12d0a7c5 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 950.617635] env[61839]: DEBUG oslo_concurrency.lockutils [None req-7ff34d1a-453a-49ff-af63-371c12d0a7c5 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 950.617914] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ada6545a-6e8f-46ec-8695-505533063095 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.623827] env[61839]: DEBUG oslo_vmware.api [None req-7ff34d1a-453a-49ff-af63-371c12d0a7c5 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Waiting for the task: (returnval){ [ 950.623827] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52e84c5e-74b8-71c1-875d-8f6abcb8f554" [ 950.623827] env[61839]: _type = "Task" [ 950.623827] env[61839]: } to complete. 
{{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 950.636341] env[61839]: DEBUG oslo_vmware.api [None req-7ff34d1a-453a-49ff-af63-371c12d0a7c5 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52e84c5e-74b8-71c1-875d-8f6abcb8f554, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.696364] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82b0915f-3aee-4bb5-9a31-138547a2a183 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.715542] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70233c43-7299-47c4-9b33-cd9f107e8414 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.722043] env[61839]: DEBUG nova.compute.manager [req-be0119e5-82e6-4c2e-ba24-aade15cf04ba req-fbc27b0f-6878-469e-b173-9b7256dcf7ec service nova] [instance: 3e27bc4a-a4f3-4929-931a-0c3ecaf10e65] Received event network-changed-d227bf3a-c4a3-4ba4-9df9-4c7ab460feb2 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 950.722268] env[61839]: DEBUG nova.compute.manager [req-be0119e5-82e6-4c2e-ba24-aade15cf04ba req-fbc27b0f-6878-469e-b173-9b7256dcf7ec service nova] [instance: 3e27bc4a-a4f3-4929-931a-0c3ecaf10e65] Refreshing instance network info cache due to event network-changed-d227bf3a-c4a3-4ba4-9df9-4c7ab460feb2. {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 950.722495] env[61839]: DEBUG oslo_concurrency.lockutils [req-be0119e5-82e6-4c2e-ba24-aade15cf04ba req-fbc27b0f-6878-469e-b173-9b7256dcf7ec service nova] Acquiring lock "refresh_cache-3e27bc4a-a4f3-4929-931a-0c3ecaf10e65" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 950.722647] env[61839]: DEBUG oslo_concurrency.lockutils [req-be0119e5-82e6-4c2e-ba24-aade15cf04ba req-fbc27b0f-6878-469e-b173-9b7256dcf7ec service nova] Acquired lock "refresh_cache-3e27bc4a-a4f3-4929-931a-0c3ecaf10e65" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 950.722810] env[61839]: DEBUG nova.network.neutron [req-be0119e5-82e6-4c2e-ba24-aade15cf04ba req-fbc27b0f-6878-469e-b173-9b7256dcf7ec service nova] [instance: 3e27bc4a-a4f3-4929-931a-0c3ecaf10e65] Refreshing network info cache for port d227bf3a-c4a3-4ba4-9df9-4c7ab460feb2 {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 950.758814] env[61839]: DEBUG oslo_concurrency.lockutils [req-6bd5924c-dfc9-44a7-a094-0c1f75577465 req-3077fcfc-d6ee-4b36-97a9-e28705caacd2 service nova] Releasing lock "refresh_cache-d4a8c153-7585-4c78-8aa4-56077e0a7af6" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 950.760885] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7830231f-3e86-478a-9092-ce8557d9139f {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.779028] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-935404d9-c79d-4124-b71e-00736fdae1fa {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.793269] env[61839]: DEBUG nova.compute.provider_tree [None req-dba42a3e-32cb-4660-b49f-d5351f0075ef tempest-ServerAddressesNegativeTestJSON-1352160384 tempest-ServerAddressesNegativeTestJSON-1352160384-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 950.808319] env[61839]: DEBUG oslo_vmware.api [None req-ac991200-7ce6-466a-b5fd-f91f890fd8c6 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Task: {'id': task-1314803, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.841058] env[61839]: DEBUG oslo_concurrency.lockutils [None req-fd9d3f07-46d8-42a7-a204-834fbf9f5be1 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Lock "21e1c5b2-9865-457b-87c8-ce56c3c7b8f9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 26.057s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 950.998686] env[61839]: DEBUG nova.network.neutron [None req-3cb5a8d7-2dab-4fe1-9636-3e19d5eeb851 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: e265dcd3-6ab5-44b1-85be-bad934ebdb79] Updating instance_info_cache with network_info: [{"id": "fd557952-f86b-487e-9fb9-8cb7d176384e", "address": "fa:16:3e:c1:5a:fc", "network": {"id": "47b25b31-1262-485a-a847-2918f7fce488", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-914746239-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "25686a503d044467a1d641f14e14c65c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8c58d99d-ec12-4fc3-ab39-042b3f8cbb89", "external-id": "nsx-vlan-transportzone-44", "segmentation_id": 44, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfd557952-f8", "ovs_interfaceid": "fd557952-f86b-487e-9fb9-8cb7d176384e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 951.135685] env[61839]: DEBUG oslo_vmware.api [None req-7ff34d1a-453a-49ff-af63-371c12d0a7c5 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52e84c5e-74b8-71c1-875d-8f6abcb8f554, 'name': SearchDatastore_Task, 'duration_secs': 0.057168} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 951.136076] env[61839]: DEBUG oslo_concurrency.lockutils [None req-7ff34d1a-453a-49ff-af63-371c12d0a7c5 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 951.136268] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-7ff34d1a-453a-49ff-af63-371c12d0a7c5 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 3e27bc4a-a4f3-4929-931a-0c3ecaf10e65] Processing image e497cc62-282a-4a70-9770-22d80d8a1013 {{(pid=61839) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 951.136508] env[61839]: DEBUG oslo_concurrency.lockutils [None req-7ff34d1a-453a-49ff-af63-371c12d0a7c5 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 951.136660] env[61839]: DEBUG oslo_concurrency.lockutils [None req-7ff34d1a-453a-49ff-af63-371c12d0a7c5 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 951.136843] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-7ff34d1a-453a-49ff-af63-371c12d0a7c5 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 951.138108] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b026e0da-f01e-44e4-bc93-b91011123777 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.151644] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-7ff34d1a-453a-49ff-af63-371c12d0a7c5 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 951.151839] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-7ff34d1a-453a-49ff-af63-371c12d0a7c5 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61839) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 951.155353] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-867cccee-77a4-4b6c-8b38-b33a0b359813 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.161464] env[61839]: DEBUG oslo_vmware.api [None req-7ff34d1a-453a-49ff-af63-371c12d0a7c5 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Waiting for the task: (returnval){ [ 951.161464] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52a54486-2f6d-c73e-1e37-c30201abdc6b" [ 951.161464] env[61839]: _type = "Task" [ 951.161464] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 951.169514] env[61839]: DEBUG oslo_vmware.api [None req-7ff34d1a-453a-49ff-af63-371c12d0a7c5 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52a54486-2f6d-c73e-1e37-c30201abdc6b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.191564] env[61839]: DEBUG nova.network.neutron [None req-3cb92deb-2f40-434a-9700-7a733b835e57 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4] Updating instance_info_cache with network_info: [{"id": "bc315481-8651-4be3-bdd5-269b569b2817", "address": "fa:16:3e:ce:61:f0", "network": {"id": "54f10e74-ee7b-499c-a6b6-a27d337719cd", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-79581761-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.143", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5f789f3900a347b59c491e9d141fb9e7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe20ef0e-0991-44d7-887d-08dddac0b56b", "external-id": "nsx-vlan-transportzone-991", "segmentation_id": 991, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbc315481-86", "ovs_interfaceid": "bc315481-8651-4be3-bdd5-269b569b2817", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 951.304164] env[61839]: DEBUG nova.scheduler.client.report [None req-dba42a3e-32cb-4660-b49f-d5351f0075ef tempest-ServerAddressesNegativeTestJSON-1352160384 tempest-ServerAddressesNegativeTestJSON-1352160384-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 
0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 951.307553] env[61839]: DEBUG oslo_vmware.api [None req-ac991200-7ce6-466a-b5fd-f91f890fd8c6 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Task: {'id': task-1314803, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.582538} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 951.309974] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-ac991200-7ce6-466a-b5fd-f91f890fd8c6 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk to [datastore2] 23ee24d5-bccd-497d-a53f-b9723fd9c707/23ee24d5-bccd-497d-a53f-b9723fd9c707.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 951.310219] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-ac991200-7ce6-466a-b5fd-f91f890fd8c6 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 23ee24d5-bccd-497d-a53f-b9723fd9c707] Extending root virtual disk to 1048576 {{(pid=61839) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 951.310477] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ee5ed8b9-76f1-421a-aafb-25df3011e0d6 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.318457] env[61839]: DEBUG oslo_vmware.api [None req-ac991200-7ce6-466a-b5fd-f91f890fd8c6 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Waiting for the task: (returnval){ [ 951.318457] env[61839]: value = "task-1314804" [ 951.318457] env[61839]: _type = "Task" [ 951.318457] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 951.328764] env[61839]: DEBUG oslo_vmware.api [None req-ac991200-7ce6-466a-b5fd-f91f890fd8c6 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Task: {'id': task-1314804, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.430252] env[61839]: DEBUG nova.network.neutron [req-be0119e5-82e6-4c2e-ba24-aade15cf04ba req-fbc27b0f-6878-469e-b173-9b7256dcf7ec service nova] [instance: 3e27bc4a-a4f3-4929-931a-0c3ecaf10e65] Updated VIF entry in instance network info cache for port d227bf3a-c4a3-4ba4-9df9-4c7ab460feb2. 
{{(pid=61839) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 951.430651] env[61839]: DEBUG nova.network.neutron [req-be0119e5-82e6-4c2e-ba24-aade15cf04ba req-fbc27b0f-6878-469e-b173-9b7256dcf7ec service nova] [instance: 3e27bc4a-a4f3-4929-931a-0c3ecaf10e65] Updating instance_info_cache with network_info: [{"id": "d227bf3a-c4a3-4ba4-9df9-4c7ab460feb2", "address": "fa:16:3e:ae:c7:50", "network": {"id": "100bb090-83a3-4de8-bd2b-f65900cd5a3e", "bridge": "br-int", "label": "tempest-ServersTestJSON-901620943-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5aba1e066cb4400dbbacc92f393962e6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3b67e519-46cf-44ce-b670-4ba4c0c5b658", "external-id": "nsx-vlan-transportzone-110", "segmentation_id": 110, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd227bf3a-c4", "ovs_interfaceid": "d227bf3a-c4a3-4ba4-9df9-4c7ab460feb2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 951.501720] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3cb5a8d7-2dab-4fe1-9636-3e19d5eeb851 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Releasing lock "refresh_cache-e265dcd3-6ab5-44b1-85be-bad934ebdb79" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 951.673031] env[61839]: DEBUG oslo_vmware.api [None req-7ff34d1a-453a-49ff-af63-371c12d0a7c5 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52a54486-2f6d-c73e-1e37-c30201abdc6b, 'name': SearchDatastore_Task, 'duration_secs': 0.010082} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 951.673031] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eabf3287-98bb-4a22-a968-4f35c6413122 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.677952] env[61839]: DEBUG oslo_vmware.api [None req-7ff34d1a-453a-49ff-af63-371c12d0a7c5 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Waiting for the task: (returnval){ [ 951.677952] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52b652a8-40b2-6506-bea9-4ef36f3bbecf" [ 951.677952] env[61839]: _type = "Task" [ 951.677952] env[61839]: } to complete. 
{{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 951.686589] env[61839]: DEBUG oslo_vmware.api [None req-7ff34d1a-453a-49ff-af63-371c12d0a7c5 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52b652a8-40b2-6506-bea9-4ef36f3bbecf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.694178] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3cb92deb-2f40-434a-9700-7a733b835e57 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Releasing lock "refresh_cache-fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 951.752262] env[61839]: DEBUG nova.compute.manager [req-3c09f2c3-7d66-44cc-b6b7-cd4b0f8f0182 req-b10edef8-9987-4208-bfcb-b1f53d5780b2 service nova] [instance: e265dcd3-6ab5-44b1-85be-bad934ebdb79] Received event network-vif-unplugged-fd557952-f86b-487e-9fb9-8cb7d176384e {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 951.752262] env[61839]: DEBUG oslo_concurrency.lockutils [req-3c09f2c3-7d66-44cc-b6b7-cd4b0f8f0182 req-b10edef8-9987-4208-bfcb-b1f53d5780b2 service nova] Acquiring lock "e265dcd3-6ab5-44b1-85be-bad934ebdb79-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 951.752443] env[61839]: DEBUG oslo_concurrency.lockutils [req-3c09f2c3-7d66-44cc-b6b7-cd4b0f8f0182 req-b10edef8-9987-4208-bfcb-b1f53d5780b2 service nova] Lock "e265dcd3-6ab5-44b1-85be-bad934ebdb79-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 951.752639] env[61839]: DEBUG oslo_concurrency.lockutils [req-3c09f2c3-7d66-44cc-b6b7-cd4b0f8f0182 req-b10edef8-9987-4208-bfcb-b1f53d5780b2 service nova] Lock "e265dcd3-6ab5-44b1-85be-bad934ebdb79-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 951.752855] env[61839]: DEBUG nova.compute.manager [req-3c09f2c3-7d66-44cc-b6b7-cd4b0f8f0182 req-b10edef8-9987-4208-bfcb-b1f53d5780b2 service nova] [instance: e265dcd3-6ab5-44b1-85be-bad934ebdb79] No waiting events found dispatching network-vif-unplugged-fd557952-f86b-487e-9fb9-8cb7d176384e {{(pid=61839) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 951.753491] env[61839]: WARNING nova.compute.manager [req-3c09f2c3-7d66-44cc-b6b7-cd4b0f8f0182 req-b10edef8-9987-4208-bfcb-b1f53d5780b2 service nova] [instance: e265dcd3-6ab5-44b1-85be-bad934ebdb79] Received unexpected event network-vif-unplugged-fd557952-f86b-487e-9fb9-8cb7d176384e for instance with vm_state shelved and task_state shelving_offloading. 
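The acquire/held/released triples above are emitted by oslo.concurrency's lockutils, which serializes work on shared state such as "compute_resources" and the per-instance event lists. A minimal sketch of the two usual forms, assuming oslo.concurrency is installed; the function bodies are placeholders, not Nova code:

    # Requires oslo.concurrency (pip install oslo.concurrency). Only the
    # locking pattern is shown; the bodies are placeholders.
    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def update_usage():
        # Runs with the in-process "compute_resources" lock held, one
        # caller at a time -- the acquire/held/released lines above are
        # logged by lockutils itself around sections like this.
        pass

    def refresh_cache(instance_uuid):
        # Ad-hoc context-manager form, used around per-instance work
        # such as the "refresh_cache-<uuid>" locks in the log.
        with lockutils.lock('refresh_cache-%s' % instance_uuid):
            pass

    update_usage()
    refresh_cache('e265dcd3-6ab5-44b1-85be-bad934ebdb79')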
[ 951.773242] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-3cb5a8d7-2dab-4fe1-9636-3e19d5eeb851 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: e265dcd3-6ab5-44b1-85be-bad934ebdb79] Destroying instance {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 951.774362] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-600c9e60-2911-41f1-895c-bc05654bf857 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.782759] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-3cb5a8d7-2dab-4fe1-9636-3e19d5eeb851 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: e265dcd3-6ab5-44b1-85be-bad934ebdb79] Unregistering the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 951.783008] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-41d206e3-bd6d-4e16-a312-89495b59fcc0 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.809200] env[61839]: DEBUG oslo_concurrency.lockutils [None req-dba42a3e-32cb-4660-b49f-d5351f0075ef tempest-ServerAddressesNegativeTestJSON-1352160384 tempest-ServerAddressesNegativeTestJSON-1352160384-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.875s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 951.811922] env[61839]: DEBUG oslo_concurrency.lockutils [None req-c179ae22-1694-4b99-9f62-5d0902929126 tempest-ServerActionsV293TestJSON-151230769 tempest-ServerActionsV293TestJSON-151230769-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 18.367s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 951.812220] env[61839]: DEBUG nova.objects.instance [None req-c179ae22-1694-4b99-9f62-5d0902929126 tempest-ServerActionsV293TestJSON-151230769 tempest-ServerActionsV293TestJSON-151230769-project-member] Lazy-loading 'resources' on Instance uuid d187e75f-39a9-467b-b5ef-e2772d9b71af {{(pid=61839) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 951.829278] env[61839]: DEBUG oslo_vmware.api [None req-ac991200-7ce6-466a-b5fd-f91f890fd8c6 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Task: {'id': task-1314804, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069563} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 951.829498] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-ac991200-7ce6-466a-b5fd-f91f890fd8c6 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 23ee24d5-bccd-497d-a53f-b9723fd9c707] Extended root virtual disk {{(pid=61839) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 951.830503] env[61839]: INFO nova.scheduler.client.report [None req-dba42a3e-32cb-4660-b49f-d5351f0075ef tempest-ServerAddressesNegativeTestJSON-1352160384 tempest-ServerAddressesNegativeTestJSON-1352160384-project-member] Deleted allocations for instance 12087baa-e700-4977-b2df-3aa2c56cc2f6 [ 951.834861] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-423516b5-0e31-43ea-ad68-35fe0f93f634 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.860057] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-ac991200-7ce6-466a-b5fd-f91f890fd8c6 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 23ee24d5-bccd-497d-a53f-b9723fd9c707] Reconfiguring VM instance instance-0000005c to attach disk [datastore2] 23ee24d5-bccd-497d-a53f-b9723fd9c707/23ee24d5-bccd-497d-a53f-b9723fd9c707.vmdk or device None with type sparse {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 951.860442] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-940b5213-19e0-454a-9055-d706085e23f8 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.879889] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-3cb5a8d7-2dab-4fe1-9636-3e19d5eeb851 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: e265dcd3-6ab5-44b1-85be-bad934ebdb79] Unregistered the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 951.880079] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-3cb5a8d7-2dab-4fe1-9636-3e19d5eeb851 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: e265dcd3-6ab5-44b1-85be-bad934ebdb79] Deleting contents of the VM from datastore datastore2 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 951.880263] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-3cb5a8d7-2dab-4fe1-9636-3e19d5eeb851 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Deleting the datastore file [datastore2] e265dcd3-6ab5-44b1-85be-bad934ebdb79 {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 951.880952] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-66c34d05-ba68-46f0-9337-3651caf16c7e {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.887458] env[61839]: DEBUG oslo_vmware.api [None req-ac991200-7ce6-466a-b5fd-f91f890fd8c6 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Waiting for the task: (returnval){ [ 951.887458] env[61839]: value = "task-1314806" [ 951.887458] env[61839]: _type = "Task" [ 951.887458] env[61839]: 
} to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 951.888618] env[61839]: DEBUG oslo_vmware.api [None req-3cb5a8d7-2dab-4fe1-9636-3e19d5eeb851 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Waiting for the task: (returnval){ [ 951.888618] env[61839]: value = "task-1314807" [ 951.888618] env[61839]: _type = "Task" [ 951.888618] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 951.899511] env[61839]: DEBUG oslo_vmware.api [None req-ac991200-7ce6-466a-b5fd-f91f890fd8c6 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Task: {'id': task-1314806, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.902564] env[61839]: DEBUG oslo_vmware.api [None req-3cb5a8d7-2dab-4fe1-9636-3e19d5eeb851 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': task-1314807, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.932906] env[61839]: DEBUG oslo_concurrency.lockutils [req-be0119e5-82e6-4c2e-ba24-aade15cf04ba req-fbc27b0f-6878-469e-b173-9b7256dcf7ec service nova] Releasing lock "refresh_cache-3e27bc4a-a4f3-4929-931a-0c3ecaf10e65" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 952.148988] env[61839]: DEBUG nova.compute.manager [None req-aaf374c0-0e60-4b67-b233-126ef11dce55 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: 21e1c5b2-9865-457b-87c8-ce56c3c7b8f9] Checking state {{(pid=61839) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 952.149840] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fedeb6c9-496c-43ae-9075-d050786c5fb7 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.188749] env[61839]: DEBUG oslo_vmware.api [None req-7ff34d1a-453a-49ff-af63-371c12d0a7c5 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52b652a8-40b2-6506-bea9-4ef36f3bbecf, 'name': SearchDatastore_Task, 'duration_secs': 0.009608} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 952.189051] env[61839]: DEBUG oslo_concurrency.lockutils [None req-7ff34d1a-453a-49ff-af63-371c12d0a7c5 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 952.189327] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ff34d1a-453a-49ff-af63-371c12d0a7c5 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk to [datastore2] 3e27bc4a-a4f3-4929-931a-0c3ecaf10e65/3e27bc4a-a4f3-4929-931a-0c3ecaf10e65.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 952.189602] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-edeef1b6-fd7a-46f2-8a2c-a12a710c0bda {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.201665] env[61839]: DEBUG oslo_vmware.api [None req-7ff34d1a-453a-49ff-af63-371c12d0a7c5 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Waiting for the task: (returnval){ [ 952.201665] env[61839]: value = "task-1314808" [ 952.201665] env[61839]: _type = "Task" [ 952.201665] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 952.211164] env[61839]: DEBUG oslo_vmware.api [None req-7ff34d1a-453a-49ff-af63-371c12d0a7c5 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': task-1314808, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.338683] env[61839]: DEBUG oslo_concurrency.lockutils [None req-dba42a3e-32cb-4660-b49f-d5351f0075ef tempest-ServerAddressesNegativeTestJSON-1352160384 tempest-ServerAddressesNegativeTestJSON-1352160384-project-member] Lock "12087baa-e700-4977-b2df-3aa2c56cc2f6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 24.912s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 952.407753] env[61839]: DEBUG oslo_vmware.api [None req-3cb5a8d7-2dab-4fe1-9636-3e19d5eeb851 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': task-1314807, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.158123} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 952.408124] env[61839]: DEBUG oslo_vmware.api [None req-ac991200-7ce6-466a-b5fd-f91f890fd8c6 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Task: {'id': task-1314806, 'name': ReconfigVM_Task, 'duration_secs': 0.293858} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 952.410690] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-3cb5a8d7-2dab-4fe1-9636-3e19d5eeb851 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Deleted the datastore file {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 952.410909] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-3cb5a8d7-2dab-4fe1-9636-3e19d5eeb851 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: e265dcd3-6ab5-44b1-85be-bad934ebdb79] Deleted contents of the VM from datastore datastore2 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 952.411111] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-3cb5a8d7-2dab-4fe1-9636-3e19d5eeb851 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: e265dcd3-6ab5-44b1-85be-bad934ebdb79] Instance destroyed {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 952.412951] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-ac991200-7ce6-466a-b5fd-f91f890fd8c6 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 23ee24d5-bccd-497d-a53f-b9723fd9c707] Reconfigured VM instance instance-0000005c to attach disk [datastore2] 23ee24d5-bccd-497d-a53f-b9723fd9c707/23ee24d5-bccd-497d-a53f-b9723fd9c707.vmdk or device None with type sparse {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 952.413927] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-02a610f9-1829-4581-a4b9-17d86b48eca7 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.422023] env[61839]: DEBUG oslo_vmware.api [None req-ac991200-7ce6-466a-b5fd-f91f890fd8c6 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Waiting for the task: (returnval){ [ 952.422023] env[61839]: value = "task-1314809" [ 952.422023] env[61839]: _type = "Task" [ 952.422023] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 952.431471] env[61839]: INFO nova.scheduler.client.report [None req-3cb5a8d7-2dab-4fe1-9636-3e19d5eeb851 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Deleted allocations for instance e265dcd3-6ab5-44b1-85be-bad934ebdb79 [ 952.437778] env[61839]: DEBUG oslo_vmware.api [None req-ac991200-7ce6-466a-b5fd-f91f890fd8c6 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Task: {'id': task-1314809, 'name': Rename_Task} progress is 5%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.584077] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22dac6a8-e8e1-4040-81ba-0fc9d1a6c766 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.593915] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5533fc74-ab8a-42be-b974-0179894c1eb6 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.628986] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf45ce77-3414-4b16-a028-3332c8b7a398 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.637999] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fe06e0b-5bc4-4030-b09a-515bc66c8fe3 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.653673] env[61839]: DEBUG nova.compute.provider_tree [None req-c179ae22-1694-4b99-9f62-5d0902929126 tempest-ServerActionsV293TestJSON-151230769 tempest-ServerActionsV293TestJSON-151230769-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 952.662938] env[61839]: INFO nova.compute.manager [None req-aaf374c0-0e60-4b67-b233-126ef11dce55 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: 21e1c5b2-9865-457b-87c8-ce56c3c7b8f9] instance snapshotting [ 952.667488] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c3cc93f-f6d0-4dbe-8975-6739aa118b9b {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.689029] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc3e0cfb-4779-42ff-a050-c81dd30d58b3 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.716360] env[61839]: DEBUG oslo_vmware.api [None req-7ff34d1a-453a-49ff-af63-371c12d0a7c5 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': task-1314808, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.48543} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 952.716747] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ff34d1a-453a-49ff-af63-371c12d0a7c5 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk to [datastore2] 3e27bc4a-a4f3-4929-931a-0c3ecaf10e65/3e27bc4a-a4f3-4929-931a-0c3ecaf10e65.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 952.716991] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-7ff34d1a-453a-49ff-af63-371c12d0a7c5 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 3e27bc4a-a4f3-4929-931a-0c3ecaf10e65] Extending root virtual disk to 1048576 {{(pid=61839) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 952.717551] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-39ca6c13-0375-4abf-8783-a6eb7876313d {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.727416] env[61839]: DEBUG oslo_vmware.api [None req-7ff34d1a-453a-49ff-af63-371c12d0a7c5 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Waiting for the task: (returnval){ [ 952.727416] env[61839]: value = "task-1314810" [ 952.727416] env[61839]: _type = "Task" [ 952.727416] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 952.738495] env[61839]: DEBUG oslo_vmware.api [None req-7ff34d1a-453a-49ff-af63-371c12d0a7c5 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': task-1314810, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.932351] env[61839]: DEBUG oslo_vmware.api [None req-ac991200-7ce6-466a-b5fd-f91f890fd8c6 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Task: {'id': task-1314809, 'name': Rename_Task, 'duration_secs': 0.342302} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 952.932703] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-ac991200-7ce6-466a-b5fd-f91f890fd8c6 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 23ee24d5-bccd-497d-a53f-b9723fd9c707] Powering on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 952.932974] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-024f64d9-c33e-43c0-b471-17b5e9f7d7e2 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.939811] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3cb5a8d7-2dab-4fe1-9636-3e19d5eeb851 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 952.940909] env[61839]: DEBUG oslo_vmware.api [None req-ac991200-7ce6-466a-b5fd-f91f890fd8c6 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Waiting for the task: (returnval){ [ 952.940909] env[61839]: value = "task-1314811" [ 952.940909] env[61839]: _type = "Task" [ 952.940909] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 952.949841] env[61839]: DEBUG oslo_vmware.api [None req-ac991200-7ce6-466a-b5fd-f91f890fd8c6 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Task: {'id': task-1314811, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.157279] env[61839]: DEBUG nova.scheduler.client.report [None req-c179ae22-1694-4b99-9f62-5d0902929126 tempest-ServerActionsV293TestJSON-151230769 tempest-ServerActionsV293TestJSON-151230769-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 953.199567] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-aaf374c0-0e60-4b67-b233-126ef11dce55 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: 21e1c5b2-9865-457b-87c8-ce56c3c7b8f9] Creating Snapshot of the VM instance {{(pid=61839) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 953.199827] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-71404dc6-e397-4363-af50-20c883679394 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.207948] env[61839]: DEBUG oslo_vmware.api [None req-aaf374c0-0e60-4b67-b233-126ef11dce55 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Waiting for the task: (returnval){ [ 953.207948] env[61839]: value = "task-1314812" [ 953.207948] env[61839]: _type = "Task" [ 953.207948] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 953.215221] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf454bd2-9e52-4f7b-92e9-3a285870c637 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.221669] env[61839]: DEBUG oslo_vmware.api [None req-aaf374c0-0e60-4b67-b233-126ef11dce55 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Task: {'id': task-1314812, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.239582] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-3cb92deb-2f40-434a-9700-7a733b835e57 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4] Updating instance 'fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4' progress to 0 {{(pid=61839) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 953.254124] env[61839]: DEBUG oslo_vmware.api [None req-7ff34d1a-453a-49ff-af63-371c12d0a7c5 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': task-1314810, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.076715} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 953.254124] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-7ff34d1a-453a-49ff-af63-371c12d0a7c5 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 3e27bc4a-a4f3-4929-931a-0c3ecaf10e65] Extended root virtual disk {{(pid=61839) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 953.254124] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67190720-0db7-4e13-80bb-5618681f0fe3 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.293657] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-7ff34d1a-453a-49ff-af63-371c12d0a7c5 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 3e27bc4a-a4f3-4929-931a-0c3ecaf10e65] Reconfiguring VM instance instance-0000005d to attach disk [datastore2] 3e27bc4a-a4f3-4929-931a-0c3ecaf10e65/3e27bc4a-a4f3-4929-931a-0c3ecaf10e65.vmdk or device None with type sparse {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 953.294036] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2345e79a-70e6-47d9-9885-c67a7804270d {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.320488] env[61839]: DEBUG oslo_vmware.api [None req-7ff34d1a-453a-49ff-af63-371c12d0a7c5 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Waiting for the task: (returnval){ [ 953.320488] env[61839]: value = "task-1314813" [ 953.320488] env[61839]: _type = "Task" [ 953.320488] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 953.330817] env[61839]: DEBUG oslo_vmware.api [None req-7ff34d1a-453a-49ff-af63-371c12d0a7c5 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': task-1314813, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.451936] env[61839]: DEBUG oslo_vmware.api [None req-ac991200-7ce6-466a-b5fd-f91f890fd8c6 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Task: {'id': task-1314811, 'name': PowerOnVM_Task} progress is 71%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.666987] env[61839]: DEBUG oslo_concurrency.lockutils [None req-c179ae22-1694-4b99-9f62-5d0902929126 tempest-ServerActionsV293TestJSON-151230769 tempest-ServerActionsV293TestJSON-151230769-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.855s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 953.669448] env[61839]: DEBUG oslo_concurrency.lockutils [None req-61f94066-337d-4ee6-ac13-e4b101f6af4a tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.668s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 953.671037] env[61839]: INFO nova.compute.claims [None req-61f94066-337d-4ee6-ac13-e4b101f6af4a tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] [instance: 75344275-bdf2-4526-a101-e62ec270dd72] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 953.720362] env[61839]: DEBUG oslo_vmware.api [None req-aaf374c0-0e60-4b67-b233-126ef11dce55 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Task: {'id': task-1314812, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.749486] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-3cb92deb-2f40-434a-9700-7a733b835e57 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4] Powering off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 953.750148] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7f5795ae-9c3e-4c48-8f1e-95ca81e04406 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.759292] env[61839]: DEBUG oslo_vmware.api [None req-3cb92deb-2f40-434a-9700-7a733b835e57 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Waiting for the task: (returnval){ [ 953.759292] env[61839]: value = "task-1314814" [ 953.759292] env[61839]: _type = "Task" [ 953.759292] env[61839]: } to complete. 
{{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 953.770249] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-3cb92deb-2f40-434a-9700-7a733b835e57 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4] VM already powered off {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 953.770515] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-3cb92deb-2f40-434a-9700-7a733b835e57 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4] Updating instance 'fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4' progress to 17 {{(pid=61839) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 953.834158] env[61839]: DEBUG oslo_vmware.api [None req-7ff34d1a-453a-49ff-af63-371c12d0a7c5 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': task-1314813, 'name': ReconfigVM_Task, 'duration_secs': 0.51181} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 953.837315] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-7ff34d1a-453a-49ff-af63-371c12d0a7c5 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 3e27bc4a-a4f3-4929-931a-0c3ecaf10e65] Reconfigured VM instance instance-0000005d to attach disk [datastore2] 3e27bc4a-a4f3-4929-931a-0c3ecaf10e65/3e27bc4a-a4f3-4929-931a-0c3ecaf10e65.vmdk or device None with type sparse {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 953.837315] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a9586bc0-20ac-4368-99d8-24c1528874f9 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.843047] env[61839]: DEBUG oslo_vmware.api [None req-7ff34d1a-453a-49ff-af63-371c12d0a7c5 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Waiting for the task: (returnval){ [ 953.843047] env[61839]: value = "task-1314815" [ 953.843047] env[61839]: _type = "Task" [ 953.843047] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 953.854059] env[61839]: DEBUG oslo_vmware.api [None req-7ff34d1a-453a-49ff-af63-371c12d0a7c5 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': task-1314815, 'name': Rename_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.902606] env[61839]: DEBUG nova.compute.manager [req-35d9b988-f04a-49d0-9432-645da52622d7 req-fdab6409-f9d4-4051-bd1e-3c296c5bc5d1 service nova] [instance: e265dcd3-6ab5-44b1-85be-bad934ebdb79] Received event network-changed-fd557952-f86b-487e-9fb9-8cb7d176384e {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 953.902896] env[61839]: DEBUG nova.compute.manager [req-35d9b988-f04a-49d0-9432-645da52622d7 req-fdab6409-f9d4-4051-bd1e-3c296c5bc5d1 service nova] [instance: e265dcd3-6ab5-44b1-85be-bad934ebdb79] Refreshing instance network info cache due to event network-changed-fd557952-f86b-487e-9fb9-8cb7d176384e. 
{{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 953.903515] env[61839]: DEBUG oslo_concurrency.lockutils [req-35d9b988-f04a-49d0-9432-645da52622d7 req-fdab6409-f9d4-4051-bd1e-3c296c5bc5d1 service nova] Acquiring lock "refresh_cache-e265dcd3-6ab5-44b1-85be-bad934ebdb79" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 953.903515] env[61839]: DEBUG oslo_concurrency.lockutils [req-35d9b988-f04a-49d0-9432-645da52622d7 req-fdab6409-f9d4-4051-bd1e-3c296c5bc5d1 service nova] Acquired lock "refresh_cache-e265dcd3-6ab5-44b1-85be-bad934ebdb79" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 953.903641] env[61839]: DEBUG nova.network.neutron [req-35d9b988-f04a-49d0-9432-645da52622d7 req-fdab6409-f9d4-4051-bd1e-3c296c5bc5d1 service nova] [instance: e265dcd3-6ab5-44b1-85be-bad934ebdb79] Refreshing network info cache for port fd557952-f86b-487e-9fb9-8cb7d176384e {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 953.958026] env[61839]: DEBUG oslo_vmware.api [None req-ac991200-7ce6-466a-b5fd-f91f890fd8c6 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Task: {'id': task-1314811, 'name': PowerOnVM_Task, 'duration_secs': 0.836056} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 953.958026] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-ac991200-7ce6-466a-b5fd-f91f890fd8c6 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 23ee24d5-bccd-497d-a53f-b9723fd9c707] Powered on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 953.958026] env[61839]: INFO nova.compute.manager [None req-ac991200-7ce6-466a-b5fd-f91f890fd8c6 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 23ee24d5-bccd-497d-a53f-b9723fd9c707] Took 8.28 seconds to spawn the instance on the hypervisor. 
[ 953.958026] env[61839]: DEBUG nova.compute.manager [None req-ac991200-7ce6-466a-b5fd-f91f890fd8c6 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 23ee24d5-bccd-497d-a53f-b9723fd9c707] Checking state {{(pid=61839) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 953.958026] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b4a3cc6-a1cb-4595-bcd4-8f3a3b31c941 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.983070] env[61839]: DEBUG oslo_concurrency.lockutils [None req-f77f5ae0-de24-4a80-93a3-b3e18f3a5fb0 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Acquiring lock "e265dcd3-6ab5-44b1-85be-bad934ebdb79" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 954.198340] env[61839]: DEBUG oslo_concurrency.lockutils [None req-c179ae22-1694-4b99-9f62-5d0902929126 tempest-ServerActionsV293TestJSON-151230769 tempest-ServerActionsV293TestJSON-151230769-project-member] Lock "d187e75f-39a9-467b-b5ef-e2772d9b71af" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 23.886s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 954.220269] env[61839]: DEBUG oslo_vmware.api [None req-aaf374c0-0e60-4b67-b233-126ef11dce55 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Task: {'id': task-1314812, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.279018] env[61839]: DEBUG nova.virt.hardware [None req-3cb92deb-2f40-434a-9700-7a733b835e57 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-18T16:51:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=e497cc62-282a-4a70-9770-22d80d8a1013,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 954.279018] env[61839]: DEBUG nova.virt.hardware [None req-3cb92deb-2f40-434a-9700-7a733b835e57 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Flavor limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 954.279018] env[61839]: DEBUG nova.virt.hardware [None req-3cb92deb-2f40-434a-9700-7a733b835e57 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Image limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 954.279018] env[61839]: DEBUG nova.virt.hardware [None req-3cb92deb-2f40-434a-9700-7a733b835e57 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Flavor pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 954.279018] env[61839]: DEBUG nova.virt.hardware [None req-3cb92deb-2f40-434a-9700-7a733b835e57 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Image pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 954.279018] env[61839]: DEBUG nova.virt.hardware [None req-3cb92deb-2f40-434a-9700-7a733b835e57 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 954.279776] env[61839]: DEBUG nova.virt.hardware [None req-3cb92deb-2f40-434a-9700-7a733b835e57 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 954.280099] env[61839]: DEBUG nova.virt.hardware [None req-3cb92deb-2f40-434a-9700-7a733b835e57 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 954.280418] env[61839]: DEBUG nova.virt.hardware [None req-3cb92deb-2f40-434a-9700-7a733b835e57 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Got 1 possible topologies 
{{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 954.280712] env[61839]: DEBUG nova.virt.hardware [None req-3cb92deb-2f40-434a-9700-7a733b835e57 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 954.281028] env[61839]: DEBUG nova.virt.hardware [None req-3cb92deb-2f40-434a-9700-7a733b835e57 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 954.286614] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f9451333-9ebe-4b89-8556-3d9dba73d475 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.304287] env[61839]: DEBUG oslo_vmware.api [None req-3cb92deb-2f40-434a-9700-7a733b835e57 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Waiting for the task: (returnval){ [ 954.304287] env[61839]: value = "task-1314816" [ 954.304287] env[61839]: _type = "Task" [ 954.304287] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 954.314547] env[61839]: DEBUG oslo_vmware.api [None req-3cb92deb-2f40-434a-9700-7a733b835e57 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1314816, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.354960] env[61839]: DEBUG oslo_vmware.api [None req-7ff34d1a-453a-49ff-af63-371c12d0a7c5 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': task-1314815, 'name': Rename_Task, 'duration_secs': 0.223501} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 954.355362] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ff34d1a-453a-49ff-af63-371c12d0a7c5 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 3e27bc4a-a4f3-4929-931a-0c3ecaf10e65] Powering on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 954.355673] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c298be0c-1eb3-4b06-aaf4-09eadb411c9d {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.363630] env[61839]: DEBUG oslo_vmware.api [None req-7ff34d1a-453a-49ff-af63-371c12d0a7c5 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Waiting for the task: (returnval){ [ 954.363630] env[61839]: value = "task-1314817" [ 954.363630] env[61839]: _type = "Task" [ 954.363630] env[61839]: } to complete. 
{{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 954.373524] env[61839]: DEBUG oslo_vmware.api [None req-7ff34d1a-453a-49ff-af63-371c12d0a7c5 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': task-1314817, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.478278] env[61839]: INFO nova.compute.manager [None req-ac991200-7ce6-466a-b5fd-f91f890fd8c6 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 23ee24d5-bccd-497d-a53f-b9723fd9c707] Took 27.72 seconds to build instance. [ 954.723068] env[61839]: DEBUG nova.network.neutron [req-35d9b988-f04a-49d0-9432-645da52622d7 req-fdab6409-f9d4-4051-bd1e-3c296c5bc5d1 service nova] [instance: e265dcd3-6ab5-44b1-85be-bad934ebdb79] Updated VIF entry in instance network info cache for port fd557952-f86b-487e-9fb9-8cb7d176384e. {{(pid=61839) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 954.723068] env[61839]: DEBUG nova.network.neutron [req-35d9b988-f04a-49d0-9432-645da52622d7 req-fdab6409-f9d4-4051-bd1e-3c296c5bc5d1 service nova] [instance: e265dcd3-6ab5-44b1-85be-bad934ebdb79] Updating instance_info_cache with network_info: [{"id": "fd557952-f86b-487e-9fb9-8cb7d176384e", "address": "fa:16:3e:c1:5a:fc", "network": {"id": "47b25b31-1262-485a-a847-2918f7fce488", "bridge": null, "label": "tempest-DeleteServersTestJSON-914746239-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "25686a503d044467a1d641f14e14c65c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tapfd557952-f8", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 954.738481] env[61839]: DEBUG oslo_vmware.api [None req-aaf374c0-0e60-4b67-b233-126ef11dce55 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Task: {'id': task-1314812, 'name': CreateSnapshot_Task, 'duration_secs': 1.410475} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 954.739306] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-aaf374c0-0e60-4b67-b233-126ef11dce55 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: 21e1c5b2-9865-457b-87c8-ce56c3c7b8f9] Created Snapshot of the VM instance {{(pid=61839) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 954.740708] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cb752fd-9bba-492e-8a3e-75f31b0f1098 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.820225] env[61839]: DEBUG oslo_vmware.api [None req-3cb92deb-2f40-434a-9700-7a733b835e57 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1314816, 'name': ReconfigVM_Task, 'duration_secs': 0.182085} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 954.820909] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-3cb92deb-2f40-434a-9700-7a733b835e57 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4] Updating instance 'fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4' progress to 33 {{(pid=61839) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 954.879323] env[61839]: DEBUG oslo_vmware.api [None req-7ff34d1a-453a-49ff-af63-371c12d0a7c5 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': task-1314817, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.976591] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62c3b157-e0b0-4b58-8e9c-cbf648458ebd {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.981841] env[61839]: DEBUG oslo_concurrency.lockutils [None req-ac991200-7ce6-466a-b5fd-f91f890fd8c6 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Lock "23ee24d5-bccd-497d-a53f-b9723fd9c707" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 29.233s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 954.988444] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba5d646a-1a50-4ad7-a411-6862c51d7a36 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.023195] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5877d3ee-03db-4e2b-a483-d53e34307547 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.032073] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b59c42d0-b4a2-4080-bb9c-02ef1a026dc4 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.046798] env[61839]: DEBUG nova.compute.provider_tree [None req-61f94066-337d-4ee6-ac13-e4b101f6af4a tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 955.227493] env[61839]: DEBUG oslo_concurrency.lockutils [req-35d9b988-f04a-49d0-9432-645da52622d7 req-fdab6409-f9d4-4051-bd1e-3c296c5bc5d1 service nova] Releasing lock "refresh_cache-e265dcd3-6ab5-44b1-85be-bad934ebdb79" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 955.269125] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-aaf374c0-0e60-4b67-b233-126ef11dce55 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: 21e1c5b2-9865-457b-87c8-ce56c3c7b8f9] Creating linked-clone VM from snapshot {{(pid=61839) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 955.269523] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-c19a4af7-72b8-46d7-90c2-354b69452881 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.279451] env[61839]: DEBUG oslo_vmware.api [None req-aaf374c0-0e60-4b67-b233-126ef11dce55 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Waiting for the task: (returnval){ [ 955.279451] env[61839]: value = "task-1314818" [ 955.279451] env[61839]: _type = "Task" [ 955.279451] env[61839]: } to complete. 
{{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 955.289064] env[61839]: DEBUG oslo_vmware.api [None req-aaf374c0-0e60-4b67-b233-126ef11dce55 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Task: {'id': task-1314818, 'name': CloneVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 955.327773] env[61839]: DEBUG nova.virt.hardware [None req-3cb92deb-2f40-434a-9700-7a733b835e57 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-18T16:51:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=e497cc62-282a-4a70-9770-22d80d8a1013,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 955.327944] env[61839]: DEBUG nova.virt.hardware [None req-3cb92deb-2f40-434a-9700-7a733b835e57 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Flavor limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 955.327987] env[61839]: DEBUG nova.virt.hardware [None req-3cb92deb-2f40-434a-9700-7a733b835e57 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Image limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 955.328348] env[61839]: DEBUG nova.virt.hardware [None req-3cb92deb-2f40-434a-9700-7a733b835e57 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Flavor pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 955.328439] env[61839]: DEBUG nova.virt.hardware [None req-3cb92deb-2f40-434a-9700-7a733b835e57 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Image pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 955.328636] env[61839]: DEBUG nova.virt.hardware [None req-3cb92deb-2f40-434a-9700-7a733b835e57 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 955.328891] env[61839]: DEBUG nova.virt.hardware [None req-3cb92deb-2f40-434a-9700-7a733b835e57 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 955.329119] env[61839]: DEBUG nova.virt.hardware [None req-3cb92deb-2f40-434a-9700-7a733b835e57 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] 
Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 955.329354] env[61839]: DEBUG nova.virt.hardware [None req-3cb92deb-2f40-434a-9700-7a733b835e57 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Got 1 possible topologies {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 955.329561] env[61839]: DEBUG nova.virt.hardware [None req-3cb92deb-2f40-434a-9700-7a733b835e57 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 955.329916] env[61839]: DEBUG nova.virt.hardware [None req-3cb92deb-2f40-434a-9700-7a733b835e57 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 955.335375] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-3cb92deb-2f40-434a-9700-7a733b835e57 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4] Reconfiguring VM instance instance-0000003c to detach disk 2000 {{(pid=61839) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 955.335765] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0d2ee017-f4df-4e41-8cee-1477cae839d1 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.362355] env[61839]: DEBUG oslo_vmware.api [None req-3cb92deb-2f40-434a-9700-7a733b835e57 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Waiting for the task: (returnval){ [ 955.362355] env[61839]: value = "task-1314819" [ 955.362355] env[61839]: _type = "Task" [ 955.362355] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 955.376620] env[61839]: DEBUG oslo_vmware.api [None req-7ff34d1a-453a-49ff-af63-371c12d0a7c5 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': task-1314817, 'name': PowerOnVM_Task, 'duration_secs': 0.647583} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 955.380023] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ff34d1a-453a-49ff-af63-371c12d0a7c5 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 3e27bc4a-a4f3-4929-931a-0c3ecaf10e65] Powered on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 955.380250] env[61839]: INFO nova.compute.manager [None req-7ff34d1a-453a-49ff-af63-371c12d0a7c5 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 3e27bc4a-a4f3-4929-931a-0c3ecaf10e65] Took 7.29 seconds to spawn the instance on the hypervisor. 
[ 955.380439] env[61839]: DEBUG nova.compute.manager [None req-7ff34d1a-453a-49ff-af63-371c12d0a7c5 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 3e27bc4a-a4f3-4929-931a-0c3ecaf10e65] Checking state {{(pid=61839) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 955.381057] env[61839]: DEBUG oslo_vmware.api [None req-3cb92deb-2f40-434a-9700-7a733b835e57 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1314819, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 955.381434] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29f637bf-3ffe-4fd0-91a6-ec9a2e25d13b {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.552273] env[61839]: DEBUG nova.scheduler.client.report [None req-61f94066-337d-4ee6-ac13-e4b101f6af4a tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 955.793357] env[61839]: DEBUG oslo_vmware.api [None req-aaf374c0-0e60-4b67-b233-126ef11dce55 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Task: {'id': task-1314818, 'name': CloneVM_Task} progress is 94%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 955.815478] env[61839]: DEBUG nova.compute.manager [None req-2110b186-7030-4148-a543-65a3916b2aa1 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 23ee24d5-bccd-497d-a53f-b9723fd9c707] Stashing vm_state: active {{(pid=61839) _prep_resize /opt/stack/nova/nova/compute/manager.py:5624}} [ 955.875549] env[61839]: DEBUG oslo_vmware.api [None req-3cb92deb-2f40-434a-9700-7a733b835e57 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1314819, 'name': ReconfigVM_Task, 'duration_secs': 0.256096} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 955.876329] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-3cb92deb-2f40-434a-9700-7a733b835e57 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4] Reconfigured VM instance instance-0000003c to detach disk 2000 {{(pid=61839) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 955.877218] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a3f0b01-f6e4-4831-af28-2b4408d0973c {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.905871] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-3cb92deb-2f40-434a-9700-7a733b835e57 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4] Reconfiguring VM instance instance-0000003c to attach disk [datastore2] fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4/fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4.vmdk or device None with type thin {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 955.907799] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-aa228747-0a9c-40cf-9d81-87c2ae971537 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.921065] env[61839]: INFO nova.compute.manager [None req-7ff34d1a-453a-49ff-af63-371c12d0a7c5 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 3e27bc4a-a4f3-4929-931a-0c3ecaf10e65] Took 27.63 seconds to build instance. [ 955.929728] env[61839]: DEBUG oslo_vmware.api [None req-3cb92deb-2f40-434a-9700-7a733b835e57 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Waiting for the task: (returnval){ [ 955.929728] env[61839]: value = "task-1314820" [ 955.929728] env[61839]: _type = "Task" [ 955.929728] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 955.940244] env[61839]: DEBUG oslo_vmware.api [None req-3cb92deb-2f40-434a-9700-7a733b835e57 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1314820, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.057229] env[61839]: DEBUG oslo_concurrency.lockutils [None req-61f94066-337d-4ee6-ac13-e4b101f6af4a tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.388s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 956.057875] env[61839]: DEBUG nova.compute.manager [None req-61f94066-337d-4ee6-ac13-e4b101f6af4a tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] [instance: 75344275-bdf2-4526-a101-e62ec270dd72] Start building networks asynchronously for instance. 
{{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 956.061766] env[61839]: DEBUG oslo_concurrency.lockutils [None req-1e94be6d-33e1-4c83-9b5e-75a1fbfe5edf tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 12.961s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 956.061766] env[61839]: DEBUG nova.objects.instance [None req-1e94be6d-33e1-4c83-9b5e-75a1fbfe5edf tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Lazy-loading 'resources' on Instance uuid 86525ea7-af75-4b10-85a1-c0fbab73ea5f {{(pid=61839) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 956.291589] env[61839]: DEBUG oslo_vmware.api [None req-aaf374c0-0e60-4b67-b233-126ef11dce55 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Task: {'id': task-1314818, 'name': CloneVM_Task} progress is 94%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.337598] env[61839]: DEBUG oslo_concurrency.lockutils [None req-2110b186-7030-4148-a543-65a3916b2aa1 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 956.423688] env[61839]: DEBUG oslo_concurrency.lockutils [None req-7ff34d1a-453a-49ff-af63-371c12d0a7c5 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Lock "3e27bc4a-a4f3-4929-931a-0c3ecaf10e65" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 29.139s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 956.441901] env[61839]: DEBUG oslo_vmware.api [None req-3cb92deb-2f40-434a-9700-7a733b835e57 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1314820, 'name': ReconfigVM_Task, 'duration_secs': 0.263465} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 956.442374] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-3cb92deb-2f40-434a-9700-7a733b835e57 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4] Reconfigured VM instance instance-0000003c to attach disk [datastore2] fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4/fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4.vmdk or device None with type thin {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 956.442836] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-3cb92deb-2f40-434a-9700-7a733b835e57 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4] Updating instance 'fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4' progress to 50 {{(pid=61839) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 956.564139] env[61839]: DEBUG nova.compute.utils [None req-61f94066-337d-4ee6-ac13-e4b101f6af4a tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Using /dev/sd instead of None {{(pid=61839) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 956.565572] env[61839]: DEBUG nova.compute.manager [None req-61f94066-337d-4ee6-ac13-e4b101f6af4a tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] [instance: 75344275-bdf2-4526-a101-e62ec270dd72] Not allocating networking since 'none' was specified. {{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 956.566983] env[61839]: DEBUG nova.objects.instance [None req-1e94be6d-33e1-4c83-9b5e-75a1fbfe5edf tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Lazy-loading 'numa_topology' on Instance uuid 86525ea7-af75-4b10-85a1-c0fbab73ea5f {{(pid=61839) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 956.791273] env[61839]: DEBUG oslo_vmware.api [None req-aaf374c0-0e60-4b67-b233-126ef11dce55 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Task: {'id': task-1314818, 'name': CloneVM_Task} progress is 100%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.951208] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87934835-0483-4b90-ab77-44f392cecce0 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.980716] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-818bea74-89bf-4642-9543-9b045946f675 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.003601] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0abcef39-20e2-4d1f-9e70-67180de20850 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Acquiring lock "73b83239-bbc8-41d1-aec3-2b4519c320af" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 957.003965] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0abcef39-20e2-4d1f-9e70-67180de20850 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Lock "73b83239-bbc8-41d1-aec3-2b4519c320af" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 957.005137] env[61839]: DEBUG oslo_concurrency.lockutils [None req-836a90eb-6e5d-44c4-b013-cc645d786999 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Acquiring lock "56369316-a445-4a2a-a0a6-967074104e19" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 957.005341] env[61839]: DEBUG oslo_concurrency.lockutils [None req-836a90eb-6e5d-44c4-b013-cc645d786999 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Lock "56369316-a445-4a2a-a0a6-967074104e19" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 957.006576] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-3cb92deb-2f40-434a-9700-7a733b835e57 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4] Updating instance 'fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4' progress to 67 {{(pid=61839) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 957.069338] env[61839]: DEBUG nova.compute.manager [None req-61f94066-337d-4ee6-ac13-e4b101f6af4a tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] [instance: 75344275-bdf2-4526-a101-e62ec270dd72] Start building block device mappings for instance. 
{{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 957.072583] env[61839]: DEBUG nova.objects.base [None req-1e94be6d-33e1-4c83-9b5e-75a1fbfe5edf tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Object Instance<86525ea7-af75-4b10-85a1-c0fbab73ea5f> lazy-loaded attributes: resources,numa_topology {{(pid=61839) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 957.292934] env[61839]: DEBUG oslo_vmware.api [None req-aaf374c0-0e60-4b67-b233-126ef11dce55 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Task: {'id': task-1314818, 'name': CloneVM_Task, 'duration_secs': 1.590267} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 957.293266] env[61839]: INFO nova.virt.vmwareapi.vmops [None req-aaf374c0-0e60-4b67-b233-126ef11dce55 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: 21e1c5b2-9865-457b-87c8-ce56c3c7b8f9] Created linked-clone VM from snapshot [ 957.294017] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18f7b926-f5c7-4b32-a881-eb6ea400f175 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.299593] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a91cc94a-8b21-4730-9987-9ac2de084581 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.306444] env[61839]: DEBUG nova.virt.vmwareapi.images [None req-aaf374c0-0e60-4b67-b233-126ef11dce55 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: 21e1c5b2-9865-457b-87c8-ce56c3c7b8f9] Uploading image 9475c833-3360-4ea1-bc60-3ec0bc96117b {{(pid=61839) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 957.312072] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8704372-39c3-4f43-8765-53e996054336 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.345209] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-aaf374c0-0e60-4b67-b233-126ef11dce55 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: 21e1c5b2-9865-457b-87c8-ce56c3c7b8f9] Destroying the VM {{(pid=61839) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 957.345815] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd156504-ede7-42b4-9661-1e0cc385fee2 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.348369] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-bc0ce315-72b9-4e53-be8e-0c336d396ce4 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.357418] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05889e5b-7b86-4438-9e6b-41baf783419c {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.361931] env[61839]: DEBUG oslo_vmware.api [None req-aaf374c0-0e60-4b67-b233-126ef11dce55 
tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Waiting for the task: (returnval){ [ 957.361931] env[61839]: value = "task-1314821" [ 957.361931] env[61839]: _type = "Task" [ 957.361931] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 957.374623] env[61839]: DEBUG nova.compute.provider_tree [None req-1e94be6d-33e1-4c83-9b5e-75a1fbfe5edf tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 957.382234] env[61839]: DEBUG oslo_vmware.api [None req-aaf374c0-0e60-4b67-b233-126ef11dce55 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Task: {'id': task-1314821, 'name': Destroy_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.386708] env[61839]: DEBUG oslo_concurrency.lockutils [None req-b4d0a2e1-0649-414e-a6e9-a456893a9b36 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Acquiring lock "3c832102-cacc-4dd8-a336-2aa1d8bd8116" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 957.386944] env[61839]: DEBUG oslo_concurrency.lockutils [None req-b4d0a2e1-0649-414e-a6e9-a456893a9b36 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Lock "3c832102-cacc-4dd8-a336-2aa1d8bd8116" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 957.511654] env[61839]: INFO nova.compute.manager [None req-836a90eb-6e5d-44c4-b013-cc645d786999 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 56369316-a445-4a2a-a0a6-967074104e19] Detaching volume 3c43b563-deaf-4d67-8cda-7d714c1bfac1 [ 957.513811] env[61839]: DEBUG nova.compute.manager [None req-0abcef39-20e2-4d1f-9e70-67180de20850 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 73b83239-bbc8-41d1-aec3-2b4519c320af] Starting instance... {{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 957.556961] env[61839]: INFO nova.virt.block_device [None req-836a90eb-6e5d-44c4-b013-cc645d786999 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 56369316-a445-4a2a-a0a6-967074104e19] Attempting to driver detach volume 3c43b563-deaf-4d67-8cda-7d714c1bfac1 from mountpoint /dev/sdb [ 957.557349] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-836a90eb-6e5d-44c4-b013-cc645d786999 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 56369316-a445-4a2a-a0a6-967074104e19] Volume detach. 
Driver type: vmdk {{(pid=61839) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 957.557669] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-836a90eb-6e5d-44c4-b013-cc645d786999 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 56369316-a445-4a2a-a0a6-967074104e19] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-281395', 'volume_id': '3c43b563-deaf-4d67-8cda-7d714c1bfac1', 'name': 'volume-3c43b563-deaf-4d67-8cda-7d714c1bfac1', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '56369316-a445-4a2a-a0a6-967074104e19', 'attached_at': '', 'detached_at': '', 'volume_id': '3c43b563-deaf-4d67-8cda-7d714c1bfac1', 'serial': '3c43b563-deaf-4d67-8cda-7d714c1bfac1'} {{(pid=61839) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 957.558660] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51ef46d9-d0e6-4bcb-81eb-cf995b7314c3 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.562773] env[61839]: DEBUG nova.network.neutron [None req-3cb92deb-2f40-434a-9700-7a733b835e57 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4] Port bc315481-8651-4be3-bdd5-269b569b2817 binding to destination host cpu-1 is already ACTIVE {{(pid=61839) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 957.589620] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9d6c1bc-92dc-4d63-b45b-b09c893fef66 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.598847] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72d70570-7d89-4a81-9d81-586906d8bad9 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.620495] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc4f17b3-dbf0-4aa7-9c42-6b38ac4c5b8f {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.637802] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-836a90eb-6e5d-44c4-b013-cc645d786999 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] The volume has not been displaced from its original location: [datastore1] volume-3c43b563-deaf-4d67-8cda-7d714c1bfac1/volume-3c43b563-deaf-4d67-8cda-7d714c1bfac1.vmdk. No consolidation needed. 
{{(pid=61839) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 957.643176] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-836a90eb-6e5d-44c4-b013-cc645d786999 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 56369316-a445-4a2a-a0a6-967074104e19] Reconfiguring VM instance instance-0000004c to detach disk 2001 {{(pid=61839) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 957.643751] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-22c71b5d-1dd1-4946-bead-f925fbff1cc6 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.663763] env[61839]: DEBUG oslo_vmware.api [None req-836a90eb-6e5d-44c4-b013-cc645d786999 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Waiting for the task: (returnval){ [ 957.663763] env[61839]: value = "task-1314822" [ 957.663763] env[61839]: _type = "Task" [ 957.663763] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 957.672264] env[61839]: DEBUG oslo_vmware.api [None req-836a90eb-6e5d-44c4-b013-cc645d786999 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Task: {'id': task-1314822, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.874543] env[61839]: DEBUG oslo_vmware.api [None req-aaf374c0-0e60-4b67-b233-126ef11dce55 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Task: {'id': task-1314821, 'name': Destroy_Task, 'duration_secs': 0.487351} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 957.875118] env[61839]: INFO nova.virt.vmwareapi.vm_util [None req-aaf374c0-0e60-4b67-b233-126ef11dce55 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: 21e1c5b2-9865-457b-87c8-ce56c3c7b8f9] Destroyed the VM [ 957.875443] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-aaf374c0-0e60-4b67-b233-126ef11dce55 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: 21e1c5b2-9865-457b-87c8-ce56c3c7b8f9] Deleting Snapshot of the VM instance {{(pid=61839) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 957.875760] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-dc6302c1-6c00-451d-87e2-e687e3a78f53 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.878829] env[61839]: DEBUG nova.scheduler.client.report [None req-1e94be6d-33e1-4c83-9b5e-75a1fbfe5edf tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 957.889643] env[61839]: DEBUG nova.compute.manager [None req-b4d0a2e1-0649-414e-a6e9-a456893a9b36 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 3c832102-cacc-4dd8-a336-2aa1d8bd8116] Starting instance... {{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 957.892680] env[61839]: DEBUG oslo_vmware.api [None req-aaf374c0-0e60-4b67-b233-126ef11dce55 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Waiting for the task: (returnval){ [ 957.892680] env[61839]: value = "task-1314823" [ 957.892680] env[61839]: _type = "Task" [ 957.892680] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 957.904444] env[61839]: DEBUG oslo_vmware.api [None req-aaf374c0-0e60-4b67-b233-126ef11dce55 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Task: {'id': task-1314823, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.041887] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0abcef39-20e2-4d1f-9e70-67180de20850 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 958.094127] env[61839]: DEBUG nova.compute.manager [None req-61f94066-337d-4ee6-ac13-e4b101f6af4a tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] [instance: 75344275-bdf2-4526-a101-e62ec270dd72] Start spawning the instance on the hypervisor. {{(pid=61839) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 958.120415] env[61839]: DEBUG nova.virt.hardware [None req-61f94066-337d-4ee6-ac13-e4b101f6af4a tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-18T16:51:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-18T16:50:57Z,direct_url=<?>,disk_format='vmdk',id=e497cc62-282a-4a70-9770-22d80d8a1013,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a98e034276e44534a9feace637762da7',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-10-18T16:50:58Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 958.120688] env[61839]: DEBUG nova.virt.hardware [None req-61f94066-337d-4ee6-ac13-e4b101f6af4a tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Flavor limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 958.120849] env[61839]: DEBUG nova.virt.hardware [None req-61f94066-337d-4ee6-ac13-e4b101f6af4a tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Image limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 958.121096] env[61839]: DEBUG nova.virt.hardware [None req-61f94066-337d-4ee6-ac13-e4b101f6af4a tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Flavor pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 958.121205] env[61839]: DEBUG nova.virt.hardware [None req-61f94066-337d-4ee6-ac13-e4b101f6af4a tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Image pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 958.121359] env[61839]: DEBUG nova.virt.hardware [None req-61f94066-337d-4ee6-ac13-e4b101f6af4a tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 958.121572] env[61839]: DEBUG nova.virt.hardware 
[None req-61f94066-337d-4ee6-ac13-e4b101f6af4a tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 958.121737] env[61839]: DEBUG nova.virt.hardware [None req-61f94066-337d-4ee6-ac13-e4b101f6af4a tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 958.121914] env[61839]: DEBUG nova.virt.hardware [None req-61f94066-337d-4ee6-ac13-e4b101f6af4a tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Got 1 possible topologies {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 958.122095] env[61839]: DEBUG nova.virt.hardware [None req-61f94066-337d-4ee6-ac13-e4b101f6af4a tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 958.122272] env[61839]: DEBUG nova.virt.hardware [None req-61f94066-337d-4ee6-ac13-e4b101f6af4a tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 958.123338] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e8682aa-b685-4972-afbf-f4b6b68ac564 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.132211] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e12ab2c-ec38-4061-9f5f-af3db8c877e6 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.147805] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-61f94066-337d-4ee6-ac13-e4b101f6af4a tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] [instance: 75344275-bdf2-4526-a101-e62ec270dd72] Instance VIF info [] {{(pid=61839) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 958.153192] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-61f94066-337d-4ee6-ac13-e4b101f6af4a tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Creating folder: Project (efae0f7f42e74acab5c24ce5877b068c). Parent ref: group-v281288. {{(pid=61839) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 958.153477] env[61839]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c6940f48-8cb6-4d46-8031-8d65b489713a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.168681] env[61839]: INFO nova.virt.vmwareapi.vm_util [None req-61f94066-337d-4ee6-ac13-e4b101f6af4a tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Created folder: Project (efae0f7f42e74acab5c24ce5877b068c) in parent group-v281288. 
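(Annotation: nearly every record above follows the same oslo.vmware pattern: invoke a vCenter method that returns a Task moref (CreateFolder, CreateVM_Task, ReconfigVM_Task, ...), then poll it until "completed successfully". A minimal standalone sketch of that pattern follows; the vCenter endpoint, credentials, and the choice of PowerOffVM_Task are placeholders for illustration, not values taken from this log.)

    # Minimal sketch of the invoke-then-poll pattern behind the
    # 'Waiting for the task: (returnval){ value = "task-..." }' and
    # '_poll_task ... progress is N%' records above.
    from oslo_vmware import api, vim_util

    session = api.VMwareAPISession(
        'vc1.example.test', 'admin', 'secret',  # hypothetical vCenter + creds
        api_retry_count=10,
        task_poll_interval=0.5)  # controls how often _poll_task fires

    # Look up one VM managed-object reference; roughly what the repeated
    # PropertyCollector.RetrievePropertiesEx invocations in the log do.
    result = session.invoke_api(vim_util, 'get_objects', session.vim,
                                'VirtualMachine', 1)
    vm_ref = result.objects[0].obj

    # Start an asynchronous vCenter task and block until it finishes;
    # wait_for_task() emits the progress/"completed successfully" lines.
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    task_info = session.wait_for_task(task)
    print(task_info.state)  # e.g. 'success'

(Nova's vmwareapi driver wraps the same session object, which is why the records interleave nova.virt.vmwareapi.* log lines with oslo_vmware.api/_poll_task ones.)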
[ 958.168885] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-61f94066-337d-4ee6-ac13-e4b101f6af4a tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Creating folder: Instances. Parent ref: group-v281427. {{(pid=61839) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 958.169543] env[61839]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8293a4a5-1b9a-41cd-b463-681147ef04a2 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.174322] env[61839]: DEBUG oslo_vmware.api [None req-836a90eb-6e5d-44c4-b013-cc645d786999 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Task: {'id': task-1314822, 'name': ReconfigVM_Task, 'duration_secs': 0.274864} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 958.174943] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-836a90eb-6e5d-44c4-b013-cc645d786999 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 56369316-a445-4a2a-a0a6-967074104e19] Reconfigured VM instance instance-0000004c to detach disk 2001 {{(pid=61839) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 958.179441] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fa8c340b-47c8-4041-aebd-cc433f97e564 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.190416] env[61839]: INFO nova.virt.vmwareapi.vm_util [None req-61f94066-337d-4ee6-ac13-e4b101f6af4a tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Created folder: Instances in parent group-v281427. [ 958.190653] env[61839]: DEBUG oslo.service.loopingcall [None req-61f94066-337d-4ee6-ac13-e4b101f6af4a tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 958.191255] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 75344275-bdf2-4526-a101-e62ec270dd72] Creating VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 958.191476] env[61839]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7fab9c36-5967-4872-a981-81eb263633d8 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.204444] env[61839]: DEBUG oslo_vmware.api [None req-836a90eb-6e5d-44c4-b013-cc645d786999 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Waiting for the task: (returnval){ [ 958.204444] env[61839]: value = "task-1314826" [ 958.204444] env[61839]: _type = "Task" [ 958.204444] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 958.211763] env[61839]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 958.211763] env[61839]: value = "task-1314827" [ 958.211763] env[61839]: _type = "Task" [ 958.211763] env[61839]: } to complete. 
{{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 958.214994] env[61839]: DEBUG oslo_vmware.api [None req-836a90eb-6e5d-44c4-b013-cc645d786999 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Task: {'id': task-1314826, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.224707] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314827, 'name': CreateVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.384862] env[61839]: DEBUG oslo_concurrency.lockutils [None req-1e94be6d-33e1-4c83-9b5e-75a1fbfe5edf tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.323s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 958.388267] env[61839]: DEBUG oslo_concurrency.lockutils [None req-4ba7cb8a-9f51-4c96-8bb5-515a7d4d8be8 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.027s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 958.389740] env[61839]: INFO nova.compute.claims [None req-4ba7cb8a-9f51-4c96-8bb5-515a7d4d8be8 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] [instance: a262845a-0ae2-4e0e-9040-01f0ed37c95c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 958.408191] env[61839]: DEBUG oslo_vmware.api [None req-aaf374c0-0e60-4b67-b233-126ef11dce55 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Task: {'id': task-1314823, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.412743] env[61839]: DEBUG oslo_concurrency.lockutils [None req-b4d0a2e1-0649-414e-a6e9-a456893a9b36 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 958.588787] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3cb92deb-2f40-434a-9700-7a733b835e57 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Acquiring lock "fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 958.589783] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3cb92deb-2f40-434a-9700-7a733b835e57 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Lock "fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 958.590128] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3cb92deb-2f40-434a-9700-7a733b835e57 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Lock "fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.001s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 958.715439] env[61839]: DEBUG oslo_vmware.api [None req-836a90eb-6e5d-44c4-b013-cc645d786999 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Task: {'id': task-1314826, 'name': ReconfigVM_Task, 'duration_secs': 0.186898} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 958.718467] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-836a90eb-6e5d-44c4-b013-cc645d786999 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 56369316-a445-4a2a-a0a6-967074104e19] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-281395', 'volume_id': '3c43b563-deaf-4d67-8cda-7d714c1bfac1', 'name': 'volume-3c43b563-deaf-4d67-8cda-7d714c1bfac1', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '56369316-a445-4a2a-a0a6-967074104e19', 'attached_at': '', 'detached_at': '', 'volume_id': '3c43b563-deaf-4d67-8cda-7d714c1bfac1', 'serial': '3c43b563-deaf-4d67-8cda-7d714c1bfac1'} {{(pid=61839) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 958.726612] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314827, 'name': CreateVM_Task, 'duration_secs': 0.323112} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 958.726781] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 75344275-bdf2-4526-a101-e62ec270dd72] Created VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 958.727215] env[61839]: DEBUG oslo_concurrency.lockutils [None req-61f94066-337d-4ee6-ac13-e4b101f6af4a tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 958.727443] env[61839]: DEBUG oslo_concurrency.lockutils [None req-61f94066-337d-4ee6-ac13-e4b101f6af4a tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 958.727761] env[61839]: DEBUG oslo_concurrency.lockutils [None req-61f94066-337d-4ee6-ac13-e4b101f6af4a tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 958.727999] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-924a55c1-24b8-4ace-b6c0-57470c47a5d2 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.732554] env[61839]: DEBUG oslo_vmware.api [None req-61f94066-337d-4ee6-ac13-e4b101f6af4a tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Waiting for the task: (returnval){ [ 958.732554] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]5220bc80-2466-d1db-4c2a-3dbe4643e5b8" [ 958.732554] env[61839]: _type = "Task" [ 958.732554] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 958.740877] env[61839]: DEBUG oslo_vmware.api [None req-61f94066-337d-4ee6-ac13-e4b101f6af4a tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]5220bc80-2466-d1db-4c2a-3dbe4643e5b8, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.893965] env[61839]: DEBUG oslo_concurrency.lockutils [None req-1e94be6d-33e1-4c83-9b5e-75a1fbfe5edf tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Lock "86525ea7-af75-4b10-85a1-c0fbab73ea5f" "released" by "nova.compute.manager.ComputeManager.shelve_instance.<locals>.do_shelve_instance" :: held 35.790s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 958.894978] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3eb64946-db7f-4575-9761-49a11077e028 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Lock "86525ea7-af75-4b10-85a1-c0fbab73ea5f" acquired by "nova.compute.manager.ComputeManager.unshelve_instance.<locals>.do_unshelve_instance" :: waited 13.614s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 958.895178] env[61839]: INFO nova.compute.manager [None req-3eb64946-db7f-4575-9761-49a11077e028 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] [instance: 86525ea7-af75-4b10-85a1-c0fbab73ea5f] Unshelving [ 958.907508] env[61839]: DEBUG oslo_vmware.api [None req-aaf374c0-0e60-4b67-b233-126ef11dce55 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Task: {'id': task-1314823, 'name': RemoveSnapshot_Task, 'duration_secs': 0.758161} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 958.908399] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-aaf374c0-0e60-4b67-b233-126ef11dce55 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: 21e1c5b2-9865-457b-87c8-ce56c3c7b8f9] Deleted Snapshot of the VM instance {{(pid=61839) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 959.245165] env[61839]: DEBUG oslo_vmware.api [None req-61f94066-337d-4ee6-ac13-e4b101f6af4a tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]5220bc80-2466-d1db-4c2a-3dbe4643e5b8, 'name': SearchDatastore_Task, 'duration_secs': 0.034052} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 959.245474] env[61839]: DEBUG oslo_concurrency.lockutils [None req-61f94066-337d-4ee6-ac13-e4b101f6af4a tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 959.245964] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-61f94066-337d-4ee6-ac13-e4b101f6af4a tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] [instance: 75344275-bdf2-4526-a101-e62ec270dd72] Processing image e497cc62-282a-4a70-9770-22d80d8a1013 {{(pid=61839) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 959.245964] env[61839]: DEBUG oslo_concurrency.lockutils [None req-61f94066-337d-4ee6-ac13-e4b101f6af4a tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 959.246095] env[61839]: DEBUG oslo_concurrency.lockutils [None req-61f94066-337d-4ee6-ac13-e4b101f6af4a tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 959.246268] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-61f94066-337d-4ee6-ac13-e4b101f6af4a tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 959.246622] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ad60d8ea-be79-4d92-9419-fef0e40dcc4a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.265511] env[61839]: DEBUG nova.objects.instance [None req-836a90eb-6e5d-44c4-b013-cc645d786999 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Lazy-loading 'flavor' on Instance uuid 56369316-a445-4a2a-a0a6-967074104e19 {{(pid=61839) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 959.271358] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-61f94066-337d-4ee6-ac13-e4b101f6af4a tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 959.271358] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-61f94066-337d-4ee6-ac13-e4b101f6af4a tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61839) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 959.272196] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e1e40f96-9a57-4025-9c05-0538c31ad64e {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.278739] env[61839]: DEBUG oslo_vmware.api [None req-61f94066-337d-4ee6-ac13-e4b101f6af4a tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Waiting for the task: (returnval){ [ 959.278739] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52584e7c-b080-0453-709f-c05ae8c7fa79" [ 959.278739] env[61839]: _type = "Task" [ 959.278739] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 959.288315] env[61839]: DEBUG oslo_vmware.api [None req-61f94066-337d-4ee6-ac13-e4b101f6af4a tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52584e7c-b080-0453-709f-c05ae8c7fa79, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.414274] env[61839]: WARNING nova.compute.manager [None req-aaf374c0-0e60-4b67-b233-126ef11dce55 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: 21e1c5b2-9865-457b-87c8-ce56c3c7b8f9] Image not found during snapshot: nova.exception.ImageNotFound: Image 9475c833-3360-4ea1-bc60-3ec0bc96117b could not be found. [ 959.645244] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3cb92deb-2f40-434a-9700-7a733b835e57 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Acquiring lock "refresh_cache-fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 959.645452] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3cb92deb-2f40-434a-9700-7a733b835e57 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Acquired lock "refresh_cache-fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 959.645637] env[61839]: DEBUG nova.network.neutron [None req-3cb92deb-2f40-434a-9700-7a733b835e57 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 959.762085] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84e3315f-abe4-4c3b-b4fb-b1b28ee5c1d7 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.776636] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e87876b3-8457-4d99-86af-c1b4c7f8d5ea {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.819614] env[61839]: DEBUG oslo_vmware.api [None req-61f94066-337d-4ee6-ac13-e4b101f6af4a tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Task: {'id': 
session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52584e7c-b080-0453-709f-c05ae8c7fa79, 'name': SearchDatastore_Task, 'duration_secs': 0.029983} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 959.821997] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de719ea3-9bdc-4399-b865-c9cebee899a2 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.824752] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4c058cf3-bda5-475c-8f46-8f4d1092c44c {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.836384] env[61839]: DEBUG oslo_vmware.api [None req-61f94066-337d-4ee6-ac13-e4b101f6af4a tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Waiting for the task: (returnval){ [ 959.836384] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52439e59-5dec-338e-2f5b-3abf2d8aa8dc" [ 959.836384] env[61839]: _type = "Task" [ 959.836384] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 959.841843] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40ddb58f-d5c8-42c0-84e7-331fbac7f171 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.855307] env[61839]: DEBUG oslo_vmware.api [None req-61f94066-337d-4ee6-ac13-e4b101f6af4a tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52439e59-5dec-338e-2f5b-3abf2d8aa8dc, 'name': SearchDatastore_Task, 'duration_secs': 0.013985} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 959.863146] env[61839]: DEBUG oslo_concurrency.lockutils [None req-61f94066-337d-4ee6-ac13-e4b101f6af4a tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 959.863519] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-61f94066-337d-4ee6-ac13-e4b101f6af4a tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk to [datastore2] 75344275-bdf2-4526-a101-e62ec270dd72/75344275-bdf2-4526-a101-e62ec270dd72.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 959.864089] env[61839]: DEBUG nova.compute.provider_tree [None req-4ba7cb8a-9f51-4c96-8bb5-515a7d4d8be8 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 959.865474] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-31679bb6-2dbc-49b3-85ce-9697b06ae49d {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.878129] env[61839]: DEBUG oslo_vmware.api [None req-61f94066-337d-4ee6-ac13-e4b101f6af4a tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Waiting for the task: (returnval){ [ 959.878129] env[61839]: value = "task-1314828" [ 959.878129] env[61839]: _type = "Task" [ 959.878129] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 959.888301] env[61839]: DEBUG oslo_vmware.api [None req-61f94066-337d-4ee6-ac13-e4b101f6af4a tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Task: {'id': task-1314828, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.923050] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3eb64946-db7f-4575-9761-49a11077e028 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 960.001304] env[61839]: DEBUG oslo_concurrency.lockutils [None req-c0666967-91a2-4589-a6fb-7ee1067b5a38 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Acquiring lock "21e1c5b2-9865-457b-87c8-ce56c3c7b8f9" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 960.001690] env[61839]: DEBUG oslo_concurrency.lockutils [None req-c0666967-91a2-4589-a6fb-7ee1067b5a38 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Lock "21e1c5b2-9865-457b-87c8-ce56c3c7b8f9" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 960.001983] env[61839]: DEBUG oslo_concurrency.lockutils [None req-c0666967-91a2-4589-a6fb-7ee1067b5a38 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Acquiring lock "21e1c5b2-9865-457b-87c8-ce56c3c7b8f9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 960.002370] env[61839]: DEBUG oslo_concurrency.lockutils [None req-c0666967-91a2-4589-a6fb-7ee1067b5a38 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Lock "21e1c5b2-9865-457b-87c8-ce56c3c7b8f9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 960.002630] env[61839]: DEBUG oslo_concurrency.lockutils [None req-c0666967-91a2-4589-a6fb-7ee1067b5a38 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Lock "21e1c5b2-9865-457b-87c8-ce56c3c7b8f9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 960.005257] env[61839]: INFO nova.compute.manager [None req-c0666967-91a2-4589-a6fb-7ee1067b5a38 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: 21e1c5b2-9865-457b-87c8-ce56c3c7b8f9] Terminating instance [ 960.007308] env[61839]: DEBUG nova.compute.manager [None req-c0666967-91a2-4589-a6fb-7ee1067b5a38 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: 21e1c5b2-9865-457b-87c8-ce56c3c7b8f9] Start destroying the instance on the hypervisor. 
{{(pid=61839) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 960.007512] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-c0666967-91a2-4589-a6fb-7ee1067b5a38 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: 21e1c5b2-9865-457b-87c8-ce56c3c7b8f9] Destroying instance {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 960.008629] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cac6f50-15af-4c4c-a05f-4e6642e6669a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.021388] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0666967-91a2-4589-a6fb-7ee1067b5a38 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: 21e1c5b2-9865-457b-87c8-ce56c3c7b8f9] Powering off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 960.021500] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8000e68f-2c70-488b-8863-65478170e2ec {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.030712] env[61839]: DEBUG oslo_vmware.api [None req-c0666967-91a2-4589-a6fb-7ee1067b5a38 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Waiting for the task: (returnval){ [ 960.030712] env[61839]: value = "task-1314829" [ 960.030712] env[61839]: _type = "Task" [ 960.030712] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 960.040290] env[61839]: DEBUG oslo_vmware.api [None req-c0666967-91a2-4589-a6fb-7ee1067b5a38 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Task: {'id': task-1314829, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.273472] env[61839]: DEBUG oslo_concurrency.lockutils [None req-836a90eb-6e5d-44c4-b013-cc645d786999 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Lock "56369316-a445-4a2a-a0a6-967074104e19" "released" by "nova.compute.manager.ComputeManager.detach_volume.<locals>.do_detach_volume" :: held 3.268s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 960.370123] env[61839]: DEBUG nova.scheduler.client.report [None req-4ba7cb8a-9f51-4c96-8bb5-515a7d4d8be8 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 960.389868] env[61839]: DEBUG oslo_vmware.api [None req-61f94066-337d-4ee6-ac13-e4b101f6af4a tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Task: {'id': task-1314828, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.396726] env[61839]: DEBUG nova.network.neutron [None req-3cb92deb-2f40-434a-9700-7a733b835e57 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4] Updating instance_info_cache with network_info: [{"id": "bc315481-8651-4be3-bdd5-269b569b2817", "address": "fa:16:3e:ce:61:f0", "network": {"id": "54f10e74-ee7b-499c-a6b6-a27d337719cd", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-79581761-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.143", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5f789f3900a347b59c491e9d141fb9e7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe20ef0e-0991-44d7-887d-08dddac0b56b", "external-id": "nsx-vlan-transportzone-991", "segmentation_id": 991, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbc315481-86", "ovs_interfaceid": "bc315481-8651-4be3-bdd5-269b569b2817", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 960.516593] env[61839]: DEBUG oslo_concurrency.lockutils [None req-ed1f6438-e53f-428f-8070-2a7e88ba6407 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Acquiring lock "56369316-a445-4a2a-a0a6-967074104e19" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 960.516959] env[61839]: DEBUG oslo_concurrency.lockutils [None req-ed1f6438-e53f-428f-8070-2a7e88ba6407 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Lock "56369316-a445-4a2a-a0a6-967074104e19" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 960.517156] env[61839]: DEBUG oslo_concurrency.lockutils [None req-ed1f6438-e53f-428f-8070-2a7e88ba6407 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Acquiring lock "56369316-a445-4a2a-a0a6-967074104e19-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 960.517442] env[61839]: DEBUG oslo_concurrency.lockutils [None req-ed1f6438-e53f-428f-8070-2a7e88ba6407 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Lock "56369316-a445-4a2a-a0a6-967074104e19-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61839) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 960.517622] env[61839]: DEBUG oslo_concurrency.lockutils [None req-ed1f6438-e53f-428f-8070-2a7e88ba6407 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Lock "56369316-a445-4a2a-a0a6-967074104e19-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 960.520567] env[61839]: INFO nova.compute.manager [None req-ed1f6438-e53f-428f-8070-2a7e88ba6407 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 56369316-a445-4a2a-a0a6-967074104e19] Terminating instance [ 960.522695] env[61839]: DEBUG nova.compute.manager [None req-ed1f6438-e53f-428f-8070-2a7e88ba6407 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 56369316-a445-4a2a-a0a6-967074104e19] Start destroying the instance on the hypervisor. {{(pid=61839) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 960.522902] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-ed1f6438-e53f-428f-8070-2a7e88ba6407 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 56369316-a445-4a2a-a0a6-967074104e19] Destroying instance {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 960.523921] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c696bd5-1acf-4a45-8dbb-f8aa0d7ee7a3 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.532935] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed1f6438-e53f-428f-8070-2a7e88ba6407 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 56369316-a445-4a2a-a0a6-967074104e19] Powering off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 960.536621] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ac3fe605-3e31-4235-8394-af0787779447 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.545160] env[61839]: DEBUG oslo_vmware.api [None req-c0666967-91a2-4589-a6fb-7ee1067b5a38 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Task: {'id': task-1314829, 'name': PowerOffVM_Task, 'duration_secs': 0.300746} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 960.546613] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0666967-91a2-4589-a6fb-7ee1067b5a38 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: 21e1c5b2-9865-457b-87c8-ce56c3c7b8f9] Powered off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 960.546814] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-c0666967-91a2-4589-a6fb-7ee1067b5a38 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: 21e1c5b2-9865-457b-87c8-ce56c3c7b8f9] Unregistering the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 960.547193] env[61839]: DEBUG oslo_vmware.api [None req-ed1f6438-e53f-428f-8070-2a7e88ba6407 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Waiting for the task: (returnval){ [ 960.547193] env[61839]: value = "task-1314830" [ 960.547193] env[61839]: _type = "Task" [ 960.547193] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 960.547417] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5dd6eb52-2a54-40b9-ae39-179439425bc0 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.560572] env[61839]: DEBUG oslo_vmware.api [None req-ed1f6438-e53f-428f-8070-2a7e88ba6407 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Task: {'id': task-1314830, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.640144] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-c0666967-91a2-4589-a6fb-7ee1067b5a38 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: 21e1c5b2-9865-457b-87c8-ce56c3c7b8f9] Unregistered the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 960.640144] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-c0666967-91a2-4589-a6fb-7ee1067b5a38 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: 21e1c5b2-9865-457b-87c8-ce56c3c7b8f9] Deleting contents of the VM from datastore datastore2 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 960.640417] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-c0666967-91a2-4589-a6fb-7ee1067b5a38 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Deleting the datastore file [datastore2] 21e1c5b2-9865-457b-87c8-ce56c3c7b8f9 {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 960.640741] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-67fc1517-78a1-4bcf-a741-d81e33d5c4c4 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.649600] env[61839]: DEBUG oslo_vmware.api [None req-c0666967-91a2-4589-a6fb-7ee1067b5a38 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Waiting for the task: (returnval){ [ 960.649600] env[61839]: value = "task-1314832" [ 960.649600] env[61839]: _type = "Task" [ 960.649600] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 960.660686] env[61839]: DEBUG oslo_vmware.api [None req-c0666967-91a2-4589-a6fb-7ee1067b5a38 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Task: {'id': task-1314832, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.875075] env[61839]: DEBUG oslo_concurrency.lockutils [None req-4ba7cb8a-9f51-4c96-8bb5-515a7d4d8be8 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.487s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 960.875894] env[61839]: DEBUG nova.compute.manager [None req-4ba7cb8a-9f51-4c96-8bb5-515a7d4d8be8 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] [instance: a262845a-0ae2-4e0e-9040-01f0ed37c95c] Start building networks asynchronously for instance. 
{{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 960.878429] env[61839]: DEBUG oslo_concurrency.lockutils [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 10.830s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 960.878701] env[61839]: DEBUG oslo_concurrency.lockutils [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 960.878798] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61839) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 960.879125] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3cb5a8d7-2dab-4fe1-9636-3e19d5eeb851 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 7.939s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 960.879462] env[61839]: DEBUG nova.objects.instance [None req-3cb5a8d7-2dab-4fe1-9636-3e19d5eeb851 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Lazy-loading 'resources' on Instance uuid e265dcd3-6ab5-44b1-85be-bad934ebdb79 {{(pid=61839) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 960.881326] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e15747ab-3d48-42cc-822f-9a1c4f344ac2 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.896621] env[61839]: DEBUG oslo_vmware.api [None req-61f94066-337d-4ee6-ac13-e4b101f6af4a tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Task: {'id': task-1314828, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.714008} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 960.896995] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-61f94066-337d-4ee6-ac13-e4b101f6af4a tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk to [datastore2] 75344275-bdf2-4526-a101-e62ec270dd72/75344275-bdf2-4526-a101-e62ec270dd72.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 960.897279] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-61f94066-337d-4ee6-ac13-e4b101f6af4a tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] [instance: 75344275-bdf2-4526-a101-e62ec270dd72] Extending root virtual disk to 1048576 {{(pid=61839) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 960.898870] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3eea1f61-bf5a-4f5b-9439-6fdb2a3e936e {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.903065] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-17518b18-60cd-48e4-8228-ceed61d49d3f {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.905124] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3cb92deb-2f40-434a-9700-7a733b835e57 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Releasing lock "refresh_cache-fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 960.923254] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-610931db-649f-40e4-a169-e8f0f515480d {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.926515] env[61839]: DEBUG oslo_vmware.api [None req-61f94066-337d-4ee6-ac13-e4b101f6af4a tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Waiting for the task: (returnval){ [ 960.926515] env[61839]: value = "task-1314833" [ 960.926515] env[61839]: _type = "Task" [ 960.926515] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 960.934376] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-082de710-a8e4-4f8e-b318-9be48e1892e6 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.940849] env[61839]: DEBUG oslo_vmware.api [None req-61f94066-337d-4ee6-ac13-e4b101f6af4a tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Task: {'id': task-1314833, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.970491] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179029MB free_disk=134GB free_vcpus=48 pci_devices=None {{(pid=61839) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 960.970657] env[61839]: DEBUG oslo_concurrency.lockutils [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 961.060212] env[61839]: DEBUG oslo_vmware.api [None req-ed1f6438-e53f-428f-8070-2a7e88ba6407 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Task: {'id': task-1314830, 'name': PowerOffVM_Task, 'duration_secs': 0.203457} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 961.060486] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed1f6438-e53f-428f-8070-2a7e88ba6407 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 56369316-a445-4a2a-a0a6-967074104e19] Powered off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 961.060659] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-ed1f6438-e53f-428f-8070-2a7e88ba6407 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 56369316-a445-4a2a-a0a6-967074104e19] Unregistering the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 961.060920] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-35b936ef-7e9f-467a-82fe-7503ee57a09d {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.161476] env[61839]: DEBUG oslo_vmware.api [None req-c0666967-91a2-4589-a6fb-7ee1067b5a38 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Task: {'id': task-1314832, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.303966} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 961.161690] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-c0666967-91a2-4589-a6fb-7ee1067b5a38 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Deleted the datastore file {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 961.161887] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-c0666967-91a2-4589-a6fb-7ee1067b5a38 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: 21e1c5b2-9865-457b-87c8-ce56c3c7b8f9] Deleted contents of the VM from datastore datastore2 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 961.162085] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-c0666967-91a2-4589-a6fb-7ee1067b5a38 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: 21e1c5b2-9865-457b-87c8-ce56c3c7b8f9] Instance destroyed {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 961.162272] env[61839]: INFO nova.compute.manager [None req-c0666967-91a2-4589-a6fb-7ee1067b5a38 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] [instance: 21e1c5b2-9865-457b-87c8-ce56c3c7b8f9] Took 1.15 seconds to destroy the instance on the hypervisor. [ 961.162601] env[61839]: DEBUG oslo.service.loopingcall [None req-c0666967-91a2-4589-a6fb-7ee1067b5a38 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 961.162837] env[61839]: DEBUG nova.compute.manager [-] [instance: 21e1c5b2-9865-457b-87c8-ce56c3c7b8f9] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 961.162936] env[61839]: DEBUG nova.network.neutron [-] [instance: 21e1c5b2-9865-457b-87c8-ce56c3c7b8f9] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 961.186286] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-ed1f6438-e53f-428f-8070-2a7e88ba6407 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 56369316-a445-4a2a-a0a6-967074104e19] Unregistered the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 961.186509] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-ed1f6438-e53f-428f-8070-2a7e88ba6407 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 56369316-a445-4a2a-a0a6-967074104e19] Deleting contents of the VM from datastore datastore1 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 961.186768] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-ed1f6438-e53f-428f-8070-2a7e88ba6407 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Deleting the datastore file [datastore1] 56369316-a445-4a2a-a0a6-967074104e19 {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 961.186991] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fba8e329-9968-45d2-9316-1e9255069e16 
{{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.194797] env[61839]: DEBUG oslo_vmware.api [None req-ed1f6438-e53f-428f-8070-2a7e88ba6407 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Waiting for the task: (returnval){ [ 961.194797] env[61839]: value = "task-1314835" [ 961.194797] env[61839]: _type = "Task" [ 961.194797] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 961.203829] env[61839]: DEBUG oslo_vmware.api [None req-ed1f6438-e53f-428f-8070-2a7e88ba6407 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Task: {'id': task-1314835, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.382668] env[61839]: DEBUG nova.compute.utils [None req-4ba7cb8a-9f51-4c96-8bb5-515a7d4d8be8 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Using /dev/sd instead of None {{(pid=61839) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 961.384047] env[61839]: DEBUG nova.compute.manager [None req-4ba7cb8a-9f51-4c96-8bb5-515a7d4d8be8 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] [instance: a262845a-0ae2-4e0e-9040-01f0ed37c95c] Not allocating networking since 'none' was specified. {{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 961.384591] env[61839]: DEBUG nova.objects.instance [None req-3cb5a8d7-2dab-4fe1-9636-3e19d5eeb851 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Lazy-loading 'numa_topology' on Instance uuid e265dcd3-6ab5-44b1-85be-bad934ebdb79 {{(pid=61839) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 961.432401] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d2a4a87-c047-45c3-abc4-7508d52d096c {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.441472] env[61839]: DEBUG oslo_vmware.api [None req-61f94066-337d-4ee6-ac13-e4b101f6af4a tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Task: {'id': task-1314833, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074387} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 961.455659] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-61f94066-337d-4ee6-ac13-e4b101f6af4a tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] [instance: 75344275-bdf2-4526-a101-e62ec270dd72] Extended root virtual disk {{(pid=61839) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 961.456656] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d123849f-c4be-4be6-b039-48ad8aefaff3 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.459845] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-431e298e-ae4f-4c5a-ae1c-175138a869ea {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.464816] env[61839]: DEBUG nova.compute.manager [req-8b8f9636-8992-4c89-88e5-77f61f07398e req-5b174892-f1df-46c1-9e74-e1e71b353aa7 service nova] [instance: 21e1c5b2-9865-457b-87c8-ce56c3c7b8f9] Received event network-vif-deleted-5dc2a4d8-1803-4434-b9af-037ce20523af {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 961.465089] env[61839]: INFO nova.compute.manager [req-8b8f9636-8992-4c89-88e5-77f61f07398e req-5b174892-f1df-46c1-9e74-e1e71b353aa7 service nova] [instance: 21e1c5b2-9865-457b-87c8-ce56c3c7b8f9] Neutron deleted interface 5dc2a4d8-1803-4434-b9af-037ce20523af; detaching it from the instance and deleting it from the info cache [ 961.465287] env[61839]: DEBUG nova.network.neutron [req-8b8f9636-8992-4c89-88e5-77f61f07398e req-5b174892-f1df-46c1-9e74-e1e71b353aa7 service nova] [instance: 21e1c5b2-9865-457b-87c8-ce56c3c7b8f9] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 961.484360] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-61f94066-337d-4ee6-ac13-e4b101f6af4a tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] [instance: 75344275-bdf2-4526-a101-e62ec270dd72] Reconfiguring VM instance instance-0000005e to attach disk [datastore2] 75344275-bdf2-4526-a101-e62ec270dd72/75344275-bdf2-4526-a101-e62ec270dd72.vmdk or device None with type sparse {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 961.487650] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1b9553c4-ae92-4093-86a4-d34e6cc603fd {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.502314] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-3cb92deb-2f40-434a-9700-7a733b835e57 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4] Updating instance 'fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4' progress to 83 {{(pid=61839) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 961.512715] env[61839]: DEBUG oslo_vmware.api [None req-61f94066-337d-4ee6-ac13-e4b101f6af4a tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Waiting for the task: (returnval){ [ 961.512715] env[61839]: value = "task-1314836" [ 961.512715] env[61839]: _type = "Task" [ 961.512715] 
env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 961.522330] env[61839]: DEBUG oslo_vmware.api [None req-61f94066-337d-4ee6-ac13-e4b101f6af4a tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Task: {'id': task-1314836, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.704690] env[61839]: DEBUG oslo_vmware.api [None req-ed1f6438-e53f-428f-8070-2a7e88ba6407 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Task: {'id': task-1314835, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.489234} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 961.704959] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-ed1f6438-e53f-428f-8070-2a7e88ba6407 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Deleted the datastore file {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 961.705262] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-ed1f6438-e53f-428f-8070-2a7e88ba6407 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 56369316-a445-4a2a-a0a6-967074104e19] Deleted contents of the VM from datastore datastore1 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 961.705466] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-ed1f6438-e53f-428f-8070-2a7e88ba6407 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 56369316-a445-4a2a-a0a6-967074104e19] Instance destroyed {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 961.705693] env[61839]: INFO nova.compute.manager [None req-ed1f6438-e53f-428f-8070-2a7e88ba6407 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 56369316-a445-4a2a-a0a6-967074104e19] Took 1.18 seconds to destroy the instance on the hypervisor. [ 961.705990] env[61839]: DEBUG oslo.service.loopingcall [None req-ed1f6438-e53f-428f-8070-2a7e88ba6407 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 961.706277] env[61839]: DEBUG nova.compute.manager [-] [instance: 56369316-a445-4a2a-a0a6-967074104e19] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 961.706356] env[61839]: DEBUG nova.network.neutron [-] [instance: 56369316-a445-4a2a-a0a6-967074104e19] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 961.885804] env[61839]: DEBUG nova.compute.manager [None req-4ba7cb8a-9f51-4c96-8bb5-515a7d4d8be8 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] [instance: a262845a-0ae2-4e0e-9040-01f0ed37c95c] Start building block device mappings for instance. 
{{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 961.892027] env[61839]: DEBUG nova.objects.base [None req-3cb5a8d7-2dab-4fe1-9636-3e19d5eeb851 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Object Instance lazy-loaded attributes: resources,numa_topology {{(pid=61839) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 961.921270] env[61839]: DEBUG nova.network.neutron [-] [instance: 21e1c5b2-9865-457b-87c8-ce56c3c7b8f9] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 961.967967] env[61839]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0ac7b5b8-6718-44fe-9c4d-f39f6f4f3a4f {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.981129] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a493b56a-628b-473c-bb5e-3dc3553cbf95 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.011478] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-3cb92deb-2f40-434a-9700-7a733b835e57 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4] Updating instance 'fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4' progress to 100 {{(pid=61839) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 962.027363] env[61839]: DEBUG nova.compute.manager [req-8b8f9636-8992-4c89-88e5-77f61f07398e req-5b174892-f1df-46c1-9e74-e1e71b353aa7 service nova] [instance: 21e1c5b2-9865-457b-87c8-ce56c3c7b8f9] Detach interface failed, port_id=5dc2a4d8-1803-4434-b9af-037ce20523af, reason: Instance 21e1c5b2-9865-457b-87c8-ce56c3c7b8f9 could not be found. {{(pid=61839) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 962.038787] env[61839]: DEBUG oslo_vmware.api [None req-61f94066-337d-4ee6-ac13-e4b101f6af4a tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Task: {'id': task-1314836, 'name': ReconfigVM_Task, 'duration_secs': 0.292063} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 962.040020] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-61f94066-337d-4ee6-ac13-e4b101f6af4a tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] [instance: 75344275-bdf2-4526-a101-e62ec270dd72] Reconfigured VM instance instance-0000005e to attach disk [datastore2] 75344275-bdf2-4526-a101-e62ec270dd72/75344275-bdf2-4526-a101-e62ec270dd72.vmdk or device None with type sparse {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 962.042564] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8f970265-aac1-464d-a081-5f914af76f6f {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.050630] env[61839]: DEBUG oslo_vmware.api [None req-61f94066-337d-4ee6-ac13-e4b101f6af4a tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Waiting for the task: (returnval){ [ 962.050630] env[61839]: value = "task-1314837" [ 962.050630] env[61839]: _type = "Task" [ 962.050630] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 962.061780] env[61839]: DEBUG oslo_vmware.api [None req-61f94066-337d-4ee6-ac13-e4b101f6af4a tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Task: {'id': task-1314837, 'name': Rename_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.208023] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e310fdf3-cbb5-43bf-a6b4-8186f74eba2a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.215756] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-753f503d-baad-482e-981d-e9de76dad532 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.248340] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c20130f8-6541-4e65-8faf-3d7d89ac25e0 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.259606] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7fd11f9-8b95-4721-bb1a-aa532b1c94e7 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.283407] env[61839]: DEBUG nova.compute.provider_tree [None req-3cb5a8d7-2dab-4fe1-9636-3e19d5eeb851 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 962.427023] env[61839]: INFO nova.compute.manager [-] [instance: 21e1c5b2-9865-457b-87c8-ce56c3c7b8f9] Took 1.26 seconds to deallocate network for instance. 
[ 962.564118] env[61839]: DEBUG oslo_vmware.api [None req-61f94066-337d-4ee6-ac13-e4b101f6af4a tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Task: {'id': task-1314837, 'name': Rename_Task, 'duration_secs': 0.433909} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 962.564118] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-61f94066-337d-4ee6-ac13-e4b101f6af4a tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] [instance: 75344275-bdf2-4526-a101-e62ec270dd72] Powering on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 962.564118] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f40ee6c2-027f-47e9-a5f2-d765f3ad93d9 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.570642] env[61839]: DEBUG oslo_vmware.api [None req-61f94066-337d-4ee6-ac13-e4b101f6af4a tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Waiting for the task: (returnval){ [ 962.570642] env[61839]: value = "task-1314838" [ 962.570642] env[61839]: _type = "Task" [ 962.570642] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 962.581716] env[61839]: DEBUG oslo_vmware.api [None req-61f94066-337d-4ee6-ac13-e4b101f6af4a tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Task: {'id': task-1314838, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.786676] env[61839]: DEBUG nova.scheduler.client.report [None req-3cb5a8d7-2dab-4fe1-9636-3e19d5eeb851 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 962.901879] env[61839]: DEBUG nova.compute.manager [None req-4ba7cb8a-9f51-4c96-8bb5-515a7d4d8be8 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] [instance: a262845a-0ae2-4e0e-9040-01f0ed37c95c] Start spawning the instance on the hypervisor. 
{{(pid=61839) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 962.904194] env[61839]: DEBUG nova.network.neutron [-] [instance: 56369316-a445-4a2a-a0a6-967074104e19] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 962.930860] env[61839]: DEBUG oslo_concurrency.lockutils [None req-c0666967-91a2-4589-a6fb-7ee1067b5a38 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 962.933303] env[61839]: DEBUG nova.virt.hardware [None req-4ba7cb8a-9f51-4c96-8bb5-515a7d4d8be8 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-18T16:51:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-18T16:50:57Z,direct_url=,disk_format='vmdk',id=e497cc62-282a-4a70-9770-22d80d8a1013,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a98e034276e44534a9feace637762da7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-18T16:50:58Z,virtual_size=,visibility=), allow threads: False {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 962.933539] env[61839]: DEBUG nova.virt.hardware [None req-4ba7cb8a-9f51-4c96-8bb5-515a7d4d8be8 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Flavor limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 962.933697] env[61839]: DEBUG nova.virt.hardware [None req-4ba7cb8a-9f51-4c96-8bb5-515a7d4d8be8 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Image limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 962.933881] env[61839]: DEBUG nova.virt.hardware [None req-4ba7cb8a-9f51-4c96-8bb5-515a7d4d8be8 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Flavor pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 962.934050] env[61839]: DEBUG nova.virt.hardware [None req-4ba7cb8a-9f51-4c96-8bb5-515a7d4d8be8 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Image pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 962.934213] env[61839]: DEBUG nova.virt.hardware [None req-4ba7cb8a-9f51-4c96-8bb5-515a7d4d8be8 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 962.934442] env[61839]: DEBUG nova.virt.hardware [None req-4ba7cb8a-9f51-4c96-8bb5-515a7d4d8be8 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Topology preferred 
VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 962.934605] env[61839]: DEBUG nova.virt.hardware [None req-4ba7cb8a-9f51-4c96-8bb5-515a7d4d8be8 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 962.934815] env[61839]: DEBUG nova.virt.hardware [None req-4ba7cb8a-9f51-4c96-8bb5-515a7d4d8be8 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Got 1 possible topologies {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 962.934998] env[61839]: DEBUG nova.virt.hardware [None req-4ba7cb8a-9f51-4c96-8bb5-515a7d4d8be8 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 962.935249] env[61839]: DEBUG nova.virt.hardware [None req-4ba7cb8a-9f51-4c96-8bb5-515a7d4d8be8 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 962.936770] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64746454-fd85-4be1-a444-6c3d76fdadaf {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.945642] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70988aa4-8f61-4792-8722-27fc58cd261a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.960802] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-4ba7cb8a-9f51-4c96-8bb5-515a7d4d8be8 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] [instance: a262845a-0ae2-4e0e-9040-01f0ed37c95c] Instance VIF info [] {{(pid=61839) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 962.966733] env[61839]: DEBUG oslo.service.loopingcall [None req-4ba7cb8a-9f51-4c96-8bb5-515a7d4d8be8 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 962.967040] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a262845a-0ae2-4e0e-9040-01f0ed37c95c] Creating VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 962.967302] env[61839]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6ea29410-91b0-4d09-9e23-945e73de7dcc {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.986626] env[61839]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 962.986626] env[61839]: value = "task-1314839" [ 962.986626] env[61839]: _type = "Task" [ 962.986626] env[61839]: } to complete. 
{{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 962.995696] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314839, 'name': CreateVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.085354] env[61839]: DEBUG oslo_vmware.api [None req-61f94066-337d-4ee6-ac13-e4b101f6af4a tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Task: {'id': task-1314838, 'name': PowerOnVM_Task} progress is 89%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.290977] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3cb5a8d7-2dab-4fe1-9636-3e19d5eeb851 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.412s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 963.294029] env[61839]: DEBUG oslo_concurrency.lockutils [None req-2110b186-7030-4148-a543-65a3916b2aa1 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 6.957s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 963.407991] env[61839]: INFO nova.compute.manager [-] [instance: 56369316-a445-4a2a-a0a6-967074104e19] Took 1.70 seconds to deallocate network for instance. [ 963.489370] env[61839]: DEBUG nova.compute.manager [req-f9c4aacf-496f-4a90-b65c-18ec67f16291 req-cb0684ce-ae18-408c-b339-bea7ddc6f7b6 service nova] [instance: 56369316-a445-4a2a-a0a6-967074104e19] Received event network-vif-deleted-ef2288b6-c4de-43f8-95c9-22511e164c36 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 963.498834] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314839, 'name': CreateVM_Task} progress is 99%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.583215] env[61839]: DEBUG oslo_vmware.api [None req-61f94066-337d-4ee6-ac13-e4b101f6af4a tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Task: {'id': task-1314838, 'name': PowerOnVM_Task, 'duration_secs': 0.561974} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 963.583523] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-61f94066-337d-4ee6-ac13-e4b101f6af4a tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] [instance: 75344275-bdf2-4526-a101-e62ec270dd72] Powered on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 963.583721] env[61839]: INFO nova.compute.manager [None req-61f94066-337d-4ee6-ac13-e4b101f6af4a tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] [instance: 75344275-bdf2-4526-a101-e62ec270dd72] Took 5.49 seconds to spawn the instance on the hypervisor. 
[ 963.583904] env[61839]: DEBUG nova.compute.manager [None req-61f94066-337d-4ee6-ac13-e4b101f6af4a tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] [instance: 75344275-bdf2-4526-a101-e62ec270dd72] Checking state {{(pid=61839) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 963.584692] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c6d8844-01ab-4e94-a4cb-ad95ee3694f5 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.800506] env[61839]: INFO nova.compute.claims [None req-2110b186-7030-4148-a543-65a3916b2aa1 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 23ee24d5-bccd-497d-a53f-b9723fd9c707] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 963.805661] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3cb5a8d7-2dab-4fe1-9636-3e19d5eeb851 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Lock "e265dcd3-6ab5-44b1-85be-bad934ebdb79" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 31.463s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 963.806786] env[61839]: DEBUG oslo_concurrency.lockutils [None req-f77f5ae0-de24-4a80-93a3-b3e18f3a5fb0 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Lock "e265dcd3-6ab5-44b1-85be-bad934ebdb79" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 9.824s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 963.807377] env[61839]: DEBUG oslo_concurrency.lockutils [None req-f77f5ae0-de24-4a80-93a3-b3e18f3a5fb0 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Acquiring lock "e265dcd3-6ab5-44b1-85be-bad934ebdb79-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 963.807377] env[61839]: DEBUG oslo_concurrency.lockutils [None req-f77f5ae0-de24-4a80-93a3-b3e18f3a5fb0 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Lock "e265dcd3-6ab5-44b1-85be-bad934ebdb79-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 963.807578] env[61839]: DEBUG oslo_concurrency.lockutils [None req-f77f5ae0-de24-4a80-93a3-b3e18f3a5fb0 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Lock "e265dcd3-6ab5-44b1-85be-bad934ebdb79-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 963.809071] env[61839]: INFO nova.compute.manager [None req-f77f5ae0-de24-4a80-93a3-b3e18f3a5fb0 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: e265dcd3-6ab5-44b1-85be-bad934ebdb79] Terminating instance [ 963.810673] env[61839]: DEBUG 
nova.compute.manager [None req-f77f5ae0-de24-4a80-93a3-b3e18f3a5fb0 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: e265dcd3-6ab5-44b1-85be-bad934ebdb79] Start destroying the instance on the hypervisor. {{(pid=61839) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 963.810931] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-f77f5ae0-de24-4a80-93a3-b3e18f3a5fb0 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: e265dcd3-6ab5-44b1-85be-bad934ebdb79] Destroying instance {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 963.811229] env[61839]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-bcec11cb-e41c-4943-816d-6b4b4a12f47e {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.821797] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfecbcca-0ae2-4e05-b9bd-30356c559139 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.854731] env[61839]: WARNING nova.virt.vmwareapi.vmops [None req-f77f5ae0-de24-4a80-93a3-b3e18f3a5fb0 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: e265dcd3-6ab5-44b1-85be-bad934ebdb79] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance e265dcd3-6ab5-44b1-85be-bad934ebdb79 could not be found. [ 963.854956] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-f77f5ae0-de24-4a80-93a3-b3e18f3a5fb0 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: e265dcd3-6ab5-44b1-85be-bad934ebdb79] Instance destroyed {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 963.855203] env[61839]: INFO nova.compute.manager [None req-f77f5ae0-de24-4a80-93a3-b3e18f3a5fb0 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: e265dcd3-6ab5-44b1-85be-bad934ebdb79] Took 0.04 seconds to destroy the instance on the hypervisor. [ 963.855473] env[61839]: DEBUG oslo.service.loopingcall [None req-f77f5ae0-de24-4a80-93a3-b3e18f3a5fb0 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 963.855761] env[61839]: DEBUG nova.compute.manager [-] [instance: e265dcd3-6ab5-44b1-85be-bad934ebdb79] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 963.855882] env[61839]: DEBUG nova.network.neutron [-] [instance: e265dcd3-6ab5-44b1-85be-bad934ebdb79] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 963.914845] env[61839]: DEBUG oslo_concurrency.lockutils [None req-ed1f6438-e53f-428f-8070-2a7e88ba6407 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 963.999874] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314839, 'name': CreateVM_Task, 'duration_secs': 0.615482} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 964.000119] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a262845a-0ae2-4e0e-9040-01f0ed37c95c] Created VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 964.000495] env[61839]: DEBUG oslo_concurrency.lockutils [None req-4ba7cb8a-9f51-4c96-8bb5-515a7d4d8be8 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 964.000664] env[61839]: DEBUG oslo_concurrency.lockutils [None req-4ba7cb8a-9f51-4c96-8bb5-515a7d4d8be8 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 964.000993] env[61839]: DEBUG oslo_concurrency.lockutils [None req-4ba7cb8a-9f51-4c96-8bb5-515a7d4d8be8 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 964.002571] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5f7b29a5-65f0-4c69-bb09-1eff68581120 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.008282] env[61839]: DEBUG oslo_vmware.api [None req-4ba7cb8a-9f51-4c96-8bb5-515a7d4d8be8 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Waiting for the task: (returnval){ [ 964.008282] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52cf7c40-8d03-4249-12f5-a6ed558e0270" [ 964.008282] env[61839]: _type = "Task" [ 964.008282] env[61839]: } to complete. 
{{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 964.017665] env[61839]: DEBUG oslo_vmware.api [None req-4ba7cb8a-9f51-4c96-8bb5-515a7d4d8be8 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52cf7c40-8d03-4249-12f5-a6ed558e0270, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 964.102696] env[61839]: INFO nova.compute.manager [None req-61f94066-337d-4ee6-ac13-e4b101f6af4a tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] [instance: 75344275-bdf2-4526-a101-e62ec270dd72] Took 21.12 seconds to build instance.
[ 964.306539] env[61839]: INFO nova.compute.resource_tracker [None req-2110b186-7030-4148-a543-65a3916b2aa1 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 23ee24d5-bccd-497d-a53f-b9723fd9c707] Updating resource usage from migration 80376647-f9cc-4390-b3a3-e5cadc03cd80
[ 964.520725] env[61839]: DEBUG oslo_vmware.api [None req-4ba7cb8a-9f51-4c96-8bb5-515a7d4d8be8 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52cf7c40-8d03-4249-12f5-a6ed558e0270, 'name': SearchDatastore_Task, 'duration_secs': 0.010264} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 964.521882] env[61839]: DEBUG oslo_concurrency.lockutils [None req-4ba7cb8a-9f51-4c96-8bb5-515a7d4d8be8 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 964.522141] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-4ba7cb8a-9f51-4c96-8bb5-515a7d4d8be8 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] [instance: a262845a-0ae2-4e0e-9040-01f0ed37c95c] Processing image e497cc62-282a-4a70-9770-22d80d8a1013 {{(pid=61839) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}}
[ 964.522390] env[61839]: DEBUG oslo_concurrency.lockutils [None req-4ba7cb8a-9f51-4c96-8bb5-515a7d4d8be8 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 964.522560] env[61839]: DEBUG oslo_concurrency.lockutils [None req-4ba7cb8a-9f51-4c96-8bb5-515a7d4d8be8 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 964.522749] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-4ba7cb8a-9f51-4c96-8bb5-515a7d4d8be8 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 964.523461] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d24528d-fb4f-4660-9d5f-e8adb9ca9889 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 964.525804] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-cb21e5fe-a9eb-48d5-ba07-d5c981523fd2 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 964.532142] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f99e6ae-fb73-4dcf-8242-2c6eb6ee83b0 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 964.535814] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-4ba7cb8a-9f51-4c96-8bb5-515a7d4d8be8 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 964.535987] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-4ba7cb8a-9f51-4c96-8bb5-515a7d4d8be8 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61839) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}}
[ 964.536953] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7a40578a-175d-49e6-8d65-6ab5c751a818 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 964.563935] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cc85e9f-564b-4f18-b615-b031daac9711 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 964.567504] env[61839]: DEBUG oslo_vmware.api [None req-4ba7cb8a-9f51-4c96-8bb5-515a7d4d8be8 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Waiting for the task: (returnval){
[ 964.567504] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52eee51c-df4f-9885-83a8-c3bc55d692ed"
[ 964.567504] env[61839]: _type = "Task"
[ 964.567504] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 964.573811] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12e75e8e-5ae0-46e1-9e4e-8efac4633e50 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 964.582220] env[61839]: DEBUG oslo_vmware.api [None req-4ba7cb8a-9f51-4c96-8bb5-515a7d4d8be8 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52eee51c-df4f-9885-83a8-c3bc55d692ed, 'name': SearchDatastore_Task, 'duration_secs': 0.009496} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 964.583307] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2fb303b1-a62d-4dff-9041-8d388125111a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 964.592685] env[61839]: DEBUG nova.compute.provider_tree [None req-2110b186-7030-4148-a543-65a3916b2aa1 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 964.597016] env[61839]: DEBUG oslo_vmware.api [None req-4ba7cb8a-9f51-4c96-8bb5-515a7d4d8be8 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Waiting for the task: (returnval){
[ 964.597016] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]528150f1-8eaf-1ae1-ab41-c3a7741194c6"
[ 964.597016] env[61839]: _type = "Task"
[ 964.597016] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 964.604870] env[61839]: DEBUG oslo_concurrency.lockutils [None req-61f94066-337d-4ee6-ac13-e4b101f6af4a tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Lock "75344275-bdf2-4526-a101-e62ec270dd72" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.634s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 964.605125] env[61839]: DEBUG nova.network.neutron [-] [instance: e265dcd3-6ab5-44b1-85be-bad934ebdb79] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 964.606161] env[61839]: DEBUG oslo_vmware.api [None req-4ba7cb8a-9f51-4c96-8bb5-515a7d4d8be8 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]528150f1-8eaf-1ae1-ab41-c3a7741194c6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 964.627589] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0d4d06f1-d301-4bef-8b8f-4425a4530ba6 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Acquiring lock "fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 964.628505] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0d4d06f1-d301-4bef-8b8f-4425a4530ba6 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Lock "fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.001s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 964.628631] env[61839]: DEBUG nova.compute.manager [None req-0d4d06f1-d301-4bef-8b8f-4425a4530ba6 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4] Going to confirm migration 2 {{(pid=61839) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:4783}}
[ 965.096030] env[61839]: DEBUG nova.scheduler.client.report [None req-2110b186-7030-4148-a543-65a3916b2aa1 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 965.109113] env[61839]: INFO nova.compute.manager [-] [instance: e265dcd3-6ab5-44b1-85be-bad934ebdb79] Took 1.25 seconds to deallocate network for instance.
[ 965.109483] env[61839]: DEBUG oslo_vmware.api [None req-4ba7cb8a-9f51-4c96-8bb5-515a7d4d8be8 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]528150f1-8eaf-1ae1-ab41-c3a7741194c6, 'name': SearchDatastore_Task, 'duration_secs': 0.008897} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 965.111676] env[61839]: DEBUG oslo_concurrency.lockutils [None req-4ba7cb8a-9f51-4c96-8bb5-515a7d4d8be8 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 965.111825] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-4ba7cb8a-9f51-4c96-8bb5-515a7d4d8be8 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk to [datastore1] a262845a-0ae2-4e0e-9040-01f0ed37c95c/a262845a-0ae2-4e0e-9040-01f0ed37c95c.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}}
[ 965.114528] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f527d0b7-e1b3-4a40-9a3b-74016adbb506 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 965.122073] env[61839]: DEBUG oslo_vmware.api [None req-4ba7cb8a-9f51-4c96-8bb5-515a7d4d8be8 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Waiting for the task: (returnval){
[ 965.122073] env[61839]: value = "task-1314840"
[ 965.122073] env[61839]: _type = "Task"
[ 965.122073] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 965.130606] env[61839]: DEBUG oslo_vmware.api [None req-4ba7cb8a-9f51-4c96-8bb5-515a7d4d8be8 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Task: {'id': task-1314840, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
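The SearchDatastore_Task/CopyVirtualDisk_Task activity above is oslo.vmware's standard session pattern: invoke_api() issues the SOAP request and returns a task reference, and wait_for_task() polls it, producing the "Waiting for the task", "progress is N%" and "completed successfully" lines recorded here. A minimal Python sketch of that pattern, using real oslo.vmware calls but a hypothetical vCenter endpoint, credentials, and datastore paths:

    from oslo_vmware import api as vmware_api

    # Placeholder endpoint/credentials, not the vCenter host in this log.
    session = vmware_api.VMwareAPISession(
        'vcenter.example.org', 'devstack', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    # invoke_api() sends the SOAP call (here CopyVirtualDisk_Task) and
    # returns a task reference; the disk paths are illustrative.
    vdm = session.vim.service_content.virtualDiskManager
    task = session.invoke_api(
        session.vim, 'CopyVirtualDisk_Task', vdm,
        sourceName='[datastore1] devstack-image-cache_base/image.vmdk',
        destName='[datastore1] instance-dir/instance.vmdk')

    # wait_for_task() polls the task at task_poll_interval, emitting the
    # "progress is N%" / "completed successfully" debug lines seen above.
    session.wait_for_task(task)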
[ 965.226598] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0d4d06f1-d301-4bef-8b8f-4425a4530ba6 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Acquiring lock "refresh_cache-fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 965.226796] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0d4d06f1-d301-4bef-8b8f-4425a4530ba6 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Acquired lock "refresh_cache-fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 965.226988] env[61839]: DEBUG nova.network.neutron [None req-0d4d06f1-d301-4bef-8b8f-4425a4530ba6 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}}
[ 965.227202] env[61839]: DEBUG nova.objects.instance [None req-0d4d06f1-d301-4bef-8b8f-4425a4530ba6 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Lazy-loading 'info_cache' on Instance uuid fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4 {{(pid=61839) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}}
[ 965.604202] env[61839]: DEBUG oslo_concurrency.lockutils [None req-2110b186-7030-4148-a543-65a3916b2aa1 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.310s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 965.604607] env[61839]: INFO nova.compute.manager [None req-2110b186-7030-4148-a543-65a3916b2aa1 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 23ee24d5-bccd-497d-a53f-b9723fd9c707] Migrating
[ 965.611271] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0abcef39-20e2-4d1f-9e70-67180de20850 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.569s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 965.612770] env[61839]: INFO nova.compute.claims [None req-0abcef39-20e2-4d1f-9e70-67180de20850 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 73b83239-bbc8-41d1-aec3-2b4519c320af] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 965.635378] env[61839]: DEBUG oslo_vmware.api [None req-4ba7cb8a-9f51-4c96-8bb5-515a7d4d8be8 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Task: {'id': task-1314840, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.445598} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 965.637313] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-4ba7cb8a-9f51-4c96-8bb5-515a7d4d8be8 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk to [datastore1] a262845a-0ae2-4e0e-9040-01f0ed37c95c/a262845a-0ae2-4e0e-9040-01f0ed37c95c.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}}
[ 965.637460] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-4ba7cb8a-9f51-4c96-8bb5-515a7d4d8be8 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] [instance: a262845a-0ae2-4e0e-9040-01f0ed37c95c] Extending root virtual disk to 1048576 {{(pid=61839) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}}
[ 965.639823] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2ef467e0-de14-49b6-8eed-d2659a5a3771 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 965.648575] env[61839]: DEBUG oslo_vmware.api [None req-4ba7cb8a-9f51-4c96-8bb5-515a7d4d8be8 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Waiting for the task: (returnval){
[ 965.648575] env[61839]: value = "task-1314841"
[ 965.648575] env[61839]: _type = "Task"
[ 965.648575] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 965.658423] env[61839]: DEBUG oslo_vmware.api [None req-4ba7cb8a-9f51-4c96-8bb5-515a7d4d8be8 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Task: {'id': task-1314841, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 966.127534] env[61839]: DEBUG oslo_concurrency.lockutils [None req-2110b186-7030-4148-a543-65a3916b2aa1 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Acquiring lock "refresh_cache-23ee24d5-bccd-497d-a53f-b9723fd9c707" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 966.127534] env[61839]: DEBUG oslo_concurrency.lockutils [None req-2110b186-7030-4148-a543-65a3916b2aa1 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Acquired lock "refresh_cache-23ee24d5-bccd-497d-a53f-b9723fd9c707" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 966.127534] env[61839]: DEBUG nova.network.neutron [None req-2110b186-7030-4148-a543-65a3916b2aa1 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 23ee24d5-bccd-497d-a53f-b9723fd9c707] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}}
[ 966.145228] env[61839]: DEBUG oslo_concurrency.lockutils [None req-f77f5ae0-de24-4a80-93a3-b3e18f3a5fb0 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Lock "e265dcd3-6ab5-44b1-85be-bad934ebdb79" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 2.338s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 966.159303] env[61839]: DEBUG oslo_vmware.api [None req-4ba7cb8a-9f51-4c96-8bb5-515a7d4d8be8 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Task: {'id': task-1314841, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075458} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 966.160176] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-4ba7cb8a-9f51-4c96-8bb5-515a7d4d8be8 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] [instance: a262845a-0ae2-4e0e-9040-01f0ed37c95c] Extended root virtual disk {{(pid=61839) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}}
[ 966.161020] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d64596f-62ec-4ded-9f7c-e969ce9ac766 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 966.182171] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-4ba7cb8a-9f51-4c96-8bb5-515a7d4d8be8 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] [instance: a262845a-0ae2-4e0e-9040-01f0ed37c95c] Reconfiguring VM instance instance-0000005f to attach disk [datastore1] a262845a-0ae2-4e0e-9040-01f0ed37c95c/a262845a-0ae2-4e0e-9040-01f0ed37c95c.vmdk or device None with type sparse {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}}
[ 966.182709] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-43a8adf4-862a-4842-9c0b-416279286ac1 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 966.202825] env[61839]: DEBUG oslo_vmware.api [None req-4ba7cb8a-9f51-4c96-8bb5-515a7d4d8be8 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Waiting for the task: (returnval){
[ 966.202825] env[61839]: value = "task-1314842"
[ 966.202825] env[61839]: _type = "Task"
[ 966.202825] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 966.211553] env[61839]: DEBUG oslo_vmware.api [None req-4ba7cb8a-9f51-4c96-8bb5-515a7d4d8be8 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Task: {'id': task-1314842, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 966.462932] env[61839]: DEBUG nova.network.neutron [None req-0d4d06f1-d301-4bef-8b8f-4425a4530ba6 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4] Updating instance_info_cache with network_info: [{"id": "bc315481-8651-4be3-bdd5-269b569b2817", "address": "fa:16:3e:ce:61:f0", "network": {"id": "54f10e74-ee7b-499c-a6b6-a27d337719cd", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-79581761-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.143", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5f789f3900a347b59c491e9d141fb9e7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe20ef0e-0991-44d7-887d-08dddac0b56b", "external-id": "nsx-vlan-transportzone-991", "segmentation_id": 991, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbc315481-86", "ovs_interfaceid": "bc315481-8651-4be3-bdd5-269b569b2817", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 966.715711] env[61839]: DEBUG oslo_vmware.api [None req-4ba7cb8a-9f51-4c96-8bb5-515a7d4d8be8 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Task: {'id': task-1314842, 'name': ReconfigVM_Task, 'duration_secs': 0.304361} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 966.716048] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-4ba7cb8a-9f51-4c96-8bb5-515a7d4d8be8 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] [instance: a262845a-0ae2-4e0e-9040-01f0ed37c95c] Reconfigured VM instance instance-0000005f to attach disk [datastore1] a262845a-0ae2-4e0e-9040-01f0ed37c95c/a262845a-0ae2-4e0e-9040-01f0ed37c95c.vmdk or device None with type sparse {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}}
[ 966.716679] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d415d1e4-ab65-4a03-a3e9-dfcd3ce826f0 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 966.724736] env[61839]: DEBUG oslo_vmware.api [None req-4ba7cb8a-9f51-4c96-8bb5-515a7d4d8be8 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Waiting for the task: (returnval){
[ 966.724736] env[61839]: value = "task-1314843"
[ 966.724736] env[61839]: _type = "Task"
[ 966.724736] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
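The network_info blobs dumped by update_instance_cache_with_nw_info are JSON, so the addressing can be pulled straight out of a captured log line. A small sketch (the literal below is abridged from the fc42bc8d-... entry above; only the fields this snippet touches are kept):

    import json

    network_info = json.loads('''
    [{"id": "bc315481-8651-4be3-bdd5-269b569b2817",
      "address": "fa:16:3e:ce:61:f0",
      "network": {"subnets": [{"cidr": "192.168.128.0/28",
        "ips": [{"address": "192.168.128.9", "type": "fixed",
                 "floating_ips": [{"address": "10.180.180.143",
                                   "type": "floating"}]}]}]}}]''')

    # Walk VIF -> subnet -> IP, printing fixed and floating addresses.
    for vif in network_info:
        for subnet in vif["network"]["subnets"]:
            for ip in subnet["ips"]:
                print(vif["id"], ip["type"], ip["address"])
                for fip in ip.get("floating_ips", []):
                    print(vif["id"], fip["type"], fip["address"])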
[ 966.736684] env[61839]: DEBUG oslo_vmware.api [None req-4ba7cb8a-9f51-4c96-8bb5-515a7d4d8be8 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Task: {'id': task-1314843, 'name': Rename_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 966.836095] env[61839]: DEBUG nova.network.neutron [None req-2110b186-7030-4148-a543-65a3916b2aa1 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 23ee24d5-bccd-497d-a53f-b9723fd9c707] Updating instance_info_cache with network_info: [{"id": "56222a14-6e55-4e9b-a963-5d868763ad21", "address": "fa:16:3e:5e:f1:1e", "network": {"id": "2334f355-8fd7-4df2-ba1c-a28b5fde97f2", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-202913707-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8935bcc7ee644cb7a2a33557a708189c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "37434b93-dfdc-4a3f-bf5a-9f2cbe25a754", "external-id": "nsx-vlan-transportzone-676", "segmentation_id": 676, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap56222a14-6e", "ovs_interfaceid": "56222a14-6e55-4e9b-a963-5d868763ad21", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 966.869789] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3560b6a3-38e1-4082-80dc-ba1e60a79e5a tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Acquiring lock "a87f3a17-0a97-4b47-bc95-eee5975f8203" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 966.870050] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3560b6a3-38e1-4082-80dc-ba1e60a79e5a tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Lock "a87f3a17-0a97-4b47-bc95-eee5975f8203" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 966.877263] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6a08ce1-81d7-4794-be71-168e0b44b216 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 966.885346] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5976c4ee-56a5-4555-a452-d5b94a486bf6 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 966.919176] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0703543-0095-49e6-b347-0d4b0f533ec6 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 966.927364] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a037b81-011e-4cc7-8ef9-78d082e5c20e {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 966.942237] env[61839]: DEBUG nova.compute.provider_tree [None req-0abcef39-20e2-4d1f-9e70-67180de20850 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 966.966197] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0d4d06f1-d301-4bef-8b8f-4425a4530ba6 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Releasing lock "refresh_cache-fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 966.966436] env[61839]: DEBUG nova.objects.instance [None req-0d4d06f1-d301-4bef-8b8f-4425a4530ba6 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Lazy-loading 'migration_context' on Instance uuid fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4 {{(pid=61839) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}}
[ 967.236208] env[61839]: DEBUG oslo_vmware.api [None req-4ba7cb8a-9f51-4c96-8bb5-515a7d4d8be8 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Task: {'id': task-1314843, 'name': Rename_Task, 'duration_secs': 0.154375} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 967.236506] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-4ba7cb8a-9f51-4c96-8bb5-515a7d4d8be8 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] [instance: a262845a-0ae2-4e0e-9040-01f0ed37c95c] Powering on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}}
[ 967.236766] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-13c497c9-bf47-4342-9c93-d24c150ca687 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 967.243832] env[61839]: DEBUG oslo_vmware.api [None req-4ba7cb8a-9f51-4c96-8bb5-515a7d4d8be8 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Waiting for the task: (returnval){
[ 967.243832] env[61839]: value = "task-1314844"
[ 967.243832] env[61839]: _type = "Task"
[ 967.243832] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 967.251171] env[61839]: DEBUG oslo_vmware.api [None req-4ba7cb8a-9f51-4c96-8bb5-515a7d4d8be8 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Task: {'id': task-1314844, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 967.338089] env[61839]: DEBUG oslo_concurrency.lockutils [None req-2110b186-7030-4148-a543-65a3916b2aa1 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Releasing lock "refresh_cache-23ee24d5-bccd-497d-a53f-b9723fd9c707" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 967.372250] env[61839]: DEBUG nova.compute.manager [None req-3560b6a3-38e1-4082-80dc-ba1e60a79e5a tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: a87f3a17-0a97-4b47-bc95-eee5975f8203] Starting instance... {{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}}
[ 967.447051] env[61839]: DEBUG nova.scheduler.client.report [None req-0abcef39-20e2-4d1f-9e70-67180de20850 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 967.469083] env[61839]: DEBUG nova.objects.base [None req-0d4d06f1-d301-4bef-8b8f-4425a4530ba6 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Object Instance lazy-loaded attributes: info_cache,migration_context {{(pid=61839) wrapper /opt/stack/nova/nova/objects/base.py:126}}
[ 967.470032] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8e0636d-7f67-4d19-816f-8cc4d0a1d3c4 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 967.491729] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9b21464e-163f-45e4-9e73-f297dbe9489a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 967.497413] env[61839]: DEBUG oslo_vmware.api [None req-0d4d06f1-d301-4bef-8b8f-4425a4530ba6 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Waiting for the task: (returnval){
[ 967.497413] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]5224f59b-757c-e957-cced-73d176572475"
[ 967.497413] env[61839]: _type = "Task"
[ 967.497413] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 968.182690] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0abcef39-20e2-4d1f-9e70-67180de20850 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.571s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 968.183199] env[61839]: DEBUG nova.compute.manager [None req-0abcef39-20e2-4d1f-9e70-67180de20850 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 73b83239-bbc8-41d1-aec3-2b4519c320af] Start building networks asynchronously for instance. {{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}}
[ 968.185322] env[61839]: DEBUG oslo_vmware.api [None req-0d4d06f1-d301-4bef-8b8f-4425a4530ba6 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]5224f59b-757c-e957-cced-73d176572475, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 968.185499] env[61839]: WARNING oslo_vmware.common.loopingcall [None req-0d4d06f1-d301-4bef-8b8f-4425a4530ba6 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] task run outlasted interval by 0.18787900000000002 sec
[ 968.189111] env[61839]: DEBUG oslo_concurrency.lockutils [None req-b4d0a2e1-0649-414e-a6e9-a456893a9b36 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.777s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 968.190395] env[61839]: INFO nova.compute.claims [None req-b4d0a2e1-0649-414e-a6e9-a456893a9b36 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 3c832102-cacc-4dd8-a336-2aa1d8bd8116] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 968.206371] env[61839]: DEBUG oslo_vmware.api [None req-0d4d06f1-d301-4bef-8b8f-4425a4530ba6 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]5224f59b-757c-e957-cced-73d176572475, 'name': SearchDatastore_Task, 'duration_secs': 0.007384} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 968.206371] env[61839]: DEBUG oslo_vmware.api [None req-4ba7cb8a-9f51-4c96-8bb5-515a7d4d8be8 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Task: {'id': task-1314844, 'name': PowerOnVM_Task, 'duration_secs': 0.459936} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 968.206371] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0d4d06f1-d301-4bef-8b8f-4425a4530ba6 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 968.206371] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-4ba7cb8a-9f51-4c96-8bb5-515a7d4d8be8 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] [instance: a262845a-0ae2-4e0e-9040-01f0ed37c95c] Powered on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}}
[ 968.206658] env[61839]: INFO nova.compute.manager [None req-4ba7cb8a-9f51-4c96-8bb5-515a7d4d8be8 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] [instance: a262845a-0ae2-4e0e-9040-01f0ed37c95c] Took 5.30 seconds to spawn the instance on the hypervisor.
[ 968.206658] env[61839]: DEBUG nova.compute.manager [None req-4ba7cb8a-9f51-4c96-8bb5-515a7d4d8be8 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] [instance: a262845a-0ae2-4e0e-9040-01f0ed37c95c] Checking state {{(pid=61839) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}}
[ 968.207441] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc3e2234-6f56-4e8b-a5b9-98138ae3ce14 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 968.217282] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3560b6a3-38e1-4082-80dc-ba1e60a79e5a tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 968.687575] env[61839]: DEBUG nova.compute.utils [None req-0abcef39-20e2-4d1f-9e70-67180de20850 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Using /dev/sd instead of None {{(pid=61839) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}}
[ 968.691287] env[61839]: DEBUG nova.compute.manager [None req-0abcef39-20e2-4d1f-9e70-67180de20850 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 73b83239-bbc8-41d1-aec3-2b4519c320af] Allocating IP information in the background. {{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}}
[ 968.691528] env[61839]: DEBUG nova.network.neutron [None req-0abcef39-20e2-4d1f-9e70-67180de20850 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 73b83239-bbc8-41d1-aec3-2b4519c320af] allocate_for_instance() {{(pid=61839) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}}
[ 968.725424] env[61839]: INFO nova.compute.manager [None req-4ba7cb8a-9f51-4c96-8bb5-515a7d4d8be8 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] [instance: a262845a-0ae2-4e0e-9040-01f0ed37c95c] Took 25.38 seconds to build instance.
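All of the Acquiring/Acquired/Releasing lines and the ":: waited N.NNNs / :: held N.NNNs" accounting above come from oslo.concurrency's lockutils, which nova uses to serialize work such as ResourceTracker.instance_claim on the "compute_resources" lock. A minimal sketch of both spellings of that pattern (the lock name matches the log; the guarded bodies are hypothetical placeholders):

    from oslo_concurrency import lockutils

    # Decorator form: every call serializes on the named lock and, at DEBUG
    # level, logs the same acquired/released lines with wait and hold times.
    @lockutils.synchronized('compute_resources', 'nova-')
    def instance_claim():
        pass  # placeholder critical section

    # Context-manager form, equivalent locking behaviour.
    with lockutils.lock('compute_resources', lock_file_prefix='nova-'):
        pass  # placeholder critical section

The long waits visible above (e.g. ":: waited 9.777s") are exactly this serialization: several concurrent builds contending for the one "compute_resources" lock.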
[ 968.733778] env[61839]: DEBUG nova.policy [None req-0abcef39-20e2-4d1f-9e70-67180de20850 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'cd83e8a2f63d4ae38c5989c1e3824e3e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '48d8c406ff504d71bba5fb74caf11c14', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61839) authorize /opt/stack/nova/nova/policy.py:201}}
[ 969.020411] env[61839]: DEBUG nova.network.neutron [None req-0abcef39-20e2-4d1f-9e70-67180de20850 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 73b83239-bbc8-41d1-aec3-2b4519c320af] Successfully created port: 3dbee357-54cc-4fa4-826b-24aa98397b45 {{(pid=61839) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}}
[ 969.191885] env[61839]: DEBUG nova.compute.manager [None req-0abcef39-20e2-4d1f-9e70-67180de20850 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 73b83239-bbc8-41d1-aec3-2b4519c320af] Start building block device mappings for instance. {{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}}
[ 969.198195] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cf58b13-9e36-411b-9e9f-4167d9779dd5 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 969.224295] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-2110b186-7030-4148-a543-65a3916b2aa1 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 23ee24d5-bccd-497d-a53f-b9723fd9c707] Updating instance '23ee24d5-bccd-497d-a53f-b9723fd9c707' progress to 0 {{(pid=61839) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}}
[ 969.229021] env[61839]: DEBUG oslo_concurrency.lockutils [None req-4ba7cb8a-9f51-4c96-8bb5-515a7d4d8be8 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Lock "a262845a-0ae2-4e0e-9040-01f0ed37c95c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 26.891s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 969.478590] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e7098b1-9558-42e2-a7b8-2a67eebf9539 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 969.487986] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57d7e566-d398-4711-86c2-6de79fb59047 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 969.517935] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-464f2984-e719-4cff-8a6c-9999301457d0 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 969.527294] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69106a31-6f17-4037-870e-59e193f1ad82 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 969.542103] env[61839]: DEBUG nova.compute.provider_tree [None req-b4d0a2e1-0649-414e-a6e9-a456893a9b36 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 969.730650] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-2110b186-7030-4148-a543-65a3916b2aa1 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 23ee24d5-bccd-497d-a53f-b9723fd9c707] Powering off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}}
[ 969.730650] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a28d4e2f-8120-4a84-87e5-6cfbba2813d7 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 969.740307] env[61839]: DEBUG oslo_vmware.api [None req-2110b186-7030-4148-a543-65a3916b2aa1 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Waiting for the task: (returnval){
[ 969.740307] env[61839]: value = "task-1314845"
[ 969.740307] env[61839]: _type = "Task"
[ 969.740307] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 969.748989] env[61839]: DEBUG oslo_vmware.api [None req-2110b186-7030-4148-a543-65a3916b2aa1 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Task: {'id': task-1314845, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 969.758935] env[61839]: INFO nova.compute.manager [None req-162b3bf6-5d8f-4f91-9087-bf5f3018ad49 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] [instance: a262845a-0ae2-4e0e-9040-01f0ed37c95c] Rebuilding instance
[ 969.796598] env[61839]: DEBUG nova.compute.manager [None req-162b3bf6-5d8f-4f91-9087-bf5f3018ad49 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] [instance: a262845a-0ae2-4e0e-9040-01f0ed37c95c] Checking state {{(pid=61839) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}}
[ 969.797501] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0db5aee2-61a2-4584-a5af-fdf53ab34485 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 970.045602] env[61839]: DEBUG nova.scheduler.client.report [None req-b4d0a2e1-0649-414e-a6e9-a456893a9b36 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 970.203165] env[61839]: DEBUG nova.compute.manager [None req-0abcef39-20e2-4d1f-9e70-67180de20850 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 73b83239-bbc8-41d1-aec3-2b4519c320af] Start spawning the instance on the hypervisor. {{(pid=61839) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}}
[ 970.230989] env[61839]: DEBUG nova.virt.hardware [None req-0abcef39-20e2-4d1f-9e70-67180de20850 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-18T16:51:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-18T16:50:57Z,direct_url=<?>,disk_format='vmdk',id=e497cc62-282a-4a70-9770-22d80d8a1013,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a98e034276e44534a9feace637762da7',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-10-18T16:50:58Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}}
[ 970.231398] env[61839]: DEBUG nova.virt.hardware [None req-0abcef39-20e2-4d1f-9e70-67180de20850 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Flavor limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}}
[ 970.231649] env[61839]: DEBUG nova.virt.hardware [None req-0abcef39-20e2-4d1f-9e70-67180de20850 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Image limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 970.231941] env[61839]: DEBUG nova.virt.hardware [None req-0abcef39-20e2-4d1f-9e70-67180de20850 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Flavor pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}}
[ 970.232195] env[61839]: DEBUG nova.virt.hardware [None req-0abcef39-20e2-4d1f-9e70-67180de20850 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Image pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 970.232418] env[61839]: DEBUG nova.virt.hardware [None req-0abcef39-20e2-4d1f-9e70-67180de20850 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}}
[ 970.232723] env[61839]: DEBUG nova.virt.hardware [None req-0abcef39-20e2-4d1f-9e70-67180de20850 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}}
[ 970.232957] env[61839]: DEBUG nova.virt.hardware [None req-0abcef39-20e2-4d1f-9e70-67180de20850 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}}
[ 970.233222] env[61839]: DEBUG nova.virt.hardware [None req-0abcef39-20e2-4d1f-9e70-67180de20850 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Got 1 possible topologies {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}}
[ 970.233466] env[61839]: DEBUG nova.virt.hardware [None req-0abcef39-20e2-4d1f-9e70-67180de20850 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}}
[ 970.233722] env[61839]: DEBUG nova.virt.hardware [None req-0abcef39-20e2-4d1f-9e70-67180de20850 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
[ 970.235016] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57fe9cb1-21aa-452a-9394-f7d2316caf51 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 970.254835] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-779abb64-e911-4dfa-9d37-1b1f3b6f6869 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 970.258724] env[61839]: DEBUG oslo_vmware.api [None req-2110b186-7030-4148-a543-65a3916b2aa1 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Task: {'id': task-1314845, 'name': PowerOffVM_Task, 'duration_secs': 0.251271} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 970.259006] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-2110b186-7030-4148-a543-65a3916b2aa1 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 23ee24d5-bccd-497d-a53f-b9723fd9c707] Powered off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}}
[ 970.259209] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-2110b186-7030-4148-a543-65a3916b2aa1 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 23ee24d5-bccd-497d-a53f-b9723fd9c707] Updating instance '23ee24d5-bccd-497d-a53f-b9723fd9c707' progress to 17 {{(pid=61839) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}}
[ 970.308388] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-162b3bf6-5d8f-4f91-9087-bf5f3018ad49 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] [instance: a262845a-0ae2-4e0e-9040-01f0ed37c95c] Powering off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}}
[ 970.308732] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e554f772-2c18-4bcf-9f26-f730cf1ee2ef {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 970.317903] env[61839]: DEBUG oslo_vmware.api [None req-162b3bf6-5d8f-4f91-9087-bf5f3018ad49 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Waiting for the task: (returnval){
[ 970.317903] env[61839]: value = "task-1314846"
[ 970.317903] env[61839]: _type = "Task"
[ 970.317903] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 970.328290] env[61839]: DEBUG oslo_vmware.api [None req-162b3bf6-5d8f-4f91-9087-bf5f3018ad49 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Task: {'id': task-1314846, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
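Each "completed successfully" entry above carries the task name and a 'duration_secs' field, so per-operation timings (PowerOffVM_Task, ReconfigVM_Task, and so on) can be recovered from a captured log with a short script. A sketch against the line format shown here (the sample string is abridged from the entries above):

    import re

    # Matches the "'name': <task>, 'duration_secs': <n>} completed" fragment.
    TASK_RE = re.compile(r"'name': (\w+), 'duration_secs': ([\d.]+)\} completed")

    lines = [
        "[ 970.258724] env[61839]: DEBUG oslo_vmware.api [...] Task: {'id': "
        "task-1314845, 'name': PowerOffVM_Task, 'duration_secs': 0.251271} "
        "completed successfully.",
    ]
    for line in lines:
        m = TASK_RE.search(line)
        if m:
            print(f"{m.group(1)}: {float(m.group(2)):.3f}s")
    # -> PowerOffVM_Task: 0.251s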
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.453647] env[61839]: DEBUG nova.compute.manager [req-3d02dd61-539d-4292-b704-8d940c20589c req-a16bd61c-46f9-4fff-9787-2237ba94e003 service nova] [instance: 73b83239-bbc8-41d1-aec3-2b4519c320af] Received event network-vif-plugged-3dbee357-54cc-4fa4-826b-24aa98397b45 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 970.453890] env[61839]: DEBUG oslo_concurrency.lockutils [req-3d02dd61-539d-4292-b704-8d940c20589c req-a16bd61c-46f9-4fff-9787-2237ba94e003 service nova] Acquiring lock "73b83239-bbc8-41d1-aec3-2b4519c320af-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 970.454122] env[61839]: DEBUG oslo_concurrency.lockutils [req-3d02dd61-539d-4292-b704-8d940c20589c req-a16bd61c-46f9-4fff-9787-2237ba94e003 service nova] Lock "73b83239-bbc8-41d1-aec3-2b4519c320af-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 970.454375] env[61839]: DEBUG oslo_concurrency.lockutils [req-3d02dd61-539d-4292-b704-8d940c20589c req-a16bd61c-46f9-4fff-9787-2237ba94e003 service nova] Lock "73b83239-bbc8-41d1-aec3-2b4519c320af-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 970.454580] env[61839]: DEBUG nova.compute.manager [req-3d02dd61-539d-4292-b704-8d940c20589c req-a16bd61c-46f9-4fff-9787-2237ba94e003 service nova] [instance: 73b83239-bbc8-41d1-aec3-2b4519c320af] No waiting events found dispatching network-vif-plugged-3dbee357-54cc-4fa4-826b-24aa98397b45 {{(pid=61839) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 970.454839] env[61839]: WARNING nova.compute.manager [req-3d02dd61-539d-4292-b704-8d940c20589c req-a16bd61c-46f9-4fff-9787-2237ba94e003 service nova] [instance: 73b83239-bbc8-41d1-aec3-2b4519c320af] Received unexpected event network-vif-plugged-3dbee357-54cc-4fa4-826b-24aa98397b45 for instance with vm_state building and task_state spawning. [ 970.550733] env[61839]: DEBUG oslo_concurrency.lockutils [None req-b4d0a2e1-0649-414e-a6e9-a456893a9b36 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.361s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 970.551293] env[61839]: DEBUG nova.compute.manager [None req-b4d0a2e1-0649-414e-a6e9-a456893a9b36 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 3c832102-cacc-4dd8-a336-2aa1d8bd8116] Start building networks asynchronously for instance. 
{{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 970.554332] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3eb64946-db7f-4575-9761-49a11077e028 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.631s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 970.554332] env[61839]: DEBUG nova.objects.instance [None req-3eb64946-db7f-4575-9761-49a11077e028 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Lazy-loading 'pci_requests' on Instance uuid 86525ea7-af75-4b10-85a1-c0fbab73ea5f {{(pid=61839) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 970.573808] env[61839]: DEBUG nova.network.neutron [None req-0abcef39-20e2-4d1f-9e70-67180de20850 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 73b83239-bbc8-41d1-aec3-2b4519c320af] Successfully updated port: 3dbee357-54cc-4fa4-826b-24aa98397b45 {{(pid=61839) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 970.767634] env[61839]: DEBUG nova.virt.hardware [None req-2110b186-7030-4148-a543-65a3916b2aa1 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-18T16:51:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=e497cc62-282a-4a70-9770-22d80d8a1013,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 970.767863] env[61839]: DEBUG nova.virt.hardware [None req-2110b186-7030-4148-a543-65a3916b2aa1 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Flavor limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 970.767950] env[61839]: DEBUG nova.virt.hardware [None req-2110b186-7030-4148-a543-65a3916b2aa1 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Image limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 970.768274] env[61839]: DEBUG nova.virt.hardware [None req-2110b186-7030-4148-a543-65a3916b2aa1 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Flavor pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 970.768483] env[61839]: DEBUG nova.virt.hardware [None req-2110b186-7030-4148-a543-65a3916b2aa1 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Image pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 970.768648] env[61839]: DEBUG nova.virt.hardware [None req-2110b186-7030-4148-a543-65a3916b2aa1 tempest-ServerDiskConfigTestJSON-292130239 
[ 970.775369] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-514899b6-9a73-48d1-ae92-1ff686d78b60 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 970.794784] env[61839]: DEBUG oslo_vmware.api [None req-2110b186-7030-4148-a543-65a3916b2aa1 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Waiting for the task: (returnval){
[ 970.794784] env[61839]: value = "task-1314847"
[ 970.794784] env[61839]: _type = "Task"
[ 970.794784] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 970.804121] env[61839]: DEBUG oslo_vmware.api [None req-2110b186-7030-4148-a543-65a3916b2aa1 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Task: {'id': task-1314847, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 970.827761] env[61839]: DEBUG oslo_vmware.api [None req-162b3bf6-5d8f-4f91-9087-bf5f3018ad49 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Task: {'id': task-1314846, 'name': PowerOffVM_Task, 'duration_secs': 0.143971} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
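The task morefs above (task-1314846, task-1314847) are vCenter tasks being polled to completion by oslo.vmware; the "progress is N%" and "completed successfully" lines come from that poll loop. A hedged sketch of how a caller drives such a task (connection values would be placeholders, not this deployment's):

    from oslo_vmware import api

    # Constructing the session logs in immediately, so this is illustrative:
    # session = api.VMwareAPISession('vc.example.test', 'user', 'secret',
    #                                api_retry_count=10, task_poll_interval=0.5)

    def power_off(session, vm_ref):
        # invoke_api() issues the SOAP call (the "Invoking ..._Task" lines);
        # wait_for_task() polls the returned task moref until it reaches
        # 'success' or raises on 'error'.
        task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
        return session.wait_for_task(task)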
[ 970.828056] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-162b3bf6-5d8f-4f91-9087-bf5f3018ad49 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] [instance: a262845a-0ae2-4e0e-9040-01f0ed37c95c] Powered off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}}
[ 970.828285] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-162b3bf6-5d8f-4f91-9087-bf5f3018ad49 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] [instance: a262845a-0ae2-4e0e-9040-01f0ed37c95c] Destroying instance {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}}
[ 970.829110] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05e8828a-e80c-4a6d-ae2b-a1932920b632 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 970.836515] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-162b3bf6-5d8f-4f91-9087-bf5f3018ad49 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] [instance: a262845a-0ae2-4e0e-9040-01f0ed37c95c] Unregistering the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}}
[ 970.836761] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ad354803-d787-4d3a-a8b7-6477a827cf3f {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 970.869785] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-162b3bf6-5d8f-4f91-9087-bf5f3018ad49 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] [instance: a262845a-0ae2-4e0e-9040-01f0ed37c95c] Unregistered the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}}
[ 970.870147] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-162b3bf6-5d8f-4f91-9087-bf5f3018ad49 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] [instance: a262845a-0ae2-4e0e-9040-01f0ed37c95c] Deleting contents of the VM from datastore datastore1 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}}
[ 970.870395] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-162b3bf6-5d8f-4f91-9087-bf5f3018ad49 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Deleting the datastore file [datastore1] a262845a-0ae2-4e0e-9040-01f0ed37c95c {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}}
[ 970.870780] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-93dee8c1-01bd-4aae-ab98-163243c069f7 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 970.880338] env[61839]: DEBUG oslo_vmware.api [None req-162b3bf6-5d8f-4f91-9087-bf5f3018ad49 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Waiting for the task: (returnval){
[ 970.880338] env[61839]: value = "task-1314849"
[ 970.880338] env[61839]: _type = "Task"
[ 970.880338] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
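The destroy path above is: power off, unregister the VM, then remove its datastore directory via FileManager, which yields another task (task-1314849) to poll. A hedged sketch of the file-delete step, loosely patterned on nova's ds_util.file_delete (session and refs assumed to come from an established VMwareAPISession):

    def delete_datastore_file(session, path, datacenter_ref):
        # FileManager lives on the vSphere service content;
        # DeleteDatastoreFile_Task returns a task moref that is then polled,
        # just as task-1314849 is in the log above.
        file_manager = session.vim.service_content.fileManager
        task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                                  file_manager, name=path,
                                  datacenter=datacenter_ref)
        session.wait_for_task(task)

    # e.g. delete_datastore_file(session,
    #          '[datastore1] a262845a-0ae2-4e0e-9040-01f0ed37c95c', dc_ref)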
[ 970.889883] env[61839]: DEBUG oslo_vmware.api [None req-162b3bf6-5d8f-4f91-9087-bf5f3018ad49 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Task: {'id': task-1314849, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 971.058008] env[61839]: DEBUG nova.compute.utils [None req-b4d0a2e1-0649-414e-a6e9-a456893a9b36 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Using /dev/sd instead of None {{(pid=61839) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}}
[ 971.060774] env[61839]: DEBUG nova.objects.instance [None req-3eb64946-db7f-4575-9761-49a11077e028 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Lazy-loading 'numa_topology' on Instance uuid 86525ea7-af75-4b10-85a1-c0fbab73ea5f {{(pid=61839) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}}
[ 971.061883] env[61839]: DEBUG nova.compute.manager [None req-b4d0a2e1-0649-414e-a6e9-a456893a9b36 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 3c832102-cacc-4dd8-a336-2aa1d8bd8116] Allocating IP information in the background. {{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}}
[ 971.062066] env[61839]: DEBUG nova.network.neutron [None req-b4d0a2e1-0649-414e-a6e9-a456893a9b36 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 3c832102-cacc-4dd8-a336-2aa1d8bd8116] allocate_for_instance() {{(pid=61839) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}}
[ 971.076633] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0abcef39-20e2-4d1f-9e70-67180de20850 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Acquiring lock "refresh_cache-73b83239-bbc8-41d1-aec3-2b4519c320af" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 971.076830] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0abcef39-20e2-4d1f-9e70-67180de20850 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Acquired lock "refresh_cache-73b83239-bbc8-41d1-aec3-2b4519c320af" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 971.076967] env[61839]: DEBUG nova.network.neutron [None req-0abcef39-20e2-4d1f-9e70-67180de20850 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 73b83239-bbc8-41d1-aec3-2b4519c320af] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}}
[ 971.105237] env[61839]: DEBUG nova.policy [None req-b4d0a2e1-0649-414e-a6e9-a456893a9b36 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7b9ca67c278b4cb9a83ec3c6ce42af5d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5aba1e066cb4400dbbacc92f393962e6', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61839) authorize /opt/stack/nova/nova/policy.py:201}}
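The "Policy check ... failed" line is oslo.policy evaluating a rule against the request's credentials: a member/reader token fails an admin-only rule, so the port is created without external-network privileges. A minimal sketch of such a check (the rule default here is illustrative, not Nova's shipped policy, which is registered under nova/policies/):

    from oslo_config import cfg
    from oslo_policy import policy

    enforcer = policy.Enforcer(cfg.CONF)
    # Assumed default for illustration only.
    enforcer.register_default(
        policy.RuleDefault('network:attach_external_network', 'role:admin'))

    creds = {'roles': ['member', 'reader'],
             'project_id': '5aba1e066cb4400dbbacc92f393962e6'}
    # enforce() returns False for this token, which is what the DEBUG line
    # above reports before Nova falls back to a normal port.
    print(enforcer.enforce('network:attach_external_network', {}, creds))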
[ 971.305177] env[61839]: DEBUG oslo_vmware.api [None req-2110b186-7030-4148-a543-65a3916b2aa1 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Task: {'id': task-1314847, 'name': ReconfigVM_Task, 'duration_secs': 0.169877} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 971.305495] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-2110b186-7030-4148-a543-65a3916b2aa1 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 23ee24d5-bccd-497d-a53f-b9723fd9c707] Updating instance '23ee24d5-bccd-497d-a53f-b9723fd9c707' progress to 33 {{(pid=61839) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}}
[ 971.345995] env[61839]: DEBUG nova.network.neutron [None req-b4d0a2e1-0649-414e-a6e9-a456893a9b36 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 3c832102-cacc-4dd8-a336-2aa1d8bd8116] Successfully created port: de313fb8-8012-41f5-b060-5f843422a301 {{(pid=61839) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}}
[ 971.392560] env[61839]: DEBUG oslo_vmware.api [None req-162b3bf6-5d8f-4f91-9087-bf5f3018ad49 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Task: {'id': task-1314849, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.106289} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 971.392914] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-162b3bf6-5d8f-4f91-9087-bf5f3018ad49 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Deleted the datastore file {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}}
[ 971.393194] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-162b3bf6-5d8f-4f91-9087-bf5f3018ad49 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] [instance: a262845a-0ae2-4e0e-9040-01f0ed37c95c] Deleted contents of the VM from datastore datastore1 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}}
[ 971.393299] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-162b3bf6-5d8f-4f91-9087-bf5f3018ad49 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] [instance: a262845a-0ae2-4e0e-9040-01f0ed37c95c] Instance destroyed {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}}
[ 971.562685] env[61839]: DEBUG nova.compute.manager [None req-b4d0a2e1-0649-414e-a6e9-a456893a9b36 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 3c832102-cacc-4dd8-a336-2aa1d8bd8116] Start building block device mappings for instance. {{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}}
[ 971.565493] env[61839]: INFO nova.compute.claims [None req-3eb64946-db7f-4575-9761-49a11077e028 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] [instance: 86525ea7-af75-4b10-85a1-c0fbab73ea5f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 971.606584] env[61839]: DEBUG nova.network.neutron [None req-0abcef39-20e2-4d1f-9e70-67180de20850 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 73b83239-bbc8-41d1-aec3-2b4519c320af] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 971.803507] env[61839]: DEBUG nova.network.neutron [None req-0abcef39-20e2-4d1f-9e70-67180de20850 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 73b83239-bbc8-41d1-aec3-2b4519c320af] Updating instance_info_cache with network_info: [{"id": "3dbee357-54cc-4fa4-826b-24aa98397b45", "address": "fa:16:3e:6c:55:bb", "network": {"id": "90aa5d36-d3ac-4a62-9f9d-7578cd9ac466", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-2104030849-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "48d8c406ff504d71bba5fb74caf11c14", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eed34ae1-5f7f-4deb-9db8-85eaa1e60c29", "external-id": "nsx-vlan-transportzone-780", "segmentation_id": 780, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3dbee357-54", "ovs_interfaceid": "3dbee357-54cc-4fa4-826b-24aa98397b45", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
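The network_info blob cached above is a list of VIF dicts, each nesting a network with its subnets, IPs, and binding details. A short sketch of walking that structure, trimmed to the fields the sketch reads:

    import json

    network_info = json.loads("""
    [{"id": "3dbee357-54cc-4fa4-826b-24aa98397b45",
      "network": {"id": "90aa5d36-d3ac-4a62-9f9d-7578cd9ac466",
                  "subnets": [{"cidr": "192.168.128.0/28",
                               "ips": [{"address": "192.168.128.5",
                                        "type": "fixed"}]}]}}]
    """)

    for vif in network_info:
        for subnet in vif["network"]["subnets"]:
            for ip in subnet["ips"]:
                # Prints: 3dbee357-... 192.168.128.0/28 192.168.128.5
                print(vif["id"], subnet["cidr"], ip["address"])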
[ 971.813915] env[61839]: DEBUG nova.virt.hardware [None req-2110b186-7030-4148-a543-65a3916b2aa1 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-18T16:51:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=e497cc62-282a-4a70-9770-22d80d8a1013,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}}
[ 971.813915] env[61839]: DEBUG nova.virt.hardware [None req-2110b186-7030-4148-a543-65a3916b2aa1 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Flavor limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}}
[ 971.813915] env[61839]: DEBUG nova.virt.hardware [None req-2110b186-7030-4148-a543-65a3916b2aa1 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Image limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 971.814199] env[61839]: DEBUG nova.virt.hardware [None req-2110b186-7030-4148-a543-65a3916b2aa1 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Flavor pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}}
[ 971.814362] env[61839]: DEBUG nova.virt.hardware [None req-2110b186-7030-4148-a543-65a3916b2aa1 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Image pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 971.814543] env[61839]: DEBUG nova.virt.hardware [None req-2110b186-7030-4148-a543-65a3916b2aa1 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}}
[ 971.814757] env[61839]: DEBUG nova.virt.hardware [None req-2110b186-7030-4148-a543-65a3916b2aa1 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}}
[ 971.814984] env[61839]: DEBUG nova.virt.hardware [None req-2110b186-7030-4148-a543-65a3916b2aa1 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}}
[ 971.815187] env[61839]: DEBUG nova.virt.hardware [None req-2110b186-7030-4148-a543-65a3916b2aa1 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Got 1 possible topologies {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}}
[ 971.815360] env[61839]: DEBUG nova.virt.hardware [None req-2110b186-7030-4148-a543-65a3916b2aa1 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}}
[ 971.815542] env[61839]: DEBUG nova.virt.hardware [None req-2110b186-7030-4148-a543-65a3916b2aa1 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
[ 971.820729] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-2110b186-7030-4148-a543-65a3916b2aa1 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 23ee24d5-bccd-497d-a53f-b9723fd9c707] Reconfiguring VM instance instance-0000005c to detach disk 2000 {{(pid=61839) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}}
[ 971.821588] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1e05339d-8eb9-4056-8696-c0de29f643e0 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 971.841449] env[61839]: DEBUG oslo_vmware.api [None req-2110b186-7030-4148-a543-65a3916b2aa1 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Waiting for the task: (returnval){
[ 971.841449] env[61839]: value = "task-1314850"
[ 971.841449] env[61839]: _type = "Task"
[ 971.841449] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 971.850059] env[61839]: DEBUG oslo_vmware.api [None req-2110b186-7030-4148-a543-65a3916b2aa1 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Task: {'id': task-1314850, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 972.305800] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0abcef39-20e2-4d1f-9e70-67180de20850 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Releasing lock "refresh_cache-73b83239-bbc8-41d1-aec3-2b4519c320af" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 972.306226] env[61839]: DEBUG nova.compute.manager [None req-0abcef39-20e2-4d1f-9e70-67180de20850 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 73b83239-bbc8-41d1-aec3-2b4519c320af] Instance network_info: |[{"id": "3dbee357-54cc-4fa4-826b-24aa98397b45", "address": "fa:16:3e:6c:55:bb", "network": {"id": "90aa5d36-d3ac-4a62-9f9d-7578cd9ac466", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-2104030849-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "48d8c406ff504d71bba5fb74caf11c14", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eed34ae1-5f7f-4deb-9db8-85eaa1e60c29", "external-id": "nsx-vlan-transportzone-780", "segmentation_id": 780, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3dbee357-54", "ovs_interfaceid": "3dbee357-54cc-4fa4-826b-24aa98397b45", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}}
[ 972.306680] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-0abcef39-20e2-4d1f-9e70-67180de20850 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 73b83239-bbc8-41d1-aec3-2b4519c320af] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6c:55:bb', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'eed34ae1-5f7f-4deb-9db8-85eaa1e60c29', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3dbee357-54cc-4fa4-826b-24aa98397b45', 'vif_model': 'vmxnet3'}] {{(pid=61839) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}}
[ 972.314548] env[61839]: DEBUG oslo.service.loopingcall [None req-0abcef39-20e2-4d1f-9e70-67180de20850 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 972.314869] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 73b83239-bbc8-41d1-aec3-2b4519c320af] Creating VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}}
[ 972.315132] env[61839]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b663ab2b-2e20-4584-802c-699fac5e5905 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 972.335343] env[61839]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){
[ 972.335343] env[61839]: value = "task-1314851"
[ 972.335343] env[61839]: _type = "Task"
[ 972.335343] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 972.344572] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314851, 'name': CreateVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
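The oslo.service.loopingcall line above shows how the CreateVM_Task wait is driven: a FixedIntervalLoopingCall re-invokes a poll function until it raises LoopingCallDone. A self-contained sketch of that pattern (the polled condition here is illustrative; in the log it is the vCenter task backing vm_util.create_vm):

    from oslo_service import loopingcall

    state = {'done': False}

    def _poll():
        # A flag stands in for the real task-status check.
        if state['done']:
            raise loopingcall.LoopingCallDone(retvalue='created')

    state['done'] = True
    timer = loopingcall.FixedIntervalLoopingCall(_poll)
    # start() returns an event; wait() yields the LoopingCallDone retvalue.
    print(timer.start(interval=0.5).wait())  # 'created'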
[ 972.352308] env[61839]: DEBUG oslo_vmware.api [None req-2110b186-7030-4148-a543-65a3916b2aa1 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Task: {'id': task-1314850, 'name': ReconfigVM_Task, 'duration_secs': 0.176375} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 972.352574] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-2110b186-7030-4148-a543-65a3916b2aa1 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 23ee24d5-bccd-497d-a53f-b9723fd9c707] Reconfigured VM instance instance-0000005c to detach disk 2000 {{(pid=61839) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}}
[ 972.353383] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da53a2ec-65f5-4cd5-8ee7-41f6235f5398 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 972.375386] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-2110b186-7030-4148-a543-65a3916b2aa1 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 23ee24d5-bccd-497d-a53f-b9723fd9c707] Reconfiguring VM instance instance-0000005c to attach disk [datastore2] 23ee24d5-bccd-497d-a53f-b9723fd9c707/23ee24d5-bccd-497d-a53f-b9723fd9c707.vmdk or device None with type thin {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}}
[ 972.376080] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e998cc3d-0819-4a03-9699-6f51439f6064 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 972.395609] env[61839]: DEBUG oslo_vmware.api [None req-2110b186-7030-4148-a543-65a3916b2aa1 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Waiting for the task: (returnval){
[ 972.395609] env[61839]: value = "task-1314852"
[ 972.395609] env[61839]: _type = "Task"
[ 972.395609] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 972.407375] env[61839]: DEBUG oslo_vmware.api [None req-2110b186-7030-4148-a543-65a3916b2aa1 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Task: {'id': task-1314852, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 972.431651] env[61839]: DEBUG nova.virt.hardware [None req-162b3bf6-5d8f-4f91-9087-bf5f3018ad49 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-18T16:51:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-18T16:50:57Z,direct_url=,disk_format='vmdk',id=e497cc62-282a-4a70-9770-22d80d8a1013,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a98e034276e44534a9feace637762da7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-18T16:50:58Z,virtual_size=,visibility=), allow threads: False {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}}
[ 972.431917] env[61839]: DEBUG nova.virt.hardware [None req-162b3bf6-5d8f-4f91-9087-bf5f3018ad49 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Flavor limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}}
[ 972.432095] env[61839]: DEBUG nova.virt.hardware [None req-162b3bf6-5d8f-4f91-9087-bf5f3018ad49 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Image limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 972.432309] env[61839]: DEBUG nova.virt.hardware [None req-162b3bf6-5d8f-4f91-9087-bf5f3018ad49 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Flavor pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}}
[ 972.432485] env[61839]: DEBUG nova.virt.hardware [None req-162b3bf6-5d8f-4f91-9087-bf5f3018ad49 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Image pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 972.432643] env[61839]: DEBUG nova.virt.hardware [None req-162b3bf6-5d8f-4f91-9087-bf5f3018ad49 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}}
[ 972.432943] env[61839]: DEBUG nova.virt.hardware [None req-162b3bf6-5d8f-4f91-9087-bf5f3018ad49 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}}
[ 972.433216] env[61839]: DEBUG nova.virt.hardware [None req-162b3bf6-5d8f-4f91-9087-bf5f3018ad49 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}}
[ 972.433356] env[61839]: DEBUG nova.virt.hardware [None req-162b3bf6-5d8f-4f91-9087-bf5f3018ad49 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Got 1 possible topologies {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}}
[ 972.433554] env[61839]: DEBUG nova.virt.hardware [None req-162b3bf6-5d8f-4f91-9087-bf5f3018ad49 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}}
[ 972.433738] env[61839]: DEBUG nova.virt.hardware [None req-162b3bf6-5d8f-4f91-9087-bf5f3018ad49 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
[ 972.434639] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6552503e-28cc-48e4-ae2b-4d7142000be6 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 972.442692] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26f2da11-edf8-445a-8831-c4bdf7e7bba7 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 972.457771] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-162b3bf6-5d8f-4f91-9087-bf5f3018ad49 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] [instance: a262845a-0ae2-4e0e-9040-01f0ed37c95c] Instance VIF info [] {{(pid=61839) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}}
[ 972.463391] env[61839]: DEBUG oslo.service.loopingcall [None req-162b3bf6-5d8f-4f91-9087-bf5f3018ad49 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 972.463629] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a262845a-0ae2-4e0e-9040-01f0ed37c95c] Creating VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}}
[ 972.463848] env[61839]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fd6673f7-51a3-4978-a671-6424a06e567d {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 972.480872] env[61839]: DEBUG nova.compute.manager [req-695dd52d-cbd1-4223-83bd-eb43497ada9f req-c9c3fd6c-dc85-4727-9efb-dfdb6b20f38b service nova] [instance: 73b83239-bbc8-41d1-aec3-2b4519c320af] Received event network-changed-3dbee357-54cc-4fa4-826b-24aa98397b45 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}}
[ 972.481132] env[61839]: DEBUG nova.compute.manager [req-695dd52d-cbd1-4223-83bd-eb43497ada9f req-c9c3fd6c-dc85-4727-9efb-dfdb6b20f38b service nova] [instance: 73b83239-bbc8-41d1-aec3-2b4519c320af] Refreshing instance network info cache due to event network-changed-3dbee357-54cc-4fa4-826b-24aa98397b45. {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}}
[ 972.481306] env[61839]: DEBUG oslo_concurrency.lockutils [req-695dd52d-cbd1-4223-83bd-eb43497ada9f req-c9c3fd6c-dc85-4727-9efb-dfdb6b20f38b service nova] Acquiring lock "refresh_cache-73b83239-bbc8-41d1-aec3-2b4519c320af" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 972.481466] env[61839]: DEBUG oslo_concurrency.lockutils [req-695dd52d-cbd1-4223-83bd-eb43497ada9f req-c9c3fd6c-dc85-4727-9efb-dfdb6b20f38b service nova] Acquired lock "refresh_cache-73b83239-bbc8-41d1-aec3-2b4519c320af" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 972.481632] env[61839]: DEBUG nova.network.neutron [req-695dd52d-cbd1-4223-83bd-eb43497ada9f req-c9c3fd6c-dc85-4727-9efb-dfdb6b20f38b service nova] [instance: 73b83239-bbc8-41d1-aec3-2b4519c320af] Refreshing network info cache for port 3dbee357-54cc-4fa4-826b-24aa98397b45 {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}}
[ 972.484379] env[61839]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){
[ 972.484379] env[61839]: value = "task-1314853"
[ 972.484379] env[61839]: _type = "Task"
[ 972.484379] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 972.494302] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314853, 'name': CreateVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 972.576374] env[61839]: DEBUG nova.compute.manager [None req-b4d0a2e1-0649-414e-a6e9-a456893a9b36 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 3c832102-cacc-4dd8-a336-2aa1d8bd8116] Start spawning the instance on the hypervisor. {{(pid=61839) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}}
[ 972.606382] env[61839]: DEBUG nova.virt.hardware [None req-b4d0a2e1-0649-414e-a6e9-a456893a9b36 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-18T16:51:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-18T16:50:57Z,direct_url=,disk_format='vmdk',id=e497cc62-282a-4a70-9770-22d80d8a1013,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a98e034276e44534a9feace637762da7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-18T16:50:58Z,virtual_size=,visibility=), allow threads: False {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}}
[ 972.606725] env[61839]: DEBUG nova.virt.hardware [None req-b4d0a2e1-0649-414e-a6e9-a456893a9b36 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Flavor limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}}
[ 972.606928] env[61839]: DEBUG nova.virt.hardware [None req-b4d0a2e1-0649-414e-a6e9-a456893a9b36 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Image limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 972.607859] env[61839]: DEBUG nova.virt.hardware [None req-b4d0a2e1-0649-414e-a6e9-a456893a9b36 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Flavor pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}}
[ 972.607859] env[61839]: DEBUG nova.virt.hardware [None req-b4d0a2e1-0649-414e-a6e9-a456893a9b36 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Image pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 972.607859] env[61839]: DEBUG nova.virt.hardware [None req-b4d0a2e1-0649-414e-a6e9-a456893a9b36 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}}
[ 972.608079] env[61839]: DEBUG nova.virt.hardware [None req-b4d0a2e1-0649-414e-a6e9-a456893a9b36 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}}
[ 972.608322] env[61839]: DEBUG nova.virt.hardware [None req-b4d0a2e1-0649-414e-a6e9-a456893a9b36 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}}
[ 972.608545] env[61839]: DEBUG nova.virt.hardware [None req-b4d0a2e1-0649-414e-a6e9-a456893a9b36 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Got 1 possible topologies {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}}
[ 972.608758] env[61839]: DEBUG nova.virt.hardware [None req-b4d0a2e1-0649-414e-a6e9-a456893a9b36 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}}
[ 972.608990] env[61839]: DEBUG nova.virt.hardware [None req-b4d0a2e1-0649-414e-a6e9-a456893a9b36 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
[ 972.609972] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d9e10ec-7f3b-4623-bfdf-aa1e64325882 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 972.622529] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd180d26-4f18-449f-aeca-ad1987ccd1f6 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 972.848512] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314851, 'name': CreateVM_Task, 'duration_secs': 0.32242} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 972.848732] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 73b83239-bbc8-41d1-aec3-2b4519c320af] Created VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}}
[ 972.849752] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0abcef39-20e2-4d1f-9e70-67180de20850 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 972.849948] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0abcef39-20e2-4d1f-9e70-67180de20850 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 972.850455] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0abcef39-20e2-4d1f-9e70-67180de20850 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 972.853739] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b6c7f223-8e9d-4f77-b16c-692630067bf9 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 972.861637] env[61839]: DEBUG nova.network.neutron [None req-b4d0a2e1-0649-414e-a6e9-a456893a9b36 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 3c832102-cacc-4dd8-a336-2aa1d8bd8116] Successfully updated port: de313fb8-8012-41f5-b060-5f843422a301 {{(pid=61839) _update_port /opt/stack/nova/nova/network/neutron.py:586}}
[ 972.862691] env[61839]: DEBUG oslo_vmware.api [None req-0abcef39-20e2-4d1f-9e70-67180de20850 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Waiting for the task: (returnval){
[ 972.862691] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]5238ee10-81e2-0066-942e-2be1b9edc1d3"
[ 972.862691] env[61839]: _type = "Task"
[ 972.862691] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 972.872538] env[61839]: DEBUG oslo_vmware.api [None req-0abcef39-20e2-4d1f-9e70-67180de20850 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]5238ee10-81e2-0066-942e-2be1b9edc1d3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 972.875282] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45be8f48-58ca-437d-b8d5-23f9b1dfb43a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 972.884208] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9dc203a3-549b-4bcf-a19e-8a1fcccd2f80 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 972.920055] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e07e5257-3875-4293-8d91-112229905c83 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 972.928366] env[61839]: DEBUG oslo_vmware.api [None req-2110b186-7030-4148-a543-65a3916b2aa1 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Task: {'id': task-1314852, 'name': ReconfigVM_Task, 'duration_secs': 0.288416} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 972.930574] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-2110b186-7030-4148-a543-65a3916b2aa1 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 23ee24d5-bccd-497d-a53f-b9723fd9c707] Reconfigured VM instance instance-0000005c to attach disk [datastore2] 23ee24d5-bccd-497d-a53f-b9723fd9c707/23ee24d5-bccd-497d-a53f-b9723fd9c707.vmdk or device None with type thin {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}}
[ 972.930805] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-2110b186-7030-4148-a543-65a3916b2aa1 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 23ee24d5-bccd-497d-a53f-b9723fd9c707] Updating instance '23ee24d5-bccd-497d-a53f-b9723fd9c707' progress to 50 {{(pid=61839) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}}
[ 972.935843] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8794cde5-4ebb-486f-aca4-f4e20e65828e {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 972.954689] env[61839]: DEBUG nova.compute.provider_tree [None req-3eb64946-db7f-4575-9761-49a11077e028 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 972.996550] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314853, 'name': CreateVM_Task, 'duration_secs': 0.254311} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 972.996751] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a262845a-0ae2-4e0e-9040-01f0ed37c95c] Created VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}}
[ 972.997158] env[61839]: DEBUG oslo_concurrency.lockutils [None req-162b3bf6-5d8f-4f91-9087-bf5f3018ad49 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 973.203455] env[61839]: DEBUG nova.network.neutron [req-695dd52d-cbd1-4223-83bd-eb43497ada9f req-c9c3fd6c-dc85-4727-9efb-dfdb6b20f38b service nova] [instance: 73b83239-bbc8-41d1-aec3-2b4519c320af] Updated VIF entry in instance network info cache for port 3dbee357-54cc-4fa4-826b-24aa98397b45. {{(pid=61839) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}}
[ 973.203815] env[61839]: DEBUG nova.network.neutron [req-695dd52d-cbd1-4223-83bd-eb43497ada9f req-c9c3fd6c-dc85-4727-9efb-dfdb6b20f38b service nova] [instance: 73b83239-bbc8-41d1-aec3-2b4519c320af] Updating instance_info_cache with network_info: [{"id": "3dbee357-54cc-4fa4-826b-24aa98397b45", "address": "fa:16:3e:6c:55:bb", "network": {"id": "90aa5d36-d3ac-4a62-9f9d-7578cd9ac466", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-2104030849-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "48d8c406ff504d71bba5fb74caf11c14", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eed34ae1-5f7f-4deb-9db8-85eaa1e60c29", "external-id": "nsx-vlan-transportzone-780", "segmentation_id": 780, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3dbee357-54", "ovs_interfaceid": "3dbee357-54cc-4fa4-826b-24aa98397b45", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 973.363974] env[61839]: DEBUG oslo_concurrency.lockutils [None req-b4d0a2e1-0649-414e-a6e9-a456893a9b36 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Acquiring lock "refresh_cache-3c832102-cacc-4dd8-a336-2aa1d8bd8116" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 973.364285] env[61839]: DEBUG oslo_concurrency.lockutils [None req-b4d0a2e1-0649-414e-a6e9-a456893a9b36 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Acquired lock "refresh_cache-3c832102-cacc-4dd8-a336-2aa1d8bd8116" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 973.364451] env[61839]: DEBUG nova.network.neutron [None req-b4d0a2e1-0649-414e-a6e9-a456893a9b36 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 3c832102-cacc-4dd8-a336-2aa1d8bd8116] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}}
[ 973.377865] env[61839]: DEBUG oslo_vmware.api [None req-0abcef39-20e2-4d1f-9e70-67180de20850 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]5238ee10-81e2-0066-942e-2be1b9edc1d3, 'name': SearchDatastore_Task, 'duration_secs': 0.0119} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 973.378179] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0abcef39-20e2-4d1f-9e70-67180de20850 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 973.378420] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-0abcef39-20e2-4d1f-9e70-67180de20850 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 73b83239-bbc8-41d1-aec3-2b4519c320af] Processing image e497cc62-282a-4a70-9770-22d80d8a1013 {{(pid=61839) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}}
[ 973.378659] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0abcef39-20e2-4d1f-9e70-67180de20850 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 973.378810] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0abcef39-20e2-4d1f-9e70-67180de20850 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 973.378994] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-0abcef39-20e2-4d1f-9e70-67180de20850 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 973.379308] env[61839]: DEBUG oslo_concurrency.lockutils [None req-162b3bf6-5d8f-4f91-9087-bf5f3018ad49 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 973.379607] env[61839]: DEBUG oslo_concurrency.lockutils [None req-162b3bf6-5d8f-4f91-9087-bf5f3018ad49 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 973.379837] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9bae0d62-e1fb-4909-8864-e79550b53e6a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 973.381585] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-46324b19-a441-4af4-879d-f4ea5fb2596f {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
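The "[datastore1] devstack-image-cache_base/..." strings above are datastore paths: the image cache is keyed by them, and the lockutils locks are taken on the path itself before the SearchDatastore check for a cached VMDK. oslo.vmware ships a small helper for composing and parsing these strings; a sketch, with values copied from the log (attribute names per my reading of oslo_vmware.objects.datastore):

    from oslo_vmware.objects.datastore import DatastorePath

    image_id = 'e497cc62-282a-4a70-9770-22d80d8a1013'
    cache_vmdk = DatastorePath('datastore1', 'devstack-image-cache_base',
                               image_id, image_id + '.vmdk')

    # str() renders the "[<datastore>] <relative path>" form used in the
    # lock names and SearchDatastore calls above.
    print(str(cache_vmdk))
    parsed = DatastorePath.parse(str(cache_vmdk))
    print(parsed.datastore, parsed.rel_path)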
[ 973.387566] env[61839]: DEBUG oslo_vmware.api [None req-162b3bf6-5d8f-4f91-9087-bf5f3018ad49 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Waiting for the task: (returnval){
[ 973.387566] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52c1934c-3239-aab1-5132-13b2a1233cf5"
[ 973.387566] env[61839]: _type = "Task"
[ 973.387566] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 973.391146] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-0abcef39-20e2-4d1f-9e70-67180de20850 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 973.391328] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-0abcef39-20e2-4d1f-9e70-67180de20850 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61839) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}}
[ 973.392507] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-79fe15c8-e407-4e3b-984a-537a4e723ec7 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 973.397393] env[61839]: DEBUG oslo_vmware.api [None req-162b3bf6-5d8f-4f91-9087-bf5f3018ad49 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52c1934c-3239-aab1-5132-13b2a1233cf5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 973.400384] env[61839]: DEBUG oslo_vmware.api [None req-0abcef39-20e2-4d1f-9e70-67180de20850 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Waiting for the task: (returnval){
[ 973.400384] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52b32b8d-d0e2-cd43-13be-513fc72eb228"
[ 973.400384] env[61839]: _type = "Task"
[ 973.400384] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 973.407494] env[61839]: DEBUG oslo_vmware.api [None req-0abcef39-20e2-4d1f-9e70-67180de20850 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52b32b8d-d0e2-cd43-13be-513fc72eb228, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 973.442574] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3355ff3a-e5f0-499c-9e08-29208c7ce45c {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 973.461908] env[61839]: DEBUG nova.scheduler.client.report [None req-3eb64946-db7f-4575-9761-49a11077e028 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 973.898536] env[61839]: DEBUG oslo_concurrency.lockutils [None req-162b3bf6-5d8f-4f91-9087-bf5f3018ad49 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 973.898782] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-162b3bf6-5d8f-4f91-9087-bf5f3018ad49 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] [instance: a262845a-0ae2-4e0e-9040-01f0ed37c95c] Processing image e497cc62-282a-4a70-9770-22d80d8a1013 {{(pid=61839) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 973.899056] env[61839]: DEBUG oslo_concurrency.lockutils [None req-162b3bf6-5d8f-4f91-9087-bf5f3018ad49 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 973.899760] env[61839]: DEBUG nova.network.neutron [None req-b4d0a2e1-0649-414e-a6e9-a456893a9b36 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 3c832102-cacc-4dd8-a336-2aa1d8bd8116] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 973.909841] env[61839]: DEBUG oslo_vmware.api [None req-0abcef39-20e2-4d1f-9e70-67180de20850 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52b32b8d-d0e2-cd43-13be-513fc72eb228, 'name': SearchDatastore_Task, 'duration_secs': 0.0103} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 973.910582] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c7899943-c578-4260-951e-37846c64bf8a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.915377] env[61839]: DEBUG oslo_vmware.api [None req-0abcef39-20e2-4d1f-9e70-67180de20850 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Waiting for the task: (returnval){ [ 973.915377] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52458606-2f30-4b00-36da-a1c9c27c3455" [ 973.915377] env[61839]: _type = "Task" [ 973.915377] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 973.924761] env[61839]: DEBUG oslo_vmware.api [None req-0abcef39-20e2-4d1f-9e70-67180de20850 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52458606-2f30-4b00-36da-a1c9c27c3455, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.969589] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3eb64946-db7f-4575-9761-49a11077e028 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.415s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 973.971291] env[61839]: DEBUG oslo_concurrency.lockutils [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 13.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 974.030349] env[61839]: DEBUG nova.network.neutron [None req-b4d0a2e1-0649-414e-a6e9-a456893a9b36 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 3c832102-cacc-4dd8-a336-2aa1d8bd8116] Updating instance_info_cache with network_info: [{"id": "de313fb8-8012-41f5-b060-5f843422a301", "address": "fa:16:3e:84:7e:e1", "network": {"id": "100bb090-83a3-4de8-bd2b-f65900cd5a3e", "bridge": "br-int", "label": "tempest-ServersTestJSON-901620943-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5aba1e066cb4400dbbacc92f393962e6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3b67e519-46cf-44ce-b670-4ba4c0c5b658", "external-id": "nsx-vlan-transportzone-110", "segmentation_id": 110, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapde313fb8-80", "ovs_interfaceid": "de313fb8-8012-41f5-b060-5f843422a301", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 974.037835] env[61839]: DEBUG nova.network.neutron [None req-2110b186-7030-4148-a543-65a3916b2aa1 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 23ee24d5-bccd-497d-a53f-b9723fd9c707] Port 56222a14-6e55-4e9b-a963-5d868763ad21 binding to destination host cpu-1 is already ACTIVE {{(pid=61839) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 974.045291] env[61839]: INFO nova.network.neutron [None req-3eb64946-db7f-4575-9761-49a11077e028 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] [instance: 86525ea7-af75-4b10-85a1-c0fbab73ea5f] Updating port ef8176cf-7494-44f4-a600-7dedff162419 with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 974.427499] env[61839]: DEBUG oslo_vmware.api [None req-0abcef39-20e2-4d1f-9e70-67180de20850 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52458606-2f30-4b00-36da-a1c9c27c3455, 'name': 
SearchDatastore_Task, 'duration_secs': 0.010152} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 974.427896] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0abcef39-20e2-4d1f-9e70-67180de20850 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 974.428016] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-0abcef39-20e2-4d1f-9e70-67180de20850 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk to [datastore1] 73b83239-bbc8-41d1-aec3-2b4519c320af/73b83239-bbc8-41d1-aec3-2b4519c320af.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 974.428331] env[61839]: DEBUG oslo_concurrency.lockutils [None req-162b3bf6-5d8f-4f91-9087-bf5f3018ad49 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 974.428579] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-162b3bf6-5d8f-4f91-9087-bf5f3018ad49 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 974.428811] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-44e08060-b68d-4082-a0d0-5376bdaf38ca {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.430700] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a91c3aac-9405-4344-9c7b-d7c09af7708d {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.437440] env[61839]: DEBUG oslo_vmware.api [None req-0abcef39-20e2-4d1f-9e70-67180de20850 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Waiting for the task: (returnval){ [ 974.437440] env[61839]: value = "task-1314854" [ 974.437440] env[61839]: _type = "Task" [ 974.437440] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 974.441207] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-162b3bf6-5d8f-4f91-9087-bf5f3018ad49 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 974.441392] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-162b3bf6-5d8f-4f91-9087-bf5f3018ad49 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61839) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 974.442375] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-76a7a372-5a5b-4890-a4fd-82558fdb7ae2 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.447349] env[61839]: DEBUG oslo_vmware.api [None req-0abcef39-20e2-4d1f-9e70-67180de20850 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1314854, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.450851] env[61839]: DEBUG oslo_vmware.api [None req-162b3bf6-5d8f-4f91-9087-bf5f3018ad49 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Waiting for the task: (returnval){ [ 974.450851] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]5214b953-11aa-3f07-e52d-5edaac702ba9" [ 974.450851] env[61839]: _type = "Task" [ 974.450851] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 974.458599] env[61839]: DEBUG oslo_vmware.api [None req-162b3bf6-5d8f-4f91-9087-bf5f3018ad49 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]5214b953-11aa-3f07-e52d-5edaac702ba9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.507301] env[61839]: DEBUG nova.compute.manager [req-d6a3d699-15f6-48a3-977d-8bcf8e0f3ab8 req-1e72ce84-391f-47b1-9205-d1cabea27891 service nova] [instance: 3c832102-cacc-4dd8-a336-2aa1d8bd8116] Received event network-vif-plugged-de313fb8-8012-41f5-b060-5f843422a301 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 974.507423] env[61839]: DEBUG oslo_concurrency.lockutils [req-d6a3d699-15f6-48a3-977d-8bcf8e0f3ab8 req-1e72ce84-391f-47b1-9205-d1cabea27891 service nova] Acquiring lock "3c832102-cacc-4dd8-a336-2aa1d8bd8116-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 974.507638] env[61839]: DEBUG oslo_concurrency.lockutils [req-d6a3d699-15f6-48a3-977d-8bcf8e0f3ab8 req-1e72ce84-391f-47b1-9205-d1cabea27891 service nova] Lock "3c832102-cacc-4dd8-a336-2aa1d8bd8116-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 974.507807] env[61839]: DEBUG oslo_concurrency.lockutils [req-d6a3d699-15f6-48a3-977d-8bcf8e0f3ab8 req-1e72ce84-391f-47b1-9205-d1cabea27891 service nova] Lock "3c832102-cacc-4dd8-a336-2aa1d8bd8116-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 974.507988] env[61839]: DEBUG nova.compute.manager [req-d6a3d699-15f6-48a3-977d-8bcf8e0f3ab8 req-1e72ce84-391f-47b1-9205-d1cabea27891 service nova] [instance: 3c832102-cacc-4dd8-a336-2aa1d8bd8116] No waiting events found dispatching network-vif-plugged-de313fb8-8012-41f5-b060-5f843422a301 
{{(pid=61839) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 974.508183] env[61839]: WARNING nova.compute.manager [req-d6a3d699-15f6-48a3-977d-8bcf8e0f3ab8 req-1e72ce84-391f-47b1-9205-d1cabea27891 service nova] [instance: 3c832102-cacc-4dd8-a336-2aa1d8bd8116] Received unexpected event network-vif-plugged-de313fb8-8012-41f5-b060-5f843422a301 for instance with vm_state building and task_state spawning. [ 974.508353] env[61839]: DEBUG nova.compute.manager [req-d6a3d699-15f6-48a3-977d-8bcf8e0f3ab8 req-1e72ce84-391f-47b1-9205-d1cabea27891 service nova] [instance: 3c832102-cacc-4dd8-a336-2aa1d8bd8116] Received event network-changed-de313fb8-8012-41f5-b060-5f843422a301 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 974.508513] env[61839]: DEBUG nova.compute.manager [req-d6a3d699-15f6-48a3-977d-8bcf8e0f3ab8 req-1e72ce84-391f-47b1-9205-d1cabea27891 service nova] [instance: 3c832102-cacc-4dd8-a336-2aa1d8bd8116] Refreshing instance network info cache due to event network-changed-de313fb8-8012-41f5-b060-5f843422a301. {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 974.508682] env[61839]: DEBUG oslo_concurrency.lockutils [req-d6a3d699-15f6-48a3-977d-8bcf8e0f3ab8 req-1e72ce84-391f-47b1-9205-d1cabea27891 service nova] Acquiring lock "refresh_cache-3c832102-cacc-4dd8-a336-2aa1d8bd8116" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 974.532838] env[61839]: DEBUG oslo_concurrency.lockutils [None req-b4d0a2e1-0649-414e-a6e9-a456893a9b36 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Releasing lock "refresh_cache-3c832102-cacc-4dd8-a336-2aa1d8bd8116" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 974.533134] env[61839]: DEBUG nova.compute.manager [None req-b4d0a2e1-0649-414e-a6e9-a456893a9b36 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 3c832102-cacc-4dd8-a336-2aa1d8bd8116] Instance network_info: |[{"id": "de313fb8-8012-41f5-b060-5f843422a301", "address": "fa:16:3e:84:7e:e1", "network": {"id": "100bb090-83a3-4de8-bd2b-f65900cd5a3e", "bridge": "br-int", "label": "tempest-ServersTestJSON-901620943-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5aba1e066cb4400dbbacc92f393962e6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3b67e519-46cf-44ce-b670-4ba4c0c5b658", "external-id": "nsx-vlan-transportzone-110", "segmentation_id": 110, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapde313fb8-80", "ovs_interfaceid": "de313fb8-8012-41f5-b060-5f843422a301", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 974.533413] env[61839]: DEBUG oslo_concurrency.lockutils [req-d6a3d699-15f6-48a3-977d-8bcf8e0f3ab8 req-1e72ce84-391f-47b1-9205-d1cabea27891 service nova] Acquired lock 
"refresh_cache-3c832102-cacc-4dd8-a336-2aa1d8bd8116" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 974.533592] env[61839]: DEBUG nova.network.neutron [req-d6a3d699-15f6-48a3-977d-8bcf8e0f3ab8 req-1e72ce84-391f-47b1-9205-d1cabea27891 service nova] [instance: 3c832102-cacc-4dd8-a336-2aa1d8bd8116] Refreshing network info cache for port de313fb8-8012-41f5-b060-5f843422a301 {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 974.535204] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-b4d0a2e1-0649-414e-a6e9-a456893a9b36 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 3c832102-cacc-4dd8-a336-2aa1d8bd8116] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:84:7e:e1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3b67e519-46cf-44ce-b670-4ba4c0c5b658', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'de313fb8-8012-41f5-b060-5f843422a301', 'vif_model': 'vmxnet3'}] {{(pid=61839) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 974.543425] env[61839]: DEBUG oslo.service.loopingcall [None req-b4d0a2e1-0649-414e-a6e9-a456893a9b36 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 974.549804] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3c832102-cacc-4dd8-a336-2aa1d8bd8116] Creating VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 974.550483] env[61839]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5571603d-ae3c-4137-8fea-172523dbc58f {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.575137] env[61839]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 974.575137] env[61839]: value = "task-1314855" [ 974.575137] env[61839]: _type = "Task" [ 974.575137] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 974.586142] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314855, 'name': CreateVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.950124] env[61839]: DEBUG oslo_vmware.api [None req-0abcef39-20e2-4d1f-9e70-67180de20850 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1314854, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.474844} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 974.950445] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-0abcef39-20e2-4d1f-9e70-67180de20850 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk to [datastore1] 73b83239-bbc8-41d1-aec3-2b4519c320af/73b83239-bbc8-41d1-aec3-2b4519c320af.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 974.950673] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-0abcef39-20e2-4d1f-9e70-67180de20850 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 73b83239-bbc8-41d1-aec3-2b4519c320af] Extending root virtual disk to 1048576 {{(pid=61839) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 974.950961] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e95a2512-eb35-49fd-8c66-85bf065d6b56 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.966940] env[61839]: DEBUG oslo_vmware.api [None req-162b3bf6-5d8f-4f91-9087-bf5f3018ad49 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]5214b953-11aa-3f07-e52d-5edaac702ba9, 'name': SearchDatastore_Task, 'duration_secs': 0.008485} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 974.968767] env[61839]: DEBUG oslo_vmware.api [None req-0abcef39-20e2-4d1f-9e70-67180de20850 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Waiting for the task: (returnval){ [ 974.968767] env[61839]: value = "task-1314856" [ 974.968767] env[61839]: _type = "Task" [ 974.968767] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 974.969011] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1584fd0f-82e9-4c3b-98fa-b01978a5673c {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.983993] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Applying migration context for instance fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4 as it has an incoming, in-progress migration ede2e43c-56ba-4f49-9646-56aeac32d3ea. Migration status is confirming {{(pid=61839) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 974.984257] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Applying migration context for instance 23ee24d5-bccd-497d-a53f-b9723fd9c707 as it has an incoming, in-progress migration 80376647-f9cc-4390-b3a3-e5cadc03cd80. 
Migration status is post-migrating {{(pid=61839) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 974.986173] env[61839]: INFO nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] [instance: fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4] Updating resource usage from migration ede2e43c-56ba-4f49-9646-56aeac32d3ea [ 974.986519] env[61839]: INFO nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] [instance: 23ee24d5-bccd-497d-a53f-b9723fd9c707] Updating resource usage from migration 80376647-f9cc-4390-b3a3-e5cadc03cd80 [ 974.988767] env[61839]: DEBUG oslo_vmware.api [None req-0abcef39-20e2-4d1f-9e70-67180de20850 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1314856, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.990510] env[61839]: DEBUG oslo_vmware.api [None req-162b3bf6-5d8f-4f91-9087-bf5f3018ad49 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Waiting for the task: (returnval){ [ 974.990510] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]528eb415-7cdb-7dee-f6f2-ac1f71a05cf2" [ 974.990510] env[61839]: _type = "Task" [ 974.990510] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 975.002413] env[61839]: DEBUG oslo_vmware.api [None req-162b3bf6-5d8f-4f91-9087-bf5f3018ad49 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]528eb415-7cdb-7dee-f6f2-ac1f71a05cf2, 'name': SearchDatastore_Task, 'duration_secs': 0.013337} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 975.002719] env[61839]: DEBUG oslo_concurrency.lockutils [None req-162b3bf6-5d8f-4f91-9087-bf5f3018ad49 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 975.003085] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-162b3bf6-5d8f-4f91-9087-bf5f3018ad49 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk to [datastore1] a262845a-0ae2-4e0e-9040-01f0ed37c95c/a262845a-0ae2-4e0e-9040-01f0ed37c95c.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 975.003442] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7e926e91-6e0f-4710-8a52-d32ece1aa02f {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.012034] env[61839]: DEBUG oslo_vmware.api [None req-162b3bf6-5d8f-4f91-9087-bf5f3018ad49 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Waiting for the task: (returnval){ [ 975.012034] env[61839]: value = "task-1314857" [ 975.012034] env[61839]: _type = "Task" [ 975.012034] env[61839]: } to complete. 
{{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 975.013025] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance 5c29c188-a34b-4751-9f8b-166af7b15088 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 975.013156] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance 603191b6-a4b0-451b-b98b-f3dbfb684300 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 975.013280] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance 6f43abec-51e2-40e4-8a0f-5a8617a9a9f8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 975.013427] env[61839]: WARNING nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance 56369316-a445-4a2a-a0a6-967074104e19 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 975.013554] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance 7f7b3f51-3e96-49f1-a84a-81ae649e6938 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 975.013665] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance 694a5d4b-3673-406b-a24a-d37fad33e549 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 975.013780] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance d718d866-dd6c-4332-b63a-be6850a5a785 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 975.013895] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance d4a8c153-7585-4c78-8aa4-56077e0a7af6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 975.014033] env[61839]: WARNING nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance 21e1c5b2-9865-457b-87c8-ce56c3c7b8f9 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 975.014157] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance 3e27bc4a-a4f3-4929-931a-0c3ecaf10e65 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 975.014280] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Migration ede2e43c-56ba-4f49-9646-56aeac32d3ea is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 975.014383] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 975.014494] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance 75344275-bdf2-4526-a101-e62ec270dd72 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 975.014595] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance a262845a-0ae2-4e0e-9040-01f0ed37c95c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 975.014707] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance 86525ea7-af75-4b10-85a1-c0fbab73ea5f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 975.014816] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Migration 80376647-f9cc-4390-b3a3-e5cadc03cd80 is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 975.014958] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance 23ee24d5-bccd-497d-a53f-b9723fd9c707 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 975.015082] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance 73b83239-bbc8-41d1-aec3-2b4519c320af actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 975.015194] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance 3c832102-cacc-4dd8-a336-2aa1d8bd8116 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 975.026016] env[61839]: DEBUG oslo_vmware.api [None req-162b3bf6-5d8f-4f91-9087-bf5f3018ad49 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Task: {'id': task-1314857, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.066195] env[61839]: DEBUG oslo_concurrency.lockutils [None req-2110b186-7030-4148-a543-65a3916b2aa1 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Acquiring lock "23ee24d5-bccd-497d-a53f-b9723fd9c707-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 975.066436] env[61839]: DEBUG oslo_concurrency.lockutils [None req-2110b186-7030-4148-a543-65a3916b2aa1 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Lock "23ee24d5-bccd-497d-a53f-b9723fd9c707-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 975.066615] env[61839]: DEBUG oslo_concurrency.lockutils [None req-2110b186-7030-4148-a543-65a3916b2aa1 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Lock "23ee24d5-bccd-497d-a53f-b9723fd9c707-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 975.089080] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314855, 'name': CreateVM_Task, 'duration_secs': 0.494838} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 975.089268] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3c832102-cacc-4dd8-a336-2aa1d8bd8116] Created VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 975.089924] env[61839]: DEBUG oslo_concurrency.lockutils [None req-b4d0a2e1-0649-414e-a6e9-a456893a9b36 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 975.090120] env[61839]: DEBUG oslo_concurrency.lockutils [None req-b4d0a2e1-0649-414e-a6e9-a456893a9b36 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 975.090457] env[61839]: DEBUG oslo_concurrency.lockutils [None req-b4d0a2e1-0649-414e-a6e9-a456893a9b36 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 975.090723] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8d5536fd-887b-40ad-95ca-68004d94e267 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.096492] env[61839]: DEBUG oslo_vmware.api [None req-b4d0a2e1-0649-414e-a6e9-a456893a9b36 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Waiting for the task: (returnval){ [ 975.096492] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]527760d5-96e0-c071-e0db-d8e8cea2d88a" [ 975.096492] env[61839]: _type = "Task" [ 975.096492] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 975.105099] env[61839]: DEBUG oslo_vmware.api [None req-b4d0a2e1-0649-414e-a6e9-a456893a9b36 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]527760d5-96e0-c071-e0db-d8e8cea2d88a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.314456] env[61839]: DEBUG nova.network.neutron [req-d6a3d699-15f6-48a3-977d-8bcf8e0f3ab8 req-1e72ce84-391f-47b1-9205-d1cabea27891 service nova] [instance: 3c832102-cacc-4dd8-a336-2aa1d8bd8116] Updated VIF entry in instance network info cache for port de313fb8-8012-41f5-b060-5f843422a301. 
{{(pid=61839) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 975.314901] env[61839]: DEBUG nova.network.neutron [req-d6a3d699-15f6-48a3-977d-8bcf8e0f3ab8 req-1e72ce84-391f-47b1-9205-d1cabea27891 service nova] [instance: 3c832102-cacc-4dd8-a336-2aa1d8bd8116] Updating instance_info_cache with network_info: [{"id": "de313fb8-8012-41f5-b060-5f843422a301", "address": "fa:16:3e:84:7e:e1", "network": {"id": "100bb090-83a3-4de8-bd2b-f65900cd5a3e", "bridge": "br-int", "label": "tempest-ServersTestJSON-901620943-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5aba1e066cb4400dbbacc92f393962e6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3b67e519-46cf-44ce-b670-4ba4c0c5b658", "external-id": "nsx-vlan-transportzone-110", "segmentation_id": 110, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapde313fb8-80", "ovs_interfaceid": "de313fb8-8012-41f5-b060-5f843422a301", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 975.483313] env[61839]: DEBUG oslo_vmware.api [None req-0abcef39-20e2-4d1f-9e70-67180de20850 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1314856, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067491} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 975.483721] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-0abcef39-20e2-4d1f-9e70-67180de20850 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 73b83239-bbc8-41d1-aec3-2b4519c320af] Extended root virtual disk {{(pid=61839) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 975.484482] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f6152f7-b8f2-4241-8011-5e28790b462a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.511272] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-0abcef39-20e2-4d1f-9e70-67180de20850 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 73b83239-bbc8-41d1-aec3-2b4519c320af] Reconfiguring VM instance instance-00000060 to attach disk [datastore1] 73b83239-bbc8-41d1-aec3-2b4519c320af/73b83239-bbc8-41d1-aec3-2b4519c320af.vmdk or device None with type sparse {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 975.511666] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e6d56579-8c1c-4b2a-ba20-76e1bad5499b {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.527743] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance a87f3a17-0a97-4b47-bc95-eee5975f8203 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 975.528077] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Total usable vcpus: 48, total allocated vcpus: 17 {{(pid=61839) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 975.528234] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3904MB phys_disk=200GB used_disk=17GB total_vcpus=48 used_vcpus=17 pci_stats=[] {{(pid=61839) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 975.540519] env[61839]: DEBUG oslo_vmware.api [None req-162b3bf6-5d8f-4f91-9087-bf5f3018ad49 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Task: {'id': task-1314857, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.542334] env[61839]: DEBUG oslo_vmware.api [None req-0abcef39-20e2-4d1f-9e70-67180de20850 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Waiting for the task: (returnval){ [ 975.542334] env[61839]: value = "task-1314858" [ 975.542334] env[61839]: _type = "Task" [ 975.542334] env[61839]: } to complete. 
{{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 975.554077] env[61839]: DEBUG oslo_vmware.api [None req-0abcef39-20e2-4d1f-9e70-67180de20850 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1314858, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.611507] env[61839]: DEBUG oslo_vmware.api [None req-b4d0a2e1-0649-414e-a6e9-a456893a9b36 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]527760d5-96e0-c071-e0db-d8e8cea2d88a, 'name': SearchDatastore_Task, 'duration_secs': 0.009117} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 975.611832] env[61839]: DEBUG oslo_concurrency.lockutils [None req-b4d0a2e1-0649-414e-a6e9-a456893a9b36 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 975.612526] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-b4d0a2e1-0649-414e-a6e9-a456893a9b36 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 3c832102-cacc-4dd8-a336-2aa1d8bd8116] Processing image e497cc62-282a-4a70-9770-22d80d8a1013 {{(pid=61839) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 975.613410] env[61839]: DEBUG oslo_concurrency.lockutils [None req-b4d0a2e1-0649-414e-a6e9-a456893a9b36 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 975.613410] env[61839]: DEBUG oslo_concurrency.lockutils [None req-b4d0a2e1-0649-414e-a6e9-a456893a9b36 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 975.613410] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-b4d0a2e1-0649-414e-a6e9-a456893a9b36 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 975.613642] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-255b85b8-b4c6-46bf-b957-7a15d889d0f3 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.630273] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-b4d0a2e1-0649-414e-a6e9-a456893a9b36 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 975.631490] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None 
req-b4d0a2e1-0649-414e-a6e9-a456893a9b36 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61839) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 975.631737] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-95539db7-c763-4a57-bcd8-08a1afb0f26e {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.643728] env[61839]: DEBUG oslo_vmware.api [None req-b4d0a2e1-0649-414e-a6e9-a456893a9b36 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Waiting for the task: (returnval){ [ 975.643728] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52155f02-0edd-808c-650f-9d82e296fd6b" [ 975.643728] env[61839]: _type = "Task" [ 975.643728] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 975.656968] env[61839]: DEBUG oslo_vmware.api [None req-b4d0a2e1-0649-414e-a6e9-a456893a9b36 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52155f02-0edd-808c-650f-9d82e296fd6b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.672017] env[61839]: DEBUG nova.compute.manager [req-c9a0a8a2-a718-4ed7-b1d1-3c1f98117e62 req-15588e2a-5e80-446b-a3be-7316138c05f7 service nova] [instance: 86525ea7-af75-4b10-85a1-c0fbab73ea5f] Received event network-vif-plugged-ef8176cf-7494-44f4-a600-7dedff162419 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 975.676337] env[61839]: DEBUG oslo_concurrency.lockutils [req-c9a0a8a2-a718-4ed7-b1d1-3c1f98117e62 req-15588e2a-5e80-446b-a3be-7316138c05f7 service nova] Acquiring lock "86525ea7-af75-4b10-85a1-c0fbab73ea5f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 975.676337] env[61839]: DEBUG oslo_concurrency.lockutils [req-c9a0a8a2-a718-4ed7-b1d1-3c1f98117e62 req-15588e2a-5e80-446b-a3be-7316138c05f7 service nova] Lock "86525ea7-af75-4b10-85a1-c0fbab73ea5f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 975.676337] env[61839]: DEBUG oslo_concurrency.lockutils [req-c9a0a8a2-a718-4ed7-b1d1-3c1f98117e62 req-15588e2a-5e80-446b-a3be-7316138c05f7 service nova] Lock "86525ea7-af75-4b10-85a1-c0fbab73ea5f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 975.676337] env[61839]: DEBUG nova.compute.manager [req-c9a0a8a2-a718-4ed7-b1d1-3c1f98117e62 req-15588e2a-5e80-446b-a3be-7316138c05f7 service nova] [instance: 86525ea7-af75-4b10-85a1-c0fbab73ea5f] No waiting events found dispatching network-vif-plugged-ef8176cf-7494-44f4-a600-7dedff162419 {{(pid=61839) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 975.676337] env[61839]: WARNING nova.compute.manager [req-c9a0a8a2-a718-4ed7-b1d1-3c1f98117e62 
req-15588e2a-5e80-446b-a3be-7316138c05f7 service nova] [instance: 86525ea7-af75-4b10-85a1-c0fbab73ea5f] Received unexpected event network-vif-plugged-ef8176cf-7494-44f4-a600-7dedff162419 for instance with vm_state shelved_offloaded and task_state spawning. [ 975.732651] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3eb64946-db7f-4575-9761-49a11077e028 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Acquiring lock "refresh_cache-86525ea7-af75-4b10-85a1-c0fbab73ea5f" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 975.735596] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3eb64946-db7f-4575-9761-49a11077e028 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Acquired lock "refresh_cache-86525ea7-af75-4b10-85a1-c0fbab73ea5f" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 975.735596] env[61839]: DEBUG nova.network.neutron [None req-3eb64946-db7f-4575-9761-49a11077e028 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] [instance: 86525ea7-af75-4b10-85a1-c0fbab73ea5f] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 975.820850] env[61839]: DEBUG oslo_concurrency.lockutils [req-d6a3d699-15f6-48a3-977d-8bcf8e0f3ab8 req-1e72ce84-391f-47b1-9205-d1cabea27891 service nova] Releasing lock "refresh_cache-3c832102-cacc-4dd8-a336-2aa1d8bd8116" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 975.844220] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59e79f2b-e2f5-4dd2-9465-8a79577c1e88 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.852301] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b2d7b25-0e62-4a1e-b2f9-1315be35e067 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.886302] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4832ca6-ed79-479a-ac8f-e7472c882da3 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.895403] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a664e8dd-10d5-4c61-a708-64afe2cab8d0 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.910329] env[61839]: DEBUG nova.compute.provider_tree [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 976.041347] env[61839]: DEBUG oslo_vmware.api [None req-162b3bf6-5d8f-4f91-9087-bf5f3018ad49 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Task: {'id': task-1314857, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.810856} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 976.041785] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-162b3bf6-5d8f-4f91-9087-bf5f3018ad49 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk to [datastore1] a262845a-0ae2-4e0e-9040-01f0ed37c95c/a262845a-0ae2-4e0e-9040-01f0ed37c95c.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 976.042157] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-162b3bf6-5d8f-4f91-9087-bf5f3018ad49 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] [instance: a262845a-0ae2-4e0e-9040-01f0ed37c95c] Extending root virtual disk to 1048576 {{(pid=61839) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 976.042543] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4117e243-6e04-487b-80b3-a3adec7aa871 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.057986] env[61839]: DEBUG oslo_vmware.api [None req-0abcef39-20e2-4d1f-9e70-67180de20850 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1314858, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.060018] env[61839]: DEBUG oslo_vmware.api [None req-162b3bf6-5d8f-4f91-9087-bf5f3018ad49 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Waiting for the task: (returnval){ [ 976.060018] env[61839]: value = "task-1314859" [ 976.060018] env[61839]: _type = "Task" [ 976.060018] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 976.071947] env[61839]: DEBUG oslo_vmware.api [None req-162b3bf6-5d8f-4f91-9087-bf5f3018ad49 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Task: {'id': task-1314859, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.106371] env[61839]: DEBUG oslo_concurrency.lockutils [None req-2110b186-7030-4148-a543-65a3916b2aa1 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Acquiring lock "refresh_cache-23ee24d5-bccd-497d-a53f-b9723fd9c707" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 976.106656] env[61839]: DEBUG oslo_concurrency.lockutils [None req-2110b186-7030-4148-a543-65a3916b2aa1 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Acquired lock "refresh_cache-23ee24d5-bccd-497d-a53f-b9723fd9c707" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 976.107184] env[61839]: DEBUG nova.network.neutron [None req-2110b186-7030-4148-a543-65a3916b2aa1 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 23ee24d5-bccd-497d-a53f-b9723fd9c707] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 976.155551] env[61839]: DEBUG oslo_vmware.api [None req-b4d0a2e1-0649-414e-a6e9-a456893a9b36 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52155f02-0edd-808c-650f-9d82e296fd6b, 'name': SearchDatastore_Task, 'duration_secs': 0.054344} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 976.156875] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c149f8cc-0527-461f-9a65-768a44b13ad7 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.162897] env[61839]: DEBUG oslo_vmware.api [None req-b4d0a2e1-0649-414e-a6e9-a456893a9b36 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Waiting for the task: (returnval){ [ 976.162897] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52119b9e-ec32-3470-0a61-9a249eff1be0" [ 976.162897] env[61839]: _type = "Task" [ 976.162897] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 976.172858] env[61839]: DEBUG oslo_vmware.api [None req-b4d0a2e1-0649-414e-a6e9-a456893a9b36 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52119b9e-ec32-3470-0a61-9a249eff1be0, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.413751] env[61839]: DEBUG nova.scheduler.client.report [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 976.498510] env[61839]: DEBUG nova.network.neutron [None req-3eb64946-db7f-4575-9761-49a11077e028 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] [instance: 86525ea7-af75-4b10-85a1-c0fbab73ea5f] Updating instance_info_cache with network_info: [{"id": "ef8176cf-7494-44f4-a600-7dedff162419", "address": "fa:16:3e:c1:30:b3", "network": {"id": "8bda8ac4-b34c-4577-ae5e-07845e9e7428", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-74282814-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c7a300fe2748456bb4a522a4d7c0d0f4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d1e1e320-ec56-4fcc-b6e9-30aa210d3b36", "external-id": "nsx-vlan-transportzone-447", "segmentation_id": 447, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapef8176cf-74", "ovs_interfaceid": "ef8176cf-7494-44f4-a600-7dedff162419", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 976.558075] env[61839]: DEBUG oslo_vmware.api [None req-0abcef39-20e2-4d1f-9e70-67180de20850 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1314858, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.570191] env[61839]: DEBUG oslo_vmware.api [None req-162b3bf6-5d8f-4f91-9087-bf5f3018ad49 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Task: {'id': task-1314859, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.408581} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 976.570486] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-162b3bf6-5d8f-4f91-9087-bf5f3018ad49 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] [instance: a262845a-0ae2-4e0e-9040-01f0ed37c95c] Extended root virtual disk {{(pid=61839) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 976.571309] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13ef70bc-7929-4912-88f5-eb873d787ff1 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.593932] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-162b3bf6-5d8f-4f91-9087-bf5f3018ad49 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] [instance: a262845a-0ae2-4e0e-9040-01f0ed37c95c] Reconfiguring VM instance instance-0000005f to attach disk [datastore1] a262845a-0ae2-4e0e-9040-01f0ed37c95c/a262845a-0ae2-4e0e-9040-01f0ed37c95c.vmdk or device None with type sparse {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 976.594345] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-11cbc27d-eb86-445a-912e-42e4e2dd8d7a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.618717] env[61839]: DEBUG oslo_vmware.api [None req-162b3bf6-5d8f-4f91-9087-bf5f3018ad49 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Waiting for the task: (returnval){ [ 976.618717] env[61839]: value = "task-1314860" [ 976.618717] env[61839]: _type = "Task" [ 976.618717] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 976.628098] env[61839]: DEBUG oslo_vmware.api [None req-162b3bf6-5d8f-4f91-9087-bf5f3018ad49 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Task: {'id': task-1314860, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.674446] env[61839]: DEBUG oslo_vmware.api [None req-b4d0a2e1-0649-414e-a6e9-a456893a9b36 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52119b9e-ec32-3470-0a61-9a249eff1be0, 'name': SearchDatastore_Task, 'duration_secs': 0.037654} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 976.674936] env[61839]: DEBUG oslo_concurrency.lockutils [None req-b4d0a2e1-0649-414e-a6e9-a456893a9b36 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 976.675061] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-b4d0a2e1-0649-414e-a6e9-a456893a9b36 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk to [datastore1] 3c832102-cacc-4dd8-a336-2aa1d8bd8116/3c832102-cacc-4dd8-a336-2aa1d8bd8116.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 976.675317] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c0b8d0ec-9a0b-44d2-ac4e-7307c6770289 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.684222] env[61839]: DEBUG oslo_vmware.api [None req-b4d0a2e1-0649-414e-a6e9-a456893a9b36 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Waiting for the task: (returnval){ [ 976.684222] env[61839]: value = "task-1314861" [ 976.684222] env[61839]: _type = "Task" [ 976.684222] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 976.695901] env[61839]: DEBUG oslo_vmware.api [None req-b4d0a2e1-0649-414e-a6e9-a456893a9b36 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': task-1314861, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.918642] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61839) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 976.918929] env[61839]: DEBUG oslo_concurrency.lockutils [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.948s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 976.919362] env[61839]: DEBUG oslo_concurrency.lockutils [None req-c0666967-91a2-4589-a6fb-7ee1067b5a38 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 13.989s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 976.919580] env[61839]: DEBUG oslo_concurrency.lockutils [None req-c0666967-91a2-4589-a6fb-7ee1067b5a38 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 976.922671] env[61839]: DEBUG oslo_concurrency.lockutils [None req-ed1f6438-e53f-428f-8070-2a7e88ba6407 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 13.008s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 976.923085] env[61839]: DEBUG oslo_concurrency.lockutils [None req-ed1f6438-e53f-428f-8070-2a7e88ba6407 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 976.925236] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0d4d06f1-d301-4bef-8b8f-4425a4530ba6 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 8.719s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 976.950281] env[61839]: INFO nova.scheduler.client.report [None req-ed1f6438-e53f-428f-8070-2a7e88ba6407 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Deleted allocations for instance 56369316-a445-4a2a-a0a6-967074104e19 [ 976.968126] env[61839]: INFO nova.scheduler.client.report [None req-c0666967-91a2-4589-a6fb-7ee1067b5a38 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Deleted allocations for instance 21e1c5b2-9865-457b-87c8-ce56c3c7b8f9 [ 977.001278] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3eb64946-db7f-4575-9761-49a11077e028 
tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Releasing lock "refresh_cache-86525ea7-af75-4b10-85a1-c0fbab73ea5f" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 977.036509] env[61839]: DEBUG nova.virt.hardware [None req-3eb64946-db7f-4575-9761-49a11077e028 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-18T16:51:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='7e1840d95d6e8cc0aa0ba5e8b801dd30',container_format='bare',created_at=2024-10-18T16:59:42Z,direct_url=,disk_format='vmdk',id=5fe38bf0-d665-4992-a6d0-c15bcea2316d,min_disk=1,min_ram=0,name='tempest-ServersNegativeTestJSON-server-584214621-shelved',owner='c7a300fe2748456bb4a522a4d7c0d0f4',properties=ImageMetaProps,protected=,size=31666176,status='active',tags=,updated_at=2024-10-18T16:59:56Z,virtual_size=,visibility=), allow threads: False {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 977.036975] env[61839]: DEBUG nova.virt.hardware [None req-3eb64946-db7f-4575-9761-49a11077e028 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Flavor limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 977.037315] env[61839]: DEBUG nova.virt.hardware [None req-3eb64946-db7f-4575-9761-49a11077e028 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Image limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 977.037670] env[61839]: DEBUG nova.virt.hardware [None req-3eb64946-db7f-4575-9761-49a11077e028 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Flavor pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 977.037981] env[61839]: DEBUG nova.virt.hardware [None req-3eb64946-db7f-4575-9761-49a11077e028 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Image pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 977.038310] env[61839]: DEBUG nova.virt.hardware [None req-3eb64946-db7f-4575-9761-49a11077e028 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 977.038710] env[61839]: DEBUG nova.virt.hardware [None req-3eb64946-db7f-4575-9761-49a11077e028 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 977.039040] env[61839]: DEBUG nova.virt.hardware [None req-3eb64946-db7f-4575-9761-49a11077e028 tempest-ServersNegativeTestJSON-610681102 
tempest-ServersNegativeTestJSON-610681102-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 977.039759] env[61839]: DEBUG nova.virt.hardware [None req-3eb64946-db7f-4575-9761-49a11077e028 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Got 1 possible topologies {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 977.039759] env[61839]: DEBUG nova.virt.hardware [None req-3eb64946-db7f-4575-9761-49a11077e028 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 977.040363] env[61839]: DEBUG nova.virt.hardware [None req-3eb64946-db7f-4575-9761-49a11077e028 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 977.041390] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a2fd96d-9995-4abc-9549-d7803f87133f {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.046608] env[61839]: DEBUG nova.network.neutron [None req-2110b186-7030-4148-a543-65a3916b2aa1 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 23ee24d5-bccd-497d-a53f-b9723fd9c707] Updating instance_info_cache with network_info: [{"id": "56222a14-6e55-4e9b-a963-5d868763ad21", "address": "fa:16:3e:5e:f1:1e", "network": {"id": "2334f355-8fd7-4df2-ba1c-a28b5fde97f2", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-202913707-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8935bcc7ee644cb7a2a33557a708189c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "37434b93-dfdc-4a3f-bf5a-9f2cbe25a754", "external-id": "nsx-vlan-transportzone-676", "segmentation_id": 676, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap56222a14-6e", "ovs_interfaceid": "56222a14-6e55-4e9b-a963-5d868763ad21", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 977.060073] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01ca4daf-4464-4095-a9e8-70824ab0144a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.072453] env[61839]: DEBUG oslo_vmware.api [None req-0abcef39-20e2-4d1f-9e70-67180de20850 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1314858, 
'name': ReconfigVM_Task, 'duration_secs': 1.190439} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 977.073879] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-0abcef39-20e2-4d1f-9e70-67180de20850 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 73b83239-bbc8-41d1-aec3-2b4519c320af] Reconfigured VM instance instance-00000060 to attach disk [datastore1] 73b83239-bbc8-41d1-aec3-2b4519c320af/73b83239-bbc8-41d1-aec3-2b4519c320af.vmdk or device None with type sparse {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 977.075314] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f87b7c1a-4b9c-4085-8670-ed9ee26a8479 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.091765] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-3eb64946-db7f-4575-9761-49a11077e028 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] [instance: 86525ea7-af75-4b10-85a1-c0fbab73ea5f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c1:30:b3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd1e1e320-ec56-4fcc-b6e9-30aa210d3b36', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ef8176cf-7494-44f4-a600-7dedff162419', 'vif_model': 'vmxnet3'}] {{(pid=61839) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 977.099629] env[61839]: DEBUG oslo.service.loopingcall [None req-3eb64946-db7f-4575-9761-49a11077e028 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 977.100767] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 86525ea7-af75-4b10-85a1-c0fbab73ea5f] Creating VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 977.101084] env[61839]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-37bb59af-cdb2-462d-8c97-d6fdace05861 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.118104] env[61839]: DEBUG oslo_vmware.api [None req-0abcef39-20e2-4d1f-9e70-67180de20850 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Waiting for the task: (returnval){ [ 977.118104] env[61839]: value = "task-1314862" [ 977.118104] env[61839]: _type = "Task" [ 977.118104] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 977.130719] env[61839]: DEBUG oslo_vmware.api [None req-0abcef39-20e2-4d1f-9e70-67180de20850 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1314862, 'name': Rename_Task} progress is 14%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.135023] env[61839]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 977.135023] env[61839]: value = "task-1314863" [ 977.135023] env[61839]: _type = "Task" [ 977.135023] env[61839]: } to complete. 
{{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 977.135481] env[61839]: DEBUG oslo_vmware.api [None req-162b3bf6-5d8f-4f91-9087-bf5f3018ad49 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Task: {'id': task-1314860, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.146257] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314863, 'name': CreateVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.195103] env[61839]: DEBUG oslo_vmware.api [None req-b4d0a2e1-0649-414e-a6e9-a456893a9b36 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': task-1314861, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.464055] env[61839]: DEBUG oslo_concurrency.lockutils [None req-ed1f6438-e53f-428f-8070-2a7e88ba6407 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Lock "56369316-a445-4a2a-a0a6-967074104e19" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 16.947s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 977.478764] env[61839]: DEBUG oslo_concurrency.lockutils [None req-c0666967-91a2-4589-a6fb-7ee1067b5a38 tempest-ImagesTestJSON-742018210 tempest-ImagesTestJSON-742018210-project-member] Lock "21e1c5b2-9865-457b-87c8-ce56c3c7b8f9" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 17.477s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 977.550016] env[61839]: DEBUG oslo_concurrency.lockutils [None req-2110b186-7030-4148-a543-65a3916b2aa1 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Releasing lock "refresh_cache-23ee24d5-bccd-497d-a53f-b9723fd9c707" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 977.636977] env[61839]: DEBUG oslo_vmware.api [None req-0abcef39-20e2-4d1f-9e70-67180de20850 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1314862, 'name': Rename_Task, 'duration_secs': 0.412565} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 977.643228] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-0abcef39-20e2-4d1f-9e70-67180de20850 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 73b83239-bbc8-41d1-aec3-2b4519c320af] Powering on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 977.643522] env[61839]: DEBUG oslo_vmware.api [None req-162b3bf6-5d8f-4f91-9087-bf5f3018ad49 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Task: {'id': task-1314860, 'name': ReconfigVM_Task, 'duration_secs': 0.916168} completed successfully.
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 977.645991] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-84b667dd-04ab-43f5-9ec6-793579381665 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.647600] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-162b3bf6-5d8f-4f91-9087-bf5f3018ad49 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] [instance: a262845a-0ae2-4e0e-9040-01f0ed37c95c] Reconfigured VM instance instance-0000005f to attach disk [datastore1] a262845a-0ae2-4e0e-9040-01f0ed37c95c/a262845a-0ae2-4e0e-9040-01f0ed37c95c.vmdk or device None with type sparse {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 977.648763] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7424b684-2c4d-4999-94fa-f2d938e949d4 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.653925] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314863, 'name': CreateVM_Task} progress is 99%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.655203] env[61839]: DEBUG oslo_vmware.api [None req-0abcef39-20e2-4d1f-9e70-67180de20850 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Waiting for the task: (returnval){ [ 977.655203] env[61839]: value = "task-1314864" [ 977.655203] env[61839]: _type = "Task" [ 977.655203] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 977.661280] env[61839]: DEBUG oslo_vmware.api [None req-162b3bf6-5d8f-4f91-9087-bf5f3018ad49 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Waiting for the task: (returnval){ [ 977.661280] env[61839]: value = "task-1314865" [ 977.661280] env[61839]: _type = "Task" [ 977.661280] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 977.668226] env[61839]: DEBUG oslo_vmware.api [None req-0abcef39-20e2-4d1f-9e70-67180de20850 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1314864, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.672590] env[61839]: DEBUG oslo_vmware.api [None req-162b3bf6-5d8f-4f91-9087-bf5f3018ad49 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Task: {'id': task-1314865, 'name': Rename_Task} progress is 6%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.674419] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f90373e-e050-4998-8691-033f3d3e8eda {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.682578] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46a2f655-8e9d-43d4-917b-c3327592f6f8 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.695452] env[61839]: DEBUG oslo_vmware.api [None req-b4d0a2e1-0649-414e-a6e9-a456893a9b36 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': task-1314861, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.79587} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 977.723331] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-b4d0a2e1-0649-414e-a6e9-a456893a9b36 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk to [datastore1] 3c832102-cacc-4dd8-a336-2aa1d8bd8116/3c832102-cacc-4dd8-a336-2aa1d8bd8116.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 977.723608] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-b4d0a2e1-0649-414e-a6e9-a456893a9b36 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 3c832102-cacc-4dd8-a336-2aa1d8bd8116] Extending root virtual disk to 1048576 {{(pid=61839) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 977.725327] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f5ef17f4-0bae-4ec4-bf66-ace77d94c5b0 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.728013] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83f7e1d1-a2ac-40c9-baac-f23604876898 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.731633] env[61839]: DEBUG nova.compute.manager [req-2ebc95d5-6fbf-400e-a4d5-09845f65493a req-164dd290-0799-4173-a0a4-243576813fd4 service nova] [instance: 86525ea7-af75-4b10-85a1-c0fbab73ea5f] Received event network-changed-ef8176cf-7494-44f4-a600-7dedff162419 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 977.731826] env[61839]: DEBUG nova.compute.manager [req-2ebc95d5-6fbf-400e-a4d5-09845f65493a req-164dd290-0799-4173-a0a4-243576813fd4 service nova] [instance: 86525ea7-af75-4b10-85a1-c0fbab73ea5f] Refreshing instance network info cache due to event network-changed-ef8176cf-7494-44f4-a600-7dedff162419. 
{{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 977.732060] env[61839]: DEBUG oslo_concurrency.lockutils [req-2ebc95d5-6fbf-400e-a4d5-09845f65493a req-164dd290-0799-4173-a0a4-243576813fd4 service nova] Acquiring lock "refresh_cache-86525ea7-af75-4b10-85a1-c0fbab73ea5f" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 977.732222] env[61839]: DEBUG oslo_concurrency.lockutils [req-2ebc95d5-6fbf-400e-a4d5-09845f65493a req-164dd290-0799-4173-a0a4-243576813fd4 service nova] Acquired lock "refresh_cache-86525ea7-af75-4b10-85a1-c0fbab73ea5f" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 977.732393] env[61839]: DEBUG nova.network.neutron [req-2ebc95d5-6fbf-400e-a4d5-09845f65493a req-164dd290-0799-4173-a0a4-243576813fd4 service nova] [instance: 86525ea7-af75-4b10-85a1-c0fbab73ea5f] Refreshing network info cache for port ef8176cf-7494-44f4-a600-7dedff162419 {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 977.742868] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15a26052-d165-4e21-8665-c30ad6c7a796 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.747781] env[61839]: DEBUG oslo_vmware.api [None req-b4d0a2e1-0649-414e-a6e9-a456893a9b36 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Waiting for the task: (returnval){ [ 977.747781] env[61839]: value = "task-1314866" [ 977.747781] env[61839]: _type = "Task" [ 977.747781] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 977.763476] env[61839]: DEBUG nova.compute.provider_tree [None req-0d4d06f1-d301-4bef-8b8f-4425a4530ba6 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 977.768441] env[61839]: DEBUG oslo_vmware.api [None req-b4d0a2e1-0649-414e-a6e9-a456893a9b36 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': task-1314866, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.077927] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cf2a31e-495e-47e7-8706-0f0c92099dec {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.100296] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4deee98-1daf-41e3-8f38-5510a8a9f6b4 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.109746] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-2110b186-7030-4148-a543-65a3916b2aa1 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 23ee24d5-bccd-497d-a53f-b9723fd9c707] Updating instance '23ee24d5-bccd-497d-a53f-b9723fd9c707' progress to 83 {{(pid=61839) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 978.150282] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314863, 'name': CreateVM_Task, 'duration_secs': 0.638058} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 978.150423] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 86525ea7-af75-4b10-85a1-c0fbab73ea5f] Created VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 978.151587] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3eb64946-db7f-4575-9761-49a11077e028 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/5fe38bf0-d665-4992-a6d0-c15bcea2316d" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 978.151836] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3eb64946-db7f-4575-9761-49a11077e028 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Acquired lock "[datastore1] devstack-image-cache_base/5fe38bf0-d665-4992-a6d0-c15bcea2316d" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 978.152396] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3eb64946-db7f-4575-9761-49a11077e028 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/5fe38bf0-d665-4992-a6d0-c15bcea2316d" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 978.152717] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ff28a667-a4d1-4f01-9e09-17a2325d3d32 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.161278] env[61839]: DEBUG oslo_vmware.api [None req-3eb64946-db7f-4575-9761-49a11077e028 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Waiting for the task: (returnval){ [ 978.161278] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52a798e7-e0e1-d02a-12fd-d75a57b8fafd" [ 978.161278] env[61839]: _type = "Task" [ 978.161278] env[61839]: } to complete. 
{{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 978.182899] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3eb64946-db7f-4575-9761-49a11077e028 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Releasing lock "[datastore1] devstack-image-cache_base/5fe38bf0-d665-4992-a6d0-c15bcea2316d" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 978.183270] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-3eb64946-db7f-4575-9761-49a11077e028 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] [instance: 86525ea7-af75-4b10-85a1-c0fbab73ea5f] Processing image 5fe38bf0-d665-4992-a6d0-c15bcea2316d {{(pid=61839) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 978.183565] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3eb64946-db7f-4575-9761-49a11077e028 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/5fe38bf0-d665-4992-a6d0-c15bcea2316d/5fe38bf0-d665-4992-a6d0-c15bcea2316d.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 978.183763] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3eb64946-db7f-4575-9761-49a11077e028 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Acquired lock "[datastore1] devstack-image-cache_base/5fe38bf0-d665-4992-a6d0-c15bcea2316d/5fe38bf0-d665-4992-a6d0-c15bcea2316d.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 978.183958] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-3eb64946-db7f-4575-9761-49a11077e028 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 978.184649] env[61839]: DEBUG oslo_vmware.api [None req-0abcef39-20e2-4d1f-9e70-67180de20850 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1314864, 'name': PowerOnVM_Task, 'duration_secs': 0.51931} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 978.184981] env[61839]: DEBUG oslo_vmware.api [None req-162b3bf6-5d8f-4f91-9087-bf5f3018ad49 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Task: {'id': task-1314865, 'name': Rename_Task, 'duration_secs': 0.173845} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 978.185264] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7d8558de-7400-44cc-95e2-b9d4b0d16e6a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.187396] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-0abcef39-20e2-4d1f-9e70-67180de20850 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 73b83239-bbc8-41d1-aec3-2b4519c320af] Powered on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 978.187659] env[61839]: INFO nova.compute.manager [None req-0abcef39-20e2-4d1f-9e70-67180de20850 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 73b83239-bbc8-41d1-aec3-2b4519c320af] Took 7.98 seconds to spawn the instance on the hypervisor. [ 978.187885] env[61839]: DEBUG nova.compute.manager [None req-0abcef39-20e2-4d1f-9e70-67180de20850 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 73b83239-bbc8-41d1-aec3-2b4519c320af] Checking state {{(pid=61839) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 978.188236] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-162b3bf6-5d8f-4f91-9087-bf5f3018ad49 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] [instance: a262845a-0ae2-4e0e-9040-01f0ed37c95c] Powering on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 978.189656] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d890e60-9a88-4e5a-a6a9-e041c82e1176 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.191742] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-28956f63-ed77-4bcf-ac92-3aa2df4af23b {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.203465] env[61839]: DEBUG oslo_vmware.api [None req-162b3bf6-5d8f-4f91-9087-bf5f3018ad49 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Waiting for the task: (returnval){ [ 978.203465] env[61839]: value = "task-1314867" [ 978.203465] env[61839]: _type = "Task" [ 978.203465] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 978.209591] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-3eb64946-db7f-4575-9761-49a11077e028 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 978.209746] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-3eb64946-db7f-4575-9761-49a11077e028 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61839) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 978.217116] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e2c6ef17-0d44-4fa7-89fa-b8017d2822e0 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.223021] env[61839]: DEBUG oslo_vmware.api [None req-162b3bf6-5d8f-4f91-9087-bf5f3018ad49 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Task: {'id': task-1314867, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.223834] env[61839]: DEBUG oslo_vmware.api [None req-3eb64946-db7f-4575-9761-49a11077e028 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Waiting for the task: (returnval){ [ 978.223834] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52efcd21-d64d-11ba-afcd-6903587e9e08" [ 978.223834] env[61839]: _type = "Task" [ 978.223834] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 978.233788] env[61839]: DEBUG oslo_vmware.api [None req-3eb64946-db7f-4575-9761-49a11077e028 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52efcd21-d64d-11ba-afcd-6903587e9e08, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.260361] env[61839]: DEBUG oslo_vmware.api [None req-b4d0a2e1-0649-414e-a6e9-a456893a9b36 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': task-1314866, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.10921} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 978.260649] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-b4d0a2e1-0649-414e-a6e9-a456893a9b36 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 3c832102-cacc-4dd8-a336-2aa1d8bd8116] Extended root virtual disk {{(pid=61839) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 978.261434] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41fb1859-44f4-4720-92e4-14fcf4f7678a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.277600] env[61839]: DEBUG nova.scheduler.client.report [None req-0d4d06f1-d301-4bef-8b8f-4425a4530ba6 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 978.291093] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-b4d0a2e1-0649-414e-a6e9-a456893a9b36 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 3c832102-cacc-4dd8-a336-2aa1d8bd8116] Reconfiguring VM instance instance-00000061 to attach disk [datastore1] 3c832102-cacc-4dd8-a336-2aa1d8bd8116/3c832102-cacc-4dd8-a336-2aa1d8bd8116.vmdk or device None with type sparse {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 978.293983] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6f721f04-4f1a-46a2-8b2e-488d50473c1d {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.317524] env[61839]: DEBUG oslo_vmware.api [None req-b4d0a2e1-0649-414e-a6e9-a456893a9b36 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Waiting for the task: (returnval){ [ 978.317524] env[61839]: value = "task-1314868" [ 978.317524] env[61839]: _type = "Task" [ 978.317524] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 978.327584] env[61839]: DEBUG oslo_vmware.api [None req-b4d0a2e1-0649-414e-a6e9-a456893a9b36 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': task-1314868, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.598206] env[61839]: DEBUG nova.network.neutron [req-2ebc95d5-6fbf-400e-a4d5-09845f65493a req-164dd290-0799-4173-a0a4-243576813fd4 service nova] [instance: 86525ea7-af75-4b10-85a1-c0fbab73ea5f] Updated VIF entry in instance network info cache for port ef8176cf-7494-44f4-a600-7dedff162419. 
{{(pid=61839) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 978.598598] env[61839]: DEBUG nova.network.neutron [req-2ebc95d5-6fbf-400e-a4d5-09845f65493a req-164dd290-0799-4173-a0a4-243576813fd4 service nova] [instance: 86525ea7-af75-4b10-85a1-c0fbab73ea5f] Updating instance_info_cache with network_info: [{"id": "ef8176cf-7494-44f4-a600-7dedff162419", "address": "fa:16:3e:c1:30:b3", "network": {"id": "8bda8ac4-b34c-4577-ae5e-07845e9e7428", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-74282814-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c7a300fe2748456bb4a522a4d7c0d0f4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d1e1e320-ec56-4fcc-b6e9-30aa210d3b36", "external-id": "nsx-vlan-transportzone-447", "segmentation_id": 447, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapef8176cf-74", "ovs_interfaceid": "ef8176cf-7494-44f4-a600-7dedff162419", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 978.618494] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-2110b186-7030-4148-a543-65a3916b2aa1 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 23ee24d5-bccd-497d-a53f-b9723fd9c707] Powering on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 978.619099] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ab0de9ce-951e-4f37-be11-d9723b058375 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.628041] env[61839]: DEBUG oslo_vmware.api [None req-2110b186-7030-4148-a543-65a3916b2aa1 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Waiting for the task: (returnval){ [ 978.628041] env[61839]: value = "task-1314870" [ 978.628041] env[61839]: _type = "Task" [ 978.628041] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 978.640365] env[61839]: DEBUG oslo_vmware.api [None req-2110b186-7030-4148-a543-65a3916b2aa1 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Task: {'id': task-1314870, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.712137] env[61839]: INFO nova.compute.manager [None req-0abcef39-20e2-4d1f-9e70-67180de20850 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 73b83239-bbc8-41d1-aec3-2b4519c320af] Took 20.69 seconds to build instance. 
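The entries above and below trace the same asynchronous pattern throughout this spawn sequence: a vSphere method whose name ends in _Task (SearchDatastore_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, CreateVM_Task, PowerOnVM_Task) returns a Task managed object immediately, and oslo_vmware's wait_for_task/_poll_task loop polls it until completion, emitting the recurring "Waiting for the task", "progress is N%", and "completed successfully" lines. A minimal sketch of that pattern against the public oslo_vmware API, assuming a reachable vCenter; the endpoint, credentials, and vm_ref below are placeholders rather than values from this deployment:

    from oslo_vmware import api

    # Placeholder endpoint and credentials, not this deployment's.
    session = api.VMwareAPISession(
        'vc1.example.test',             # vCenter host (placeholder)
        'administrator@vsphere.local',  # username (placeholder)
        'secret',                       # password (placeholder)
        10,                             # api_retry_count
        0.5)                            # task_poll_interval, in seconds

    vm_ref = ...  # a VirtualMachine managed-object reference obtained elsewhere

    # *_Task methods return a Task reference instead of blocking.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)

    # wait_for_task() polls the TaskInfo, logging "progress is N%" while the
    # task runs, and returns the completed TaskInfo (raising if it ends in
    # an error state).
    task_info = session.wait_for_task(task)

The session[...]-prefixed identifiers in the SearchDatastore_Task entries go through the same polling loop; only the task-key format differs from the task-NNNNNNN ones.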
[ 978.724677] env[61839]: DEBUG oslo_vmware.api [None req-162b3bf6-5d8f-4f91-9087-bf5f3018ad49 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Task: {'id': task-1314867, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.733862] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-3eb64946-db7f-4575-9761-49a11077e028 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] [instance: 86525ea7-af75-4b10-85a1-c0fbab73ea5f] Preparing fetch location {{(pid=61839) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 978.733862] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-3eb64946-db7f-4575-9761-49a11077e028 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] [instance: 86525ea7-af75-4b10-85a1-c0fbab73ea5f] Fetch image to [datastore1] OSTACK_IMG_9a9a8aa9-bf08-45d3-9f3f-656fb9b22c21/OSTACK_IMG_9a9a8aa9-bf08-45d3-9f3f-656fb9b22c21.vmdk {{(pid=61839) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 978.734144] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-3eb64946-db7f-4575-9761-49a11077e028 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] [instance: 86525ea7-af75-4b10-85a1-c0fbab73ea5f] Downloading stream optimized image 5fe38bf0-d665-4992-a6d0-c15bcea2316d to [datastore1] OSTACK_IMG_9a9a8aa9-bf08-45d3-9f3f-656fb9b22c21/OSTACK_IMG_9a9a8aa9-bf08-45d3-9f3f-656fb9b22c21.vmdk on the data store datastore1 as vApp {{(pid=61839) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 978.735207] env[61839]: DEBUG nova.virt.vmwareapi.images [None req-3eb64946-db7f-4575-9761-49a11077e028 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] [instance: 86525ea7-af75-4b10-85a1-c0fbab73ea5f] Downloading image file data 5fe38bf0-d665-4992-a6d0-c15bcea2316d to the ESX as VM named 'OSTACK_IMG_9a9a8aa9-bf08-45d3-9f3f-656fb9b22c21' {{(pid=61839) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 978.811673] env[61839]: DEBUG oslo_vmware.rw_handles [None req-3eb64946-db7f-4575-9761-49a11077e028 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 978.811673] env[61839]: value = "resgroup-9" [ 978.811673] env[61839]: _type = "ResourcePool" [ 978.811673] env[61839]: }. {{(pid=61839) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 978.812341] env[61839]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-92125a0a-699e-48a6-917b-4238e8fa323c {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.837758] env[61839]: DEBUG oslo_vmware.api [None req-b4d0a2e1-0649-414e-a6e9-a456893a9b36 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': task-1314868, 'name': ReconfigVM_Task, 'duration_secs': 0.337258} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 978.838987] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-b4d0a2e1-0649-414e-a6e9-a456893a9b36 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 3c832102-cacc-4dd8-a336-2aa1d8bd8116] Reconfigured VM instance instance-00000061 to attach disk [datastore1] 3c832102-cacc-4dd8-a336-2aa1d8bd8116/3c832102-cacc-4dd8-a336-2aa1d8bd8116.vmdk or device None with type sparse {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 978.839703] env[61839]: DEBUG oslo_vmware.rw_handles [None req-3eb64946-db7f-4575-9761-49a11077e028 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Lease: (returnval){ [ 978.839703] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52ec0316-b72d-2f5f-a1a1-6d60c2fd39d2" [ 978.839703] env[61839]: _type = "HttpNfcLease" [ 978.839703] env[61839]: } obtained for vApp import into resource pool (val){ [ 978.839703] env[61839]: value = "resgroup-9" [ 978.839703] env[61839]: _type = "ResourcePool" [ 978.839703] env[61839]: }. {{(pid=61839) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 978.840018] env[61839]: DEBUG oslo_vmware.api [None req-3eb64946-db7f-4575-9761-49a11077e028 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Waiting for the lease: (returnval){ [ 978.840018] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52ec0316-b72d-2f5f-a1a1-6d60c2fd39d2" [ 978.840018] env[61839]: _type = "HttpNfcLease" [ 978.840018] env[61839]: } to be ready. {{(pid=61839) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 978.840157] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-bc6911e2-2f01-4e26-ab10-f424b9a8ff64 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.848215] env[61839]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 978.848215] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52ec0316-b72d-2f5f-a1a1-6d60c2fd39d2" [ 978.848215] env[61839]: _type = "HttpNfcLease" [ 978.848215] env[61839]: } is initializing. {{(pid=61839) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 978.849539] env[61839]: DEBUG oslo_vmware.api [None req-b4d0a2e1-0649-414e-a6e9-a456893a9b36 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Waiting for the task: (returnval){ [ 978.849539] env[61839]: value = "task-1314872" [ 978.849539] env[61839]: _type = "Task" [ 978.849539] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 978.859822] env[61839]: DEBUG oslo_vmware.api [None req-b4d0a2e1-0649-414e-a6e9-a456893a9b36 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': task-1314872, 'name': Rename_Task} progress is 5%. 
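[annotation] The recurring "Waiting for the task" / "progress is N%" / "completed successfully" triples in these records are oslo.vmware polling a vCenter task object until it reaches a terminal state. A stripped-down sketch of that polling pattern, assuming a FakeTask stand-in; the real loop lives in oslo_vmware/api.py and reads TaskInfo via a PropertyCollector:

import time

# Sketch of the wait_for_task polling loop reflected in the records
# above. FakeTask and poll_task are illustrative stand-ins only.
class FakeTask:
    def __init__(self):
        self._progress = 0
    def info(self):
        # A real loop would fetch TaskInfo from vCenter here.
        self._progress = min(self._progress + 33, 100)
        state = 'success' if self._progress >= 100 else 'running'
        return {'state': state, 'progress': self._progress}

def poll_task(task, interval=0.5):
    while True:
        info = task.info()
        print("progress is %d%%" % info['progress'])
        if info['state'] == 'success':
            return info
        if info['state'] == 'error':
            raise RuntimeError('task failed')
        time.sleep(interval)

poll_task(FakeTask())  # prints 33%, 66%, 99%, 100%

The fixed poll interval is why the log shows each task stepping through a few discrete progress values (0%, 5%, 66%, ...) rather than a continuous ramp.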
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 979.101992] env[61839]: DEBUG oslo_concurrency.lockutils [req-2ebc95d5-6fbf-400e-a4d5-09845f65493a req-164dd290-0799-4173-a0a4-243576813fd4 service nova] Releasing lock "refresh_cache-86525ea7-af75-4b10-85a1-c0fbab73ea5f" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 979.139807] env[61839]: DEBUG oslo_vmware.api [None req-2110b186-7030-4148-a543-65a3916b2aa1 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Task: {'id': task-1314870, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 979.218345] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0abcef39-20e2-4d1f-9e70-67180de20850 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Lock "73b83239-bbc8-41d1-aec3-2b4519c320af" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.214s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 979.219027] env[61839]: DEBUG oslo_vmware.api [None req-162b3bf6-5d8f-4f91-9087-bf5f3018ad49 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Task: {'id': task-1314867, 'name': PowerOnVM_Task, 'duration_secs': 0.861751} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 979.219854] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-162b3bf6-5d8f-4f91-9087-bf5f3018ad49 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] [instance: a262845a-0ae2-4e0e-9040-01f0ed37c95c] Powered on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 979.220625] env[61839]: DEBUG nova.compute.manager [None req-162b3bf6-5d8f-4f91-9087-bf5f3018ad49 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] [instance: a262845a-0ae2-4e0e-9040-01f0ed37c95c] Checking state {{(pid=61839) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 979.221816] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e396ab83-8274-4e0b-bbb3-d10aca16730c {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.299220] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0d4d06f1-d301-4bef-8b8f-4425a4530ba6 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.373s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 979.299220] env[61839]: DEBUG nova.compute.manager [None req-0d4d06f1-d301-4bef-8b8f-4425a4530ba6 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4] Resized/migrated instance is powered off. Setting vm_state to 'stopped'. 
{{(pid=61839) _confirm_resize /opt/stack/nova/nova/compute/manager.py:4909}} [ 979.302522] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3560b6a3-38e1-4082-80dc-ba1e60a79e5a tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.085s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 979.304509] env[61839]: INFO nova.compute.claims [None req-3560b6a3-38e1-4082-80dc-ba1e60a79e5a tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: a87f3a17-0a97-4b47-bc95-eee5975f8203] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 979.352026] env[61839]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 979.352026] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52ec0316-b72d-2f5f-a1a1-6d60c2fd39d2" [ 979.352026] env[61839]: _type = "HttpNfcLease" [ 979.352026] env[61839]: } is initializing. {{(pid=61839) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 979.362077] env[61839]: DEBUG oslo_vmware.api [None req-b4d0a2e1-0649-414e-a6e9-a456893a9b36 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': task-1314872, 'name': Rename_Task, 'duration_secs': 0.170672} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 979.362637] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-b4d0a2e1-0649-414e-a6e9-a456893a9b36 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 3c832102-cacc-4dd8-a336-2aa1d8bd8116] Powering on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 979.363867] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-015d28bb-ec61-47c5-9fe9-a023ca00a878 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.371883] env[61839]: DEBUG oslo_vmware.api [None req-b4d0a2e1-0649-414e-a6e9-a456893a9b36 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Waiting for the task: (returnval){ [ 979.371883] env[61839]: value = "task-1314873" [ 979.371883] env[61839]: _type = "Task" [ 979.371883] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 979.383132] env[61839]: DEBUG oslo_vmware.api [None req-b4d0a2e1-0649-414e-a6e9-a456893a9b36 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': task-1314873, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 979.640099] env[61839]: DEBUG oslo_vmware.api [None req-2110b186-7030-4148-a543-65a3916b2aa1 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Task: {'id': task-1314870, 'name': PowerOnVM_Task, 'duration_secs': 0.784141} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 979.640848] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-2110b186-7030-4148-a543-65a3916b2aa1 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 23ee24d5-bccd-497d-a53f-b9723fd9c707] Powered on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 979.641201] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-2110b186-7030-4148-a543-65a3916b2aa1 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 23ee24d5-bccd-497d-a53f-b9723fd9c707] Updating instance '23ee24d5-bccd-497d-a53f-b9723fd9c707' progress to 100 {{(pid=61839) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 979.739548] env[61839]: DEBUG oslo_concurrency.lockutils [None req-162b3bf6-5d8f-4f91-9087-bf5f3018ad49 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 979.855796] env[61839]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 979.855796] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52ec0316-b72d-2f5f-a1a1-6d60c2fd39d2" [ 979.855796] env[61839]: _type = "HttpNfcLease" [ 979.855796] env[61839]: } is initializing. {{(pid=61839) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 979.885142] env[61839]: DEBUG oslo_vmware.api [None req-b4d0a2e1-0649-414e-a6e9-a456893a9b36 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': task-1314873, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 979.891368] env[61839]: INFO nova.scheduler.client.report [None req-0d4d06f1-d301-4bef-8b8f-4425a4530ba6 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Deleted allocation for migration ede2e43c-56ba-4f49-9646-56aeac32d3ea [ 980.355624] env[61839]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 980.355624] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52ec0316-b72d-2f5f-a1a1-6d60c2fd39d2" [ 980.355624] env[61839]: _type = "HttpNfcLease" [ 980.355624] env[61839]: } is ready. {{(pid=61839) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 980.355624] env[61839]: DEBUG oslo_vmware.rw_handles [None req-3eb64946-db7f-4575-9761-49a11077e028 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 980.355624] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52ec0316-b72d-2f5f-a1a1-6d60c2fd39d2" [ 980.355624] env[61839]: _type = "HttpNfcLease" [ 980.355624] env[61839]: }. 
{{(pid=61839) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 980.355624] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd0d2cc7-0c67-4842-a072-30f2d0bc94f8 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.364658] env[61839]: DEBUG oslo_vmware.rw_handles [None req-3eb64946-db7f-4575-9761-49a11077e028 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52384687-7e60-37d9-6d9b-d11af8e582c2/disk-0.vmdk from lease info. {{(pid=61839) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 980.364894] env[61839]: DEBUG oslo_vmware.rw_handles [None req-3eb64946-db7f-4575-9761-49a11077e028 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Creating HTTP connection to write to file with size = 31666176 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52384687-7e60-37d9-6d9b-d11af8e582c2/disk-0.vmdk. {{(pid=61839) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 980.427327] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0d4d06f1-d301-4bef-8b8f-4425a4530ba6 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Lock "fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 15.798s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 980.428149] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0e245029-295d-452a-a1bf-1e5628215f04 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Acquiring lock "73b83239-bbc8-41d1-aec3-2b4519c320af" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 980.428324] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0e245029-295d-452a-a1bf-1e5628215f04 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Lock "73b83239-bbc8-41d1-aec3-2b4519c320af" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 980.428452] env[61839]: DEBUG nova.compute.manager [None req-0e245029-295d-452a-a1bf-1e5628215f04 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 73b83239-bbc8-41d1-aec3-2b4519c320af] Checking state {{(pid=61839) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 980.442317] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e34d3437-e9f9-4b63-aafc-1b52c6b046d3 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.463150] env[61839]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-b1f57a57-72d3-4589-a534-cc0473978a83 {{(pid=61839) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.465561] env[61839]: DEBUG oslo_vmware.api [None req-b4d0a2e1-0649-414e-a6e9-a456893a9b36 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': task-1314873, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 980.465839] env[61839]: DEBUG nova.compute.manager [None req-0e245029-295d-452a-a1bf-1e5628215f04 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 73b83239-bbc8-41d1-aec3-2b4519c320af] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=61839) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3368}} [ 980.466697] env[61839]: DEBUG nova.objects.instance [None req-0e245029-295d-452a-a1bf-1e5628215f04 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Lazy-loading 'flavor' on Instance uuid 73b83239-bbc8-41d1-aec3-2b4519c320af {{(pid=61839) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 980.737329] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc2e4255-bdc3-43aa-8e17-01d9c915f06f {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.752812] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2e19372-fade-4ec2-84f6-14873543bc09 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.799261] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0902fb04-dda4-4582-abcb-2e18bce73860 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.810743] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53fa234e-dd7e-4b6c-9ccb-041fa1d69fac {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.827892] env[61839]: DEBUG nova.compute.provider_tree [None req-3560b6a3-38e1-4082-80dc-ba1e60a79e5a tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 980.939417] env[61839]: DEBUG oslo_vmware.api [None req-b4d0a2e1-0649-414e-a6e9-a456893a9b36 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': task-1314873, 'name': PowerOnVM_Task, 'duration_secs': 1.308398} completed successfully. 
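[annotation] The records from the ImportVApp call onward trace the HttpNfcLease lifecycle for a stream-optimized image import: obtain a lease, poll it out of "initializing" into "ready", read the VMDK upload URL from the lease info, stream the image bytes while periodically reporting HttpNfcLeaseProgress, then complete the lease. A schematic sketch of that flow, assuming stub objects; the real calls go through oslo.vmware's SOAP client and an HTTPS write handle:

# Schematic sketch of the HttpNfcLease flow in the records above.
# FakeLease and import_image are illustrative stubs, and the URL is
# hypothetical; real code PUTs the bytes to the lease's VMDK URL.
class FakeLease:
    def __init__(self):
        self._polls = 0
    def state(self):
        self._polls += 1
        return 'ready' if self._polls >= 3 else 'initializing'
    def vmdk_url(self):
        return 'https://esx.example.test/nfc/xxx/disk-0.vmdk'
    def progress(self, pct):
        print('lease progress', pct)
    def complete(self):
        print('lease complete')

def import_image(lease, chunks, total):
    while lease.state() != 'ready':   # "Lease ... is initializing."
        pass
    url, sent = lease.vmdk_url(), 0   # "Found VMDK URL ... from lease info."
    for chunk in chunks:              # stream the image data
        sent += len(chunk)
        lease.progress(sent * 100 // total)
    lease.complete()                  # HttpNfcLeaseComplete in the log

import_image(FakeLease(), [b'x' * 1024] * 4, 4096)

Reporting progress as the upload proceeds is not cosmetic: an idle HttpNfcLease times out on the vCenter side, so the periodic HttpNfcLeaseProgress calls also keep the lease alive.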
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 980.939764] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-b4d0a2e1-0649-414e-a6e9-a456893a9b36 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 3c832102-cacc-4dd8-a336-2aa1d8bd8116] Powered on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 980.940020] env[61839]: INFO nova.compute.manager [None req-b4d0a2e1-0649-414e-a6e9-a456893a9b36 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 3c832102-cacc-4dd8-a336-2aa1d8bd8116] Took 8.36 seconds to spawn the instance on the hypervisor. [ 980.940231] env[61839]: DEBUG nova.compute.manager [None req-b4d0a2e1-0649-414e-a6e9-a456893a9b36 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 3c832102-cacc-4dd8-a336-2aa1d8bd8116] Checking state {{(pid=61839) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 980.942719] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-491e2774-3845-4520-ab32-f288d90d32fa {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.978058] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e245029-295d-452a-a1bf-1e5628215f04 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 73b83239-bbc8-41d1-aec3-2b4519c320af] Powering off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 980.978352] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-89122328-eb9a-4bcd-be4a-c1ca9d839ee2 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.987624] env[61839]: DEBUG oslo_vmware.api [None req-0e245029-295d-452a-a1bf-1e5628215f04 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Waiting for the task: (returnval){ [ 980.987624] env[61839]: value = "task-1314874" [ 980.987624] env[61839]: _type = "Task" [ 980.987624] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 980.997821] env[61839]: DEBUG oslo_vmware.api [None req-0e245029-295d-452a-a1bf-1e5628215f04 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1314874, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.097514] env[61839]: DEBUG nova.objects.instance [None req-8de975af-2807-4306-bdbb-44d27e8fab2e tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Lazy-loading 'flavor' on Instance uuid fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4 {{(pid=61839) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 981.135190] env[61839]: DEBUG oslo_concurrency.lockutils [None req-89dd6587-ff08-49b7-ad7f-f8fceec9456a tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Acquiring lock "a262845a-0ae2-4e0e-9040-01f0ed37c95c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 981.135652] env[61839]: DEBUG oslo_concurrency.lockutils [None req-89dd6587-ff08-49b7-ad7f-f8fceec9456a tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Lock "a262845a-0ae2-4e0e-9040-01f0ed37c95c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 981.136036] env[61839]: DEBUG oslo_concurrency.lockutils [None req-89dd6587-ff08-49b7-ad7f-f8fceec9456a tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Acquiring lock "a262845a-0ae2-4e0e-9040-01f0ed37c95c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 981.136520] env[61839]: DEBUG oslo_concurrency.lockutils [None req-89dd6587-ff08-49b7-ad7f-f8fceec9456a tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Lock "a262845a-0ae2-4e0e-9040-01f0ed37c95c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 981.136666] env[61839]: DEBUG oslo_concurrency.lockutils [None req-89dd6587-ff08-49b7-ad7f-f8fceec9456a tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Lock "a262845a-0ae2-4e0e-9040-01f0ed37c95c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 981.139830] env[61839]: INFO nova.compute.manager [None req-89dd6587-ff08-49b7-ad7f-f8fceec9456a tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] [instance: a262845a-0ae2-4e0e-9040-01f0ed37c95c] Terminating instance [ 981.141964] env[61839]: DEBUG oslo_concurrency.lockutils [None req-89dd6587-ff08-49b7-ad7f-f8fceec9456a tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Acquiring lock "refresh_cache-a262845a-0ae2-4e0e-9040-01f0ed37c95c" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 981.141964] env[61839]: DEBUG oslo_concurrency.lockutils [None req-89dd6587-ff08-49b7-ad7f-f8fceec9456a tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Acquired lock 
"refresh_cache-a262845a-0ae2-4e0e-9040-01f0ed37c95c" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 981.142327] env[61839]: DEBUG nova.network.neutron [None req-89dd6587-ff08-49b7-ad7f-f8fceec9456a tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] [instance: a262845a-0ae2-4e0e-9040-01f0ed37c95c] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 981.334039] env[61839]: DEBUG nova.scheduler.client.report [None req-3560b6a3-38e1-4082-80dc-ba1e60a79e5a tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 981.464396] env[61839]: INFO nova.compute.manager [None req-b4d0a2e1-0649-414e-a6e9-a456893a9b36 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 3c832102-cacc-4dd8-a336-2aa1d8bd8116] Took 23.07 seconds to build instance. [ 981.497532] env[61839]: DEBUG oslo_vmware.api [None req-0e245029-295d-452a-a1bf-1e5628215f04 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1314874, 'name': PowerOffVM_Task, 'duration_secs': 0.328939} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 981.497843] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e245029-295d-452a-a1bf-1e5628215f04 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 73b83239-bbc8-41d1-aec3-2b4519c320af] Powered off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 981.498061] env[61839]: DEBUG nova.compute.manager [None req-0e245029-295d-452a-a1bf-1e5628215f04 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 73b83239-bbc8-41d1-aec3-2b4519c320af] Checking state {{(pid=61839) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 981.498887] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13d1e35a-b661-47b2-9336-3737fced9a32 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.604988] env[61839]: DEBUG oslo_concurrency.lockutils [None req-8de975af-2807-4306-bdbb-44d27e8fab2e tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Acquiring lock "refresh_cache-fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 981.605245] env[61839]: DEBUG oslo_concurrency.lockutils [None req-8de975af-2807-4306-bdbb-44d27e8fab2e tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Acquired lock "refresh_cache-fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 981.605444] env[61839]: DEBUG nova.network.neutron [None req-8de975af-2807-4306-bdbb-44d27e8fab2e tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 981.605666] env[61839]: DEBUG nova.objects.instance [None req-8de975af-2807-4306-bdbb-44d27e8fab2e tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Lazy-loading 'info_cache' on Instance uuid fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4 {{(pid=61839) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 981.664472] env[61839]: DEBUG nova.network.neutron [None req-89dd6587-ff08-49b7-ad7f-f8fceec9456a tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] [instance: a262845a-0ae2-4e0e-9040-01f0ed37c95c] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 981.709557] env[61839]: DEBUG oslo_vmware.rw_handles [None req-3eb64946-db7f-4575-9761-49a11077e028 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Completed reading data from the image iterator. 
{{(pid=61839) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 981.709880] env[61839]: DEBUG oslo_vmware.rw_handles [None req-3eb64946-db7f-4575-9761-49a11077e028 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52384687-7e60-37d9-6d9b-d11af8e582c2/disk-0.vmdk. {{(pid=61839) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 981.710975] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c112b563-d043-4262-a229-1426f8eec3f1 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.719700] env[61839]: DEBUG oslo_vmware.rw_handles [None req-3eb64946-db7f-4575-9761-49a11077e028 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52384687-7e60-37d9-6d9b-d11af8e582c2/disk-0.vmdk is in state: ready. {{(pid=61839) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 981.719894] env[61839]: DEBUG oslo_vmware.rw_handles [None req-3eb64946-db7f-4575-9761-49a11077e028 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Releasing lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52384687-7e60-37d9-6d9b-d11af8e582c2/disk-0.vmdk. {{(pid=61839) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 981.720165] env[61839]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-7409ac4a-d601-4647-810c-fde765cf8044 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.722819] env[61839]: DEBUG nova.network.neutron [None req-89dd6587-ff08-49b7-ad7f-f8fceec9456a tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] [instance: a262845a-0ae2-4e0e-9040-01f0ed37c95c] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 981.764525] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3df55af6-5aa0-4404-b9ac-92a7809ebceb tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Acquiring lock "23ee24d5-bccd-497d-a53f-b9723fd9c707" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 981.764933] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3df55af6-5aa0-4404-b9ac-92a7809ebceb tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Lock "23ee24d5-bccd-497d-a53f-b9723fd9c707" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 981.765085] env[61839]: DEBUG nova.compute.manager [None req-3df55af6-5aa0-4404-b9ac-92a7809ebceb tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 23ee24d5-bccd-497d-a53f-b9723fd9c707] Going to confirm migration 3 {{(pid=61839) do_confirm_resize 
/opt/stack/nova/nova/compute/manager.py:4783}} [ 981.838378] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3560b6a3-38e1-4082-80dc-ba1e60a79e5a tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.536s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 981.838935] env[61839]: DEBUG nova.compute.manager [None req-3560b6a3-38e1-4082-80dc-ba1e60a79e5a tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: a87f3a17-0a97-4b47-bc95-eee5975f8203] Start building networks asynchronously for instance. {{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 981.841479] env[61839]: DEBUG oslo_concurrency.lockutils [None req-162b3bf6-5d8f-4f91-9087-bf5f3018ad49 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 2.102s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 981.842158] env[61839]: DEBUG nova.objects.instance [None req-162b3bf6-5d8f-4f91-9087-bf5f3018ad49 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] [instance: a262845a-0ae2-4e0e-9040-01f0ed37c95c] Trying to apply a migration context that does not seem to be set for this instance {{(pid=61839) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 981.870269] env[61839]: DEBUG oslo_concurrency.lockutils [None req-cd190d1b-1c78-4e1f-b1a0-df6c466aa0b3 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Acquiring lock "interface-d718d866-dd6c-4332-b63a-be6850a5a785-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 981.870749] env[61839]: DEBUG oslo_concurrency.lockutils [None req-cd190d1b-1c78-4e1f-b1a0-df6c466aa0b3 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Lock "interface-d718d866-dd6c-4332-b63a-be6850a5a785-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.001s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 981.871160] env[61839]: DEBUG nova.objects.instance [None req-cd190d1b-1c78-4e1f-b1a0-df6c466aa0b3 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Lazy-loading 'flavor' on Instance uuid d718d866-dd6c-4332-b63a-be6850a5a785 {{(pid=61839) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 981.955897] env[61839]: DEBUG oslo_vmware.rw_handles [None req-3eb64946-db7f-4575-9761-49a11077e028 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Closed VMDK write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52384687-7e60-37d9-6d9b-d11af8e582c2/disk-0.vmdk. 
{{(pid=61839) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 981.956161] env[61839]: INFO nova.virt.vmwareapi.images [None req-3eb64946-db7f-4575-9761-49a11077e028 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] [instance: 86525ea7-af75-4b10-85a1-c0fbab73ea5f] Downloaded image file data 5fe38bf0-d665-4992-a6d0-c15bcea2316d [ 981.957011] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-192b0f5c-7e43-4e9f-95a7-97dbc524fe89 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.973423] env[61839]: DEBUG oslo_concurrency.lockutils [None req-b4d0a2e1-0649-414e-a6e9-a456893a9b36 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Lock "3c832102-cacc-4dd8-a336-2aa1d8bd8116" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 24.586s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 981.973806] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d85f3772-193b-488e-8b32-2bf1b144e563 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.016415] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0e245029-295d-452a-a1bf-1e5628215f04 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Lock "73b83239-bbc8-41d1-aec3-2b4519c320af" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.588s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 982.055898] env[61839]: INFO nova.virt.vmwareapi.images [None req-3eb64946-db7f-4575-9761-49a11077e028 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] [instance: 86525ea7-af75-4b10-85a1-c0fbab73ea5f] The imported VM was unregistered [ 982.058332] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-3eb64946-db7f-4575-9761-49a11077e028 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] [instance: 86525ea7-af75-4b10-85a1-c0fbab73ea5f] Caching image {{(pid=61839) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 982.058590] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-3eb64946-db7f-4575-9761-49a11077e028 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Creating directory with path [datastore1] devstack-image-cache_base/5fe38bf0-d665-4992-a6d0-c15bcea2316d {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 982.058895] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ac1510ea-ef30-47f4-a0aa-ca207c53def5 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.079014] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-3eb64946-db7f-4575-9761-49a11077e028 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Created directory with path [datastore1] devstack-image-cache_base/5fe38bf0-d665-4992-a6d0-c15bcea2316d {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 
982.079238] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-3eb64946-db7f-4575-9761-49a11077e028 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_9a9a8aa9-bf08-45d3-9f3f-656fb9b22c21/OSTACK_IMG_9a9a8aa9-bf08-45d3-9f3f-656fb9b22c21.vmdk to [datastore1] devstack-image-cache_base/5fe38bf0-d665-4992-a6d0-c15bcea2316d/5fe38bf0-d665-4992-a6d0-c15bcea2316d.vmdk. {{(pid=61839) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 982.079518] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-80620f42-e171-427d-abb6-47a37cf9ca9a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.089142] env[61839]: DEBUG oslo_vmware.api [None req-3eb64946-db7f-4575-9761-49a11077e028 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Waiting for the task: (returnval){ [ 982.089142] env[61839]: value = "task-1314876" [ 982.089142] env[61839]: _type = "Task" [ 982.089142] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 982.097755] env[61839]: DEBUG oslo_vmware.api [None req-3eb64946-db7f-4575-9761-49a11077e028 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Task: {'id': task-1314876, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.110204] env[61839]: DEBUG nova.objects.base [None req-8de975af-2807-4306-bdbb-44d27e8fab2e tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Object Instance lazy-loaded attributes: flavor,info_cache {{(pid=61839) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 982.225975] env[61839]: DEBUG oslo_concurrency.lockutils [None req-89dd6587-ff08-49b7-ad7f-f8fceec9456a tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Releasing lock "refresh_cache-a262845a-0ae2-4e0e-9040-01f0ed37c95c" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 982.227646] env[61839]: DEBUG nova.compute.manager [None req-89dd6587-ff08-49b7-ad7f-f8fceec9456a tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] [instance: a262845a-0ae2-4e0e-9040-01f0ed37c95c] Start destroying the instance on the hypervisor. 
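[annotation] The records around here show the image-cache side of _fetch_image_if_missing: the image was downloaded as a temporary OSTACK_IMG_* VM, that VM was unregistered, a directory was created under devstack-image-cache_base, and the VMDK is now being moved into it so later boots can clone from the cache instead of re-downloading. A local-filesystem sketch of that cache decision, with hypothetical paths and helpers standing in for the datastore operations:

# Sketch of the image-cache decision in the records above: use the
# cached VMDK when present, otherwise import to a temp location and
# move it into the cache. All paths and helpers are illustrative.
import os
import shutil
import tempfile

def fetch_image_if_missing(cache_root, image_id, download):
    cache_dir = os.path.join(cache_root, image_id)
    cached = os.path.join(cache_dir, image_id + '.vmdk')
    if os.path.exists(cached):
        return cached                      # cache hit: nothing to do
    tmp = download()                       # the OSTACK_IMG_* import step
    os.makedirs(cache_dir, exist_ok=True)  # FileManager.MakeDirectory
    shutil.move(tmp, cached)               # MoveVirtualDisk_Task
    return cached

root = tempfile.mkdtemp()

def fake_download():
    path = os.path.join(root, 'OSTACK_IMG_tmp.vmdk')
    with open(path, 'wb') as f:
        f.write(b'\x00' * 16)
    return path

print(fetch_image_if_missing(root, '5fe38bf0', fake_download))

The move rather than copy is the point: the expensive streamed download happens once, and the cached disk under devstack-image-cache_base/<image-id>/ becomes the source for subsequent spawns of the same image.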
{{(pid=61839) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 982.227646] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-89dd6587-ff08-49b7-ad7f-f8fceec9456a tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] [instance: a262845a-0ae2-4e0e-9040-01f0ed37c95c] Destroying instance {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 982.228101] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8c60998-ec99-4b9e-86d6-0c0c2e462eb9 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.237110] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-89dd6587-ff08-49b7-ad7f-f8fceec9456a tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] [instance: a262845a-0ae2-4e0e-9040-01f0ed37c95c] Powering off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 982.237509] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e219188f-e3c9-44f0-8df9-0fb579b42ea2 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.244271] env[61839]: DEBUG oslo_vmware.api [None req-89dd6587-ff08-49b7-ad7f-f8fceec9456a tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Waiting for the task: (returnval){ [ 982.244271] env[61839]: value = "task-1314877" [ 982.244271] env[61839]: _type = "Task" [ 982.244271] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 982.253829] env[61839]: DEBUG oslo_vmware.api [None req-89dd6587-ff08-49b7-ad7f-f8fceec9456a tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Task: {'id': task-1314877, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.304669] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3df55af6-5aa0-4404-b9ac-92a7809ebceb tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Acquiring lock "refresh_cache-23ee24d5-bccd-497d-a53f-b9723fd9c707" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 982.304855] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3df55af6-5aa0-4404-b9ac-92a7809ebceb tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Acquired lock "refresh_cache-23ee24d5-bccd-497d-a53f-b9723fd9c707" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 982.305092] env[61839]: DEBUG nova.network.neutron [None req-3df55af6-5aa0-4404-b9ac-92a7809ebceb tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 23ee24d5-bccd-497d-a53f-b9723fd9c707] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 982.305295] env[61839]: DEBUG nova.objects.instance [None req-3df55af6-5aa0-4404-b9ac-92a7809ebceb tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Lazy-loading 'info_cache' on Instance uuid 23ee24d5-bccd-497d-a53f-b9723fd9c707 {{(pid=61839) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 982.349145] env[61839]: DEBUG nova.compute.utils [None req-3560b6a3-38e1-4082-80dc-ba1e60a79e5a tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Using /dev/sd instead of None {{(pid=61839) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 982.350486] env[61839]: DEBUG nova.compute.manager [None req-3560b6a3-38e1-4082-80dc-ba1e60a79e5a tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: a87f3a17-0a97-4b47-bc95-eee5975f8203] Allocating IP information in the background. 
{{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 982.350652] env[61839]: DEBUG nova.network.neutron [None req-3560b6a3-38e1-4082-80dc-ba1e60a79e5a tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: a87f3a17-0a97-4b47-bc95-eee5975f8203] allocate_for_instance() {{(pid=61839) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 982.375146] env[61839]: DEBUG nova.objects.instance [None req-cd190d1b-1c78-4e1f-b1a0-df6c466aa0b3 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Lazy-loading 'pci_requests' on Instance uuid d718d866-dd6c-4332-b63a-be6850a5a785 {{(pid=61839) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 982.439230] env[61839]: DEBUG nova.policy [None req-3560b6a3-38e1-4082-80dc-ba1e60a79e5a tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '430b14eaa0e94ef39fb0f95269448ade', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '25686a503d044467a1d641f14e14c65c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61839) authorize /opt/stack/nova/nova/policy.py:201}} [ 982.609904] env[61839]: DEBUG oslo_vmware.api [None req-3eb64946-db7f-4575-9761-49a11077e028 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Task: {'id': task-1314876, 'name': MoveVirtualDisk_Task} progress is 9%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.757453] env[61839]: DEBUG oslo_vmware.api [None req-89dd6587-ff08-49b7-ad7f-f8fceec9456a tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Task: {'id': task-1314877, 'name': PowerOffVM_Task, 'duration_secs': 0.294392} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 982.757958] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-89dd6587-ff08-49b7-ad7f-f8fceec9456a tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] [instance: a262845a-0ae2-4e0e-9040-01f0ed37c95c] Powered off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 982.758256] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-89dd6587-ff08-49b7-ad7f-f8fceec9456a tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] [instance: a262845a-0ae2-4e0e-9040-01f0ed37c95c] Unregistering the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 982.758610] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ee3fd695-3e97-4a1f-a18b-24448a755a81 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.793489] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-89dd6587-ff08-49b7-ad7f-f8fceec9456a tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] [instance: a262845a-0ae2-4e0e-9040-01f0ed37c95c] Unregistered the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 982.793853] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-89dd6587-ff08-49b7-ad7f-f8fceec9456a tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] [instance: a262845a-0ae2-4e0e-9040-01f0ed37c95c] Deleting contents of the VM from datastore datastore1 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 982.795393] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-89dd6587-ff08-49b7-ad7f-f8fceec9456a tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Deleting the datastore file [datastore1] a262845a-0ae2-4e0e-9040-01f0ed37c95c {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 982.796140] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4ebb38f3-229f-4a50-bc27-803b9f5367d6 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.803732] env[61839]: DEBUG oslo_vmware.api [None req-89dd6587-ff08-49b7-ad7f-f8fceec9456a tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Waiting for the task: (returnval){ [ 982.803732] env[61839]: value = "task-1314879" [ 982.803732] env[61839]: _type = "Task" [ 982.803732] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 982.820474] env[61839]: DEBUG oslo_vmware.api [None req-89dd6587-ff08-49b7-ad7f-f8fceec9456a tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Task: {'id': task-1314879, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.830901] env[61839]: DEBUG nova.network.neutron [None req-3560b6a3-38e1-4082-80dc-ba1e60a79e5a tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: a87f3a17-0a97-4b47-bc95-eee5975f8203] Successfully created port: 08ce136a-85a7-43c8-924c-0bef574f8bfc {{(pid=61839) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 982.851687] env[61839]: DEBUG oslo_concurrency.lockutils [None req-162b3bf6-5d8f-4f91-9087-bf5f3018ad49 tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.010s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 982.860021] env[61839]: DEBUG nova.compute.manager [None req-3560b6a3-38e1-4082-80dc-ba1e60a79e5a tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: a87f3a17-0a97-4b47-bc95-eee5975f8203] Start building block device mappings for instance. {{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 982.878341] env[61839]: DEBUG nova.objects.base [None req-cd190d1b-1c78-4e1f-b1a0-df6c466aa0b3 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Object Instance lazy-loaded attributes: flavor,pci_requests {{(pid=61839) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 982.879025] env[61839]: DEBUG nova.network.neutron [None req-cd190d1b-1c78-4e1f-b1a0-df6c466aa0b3 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: d718d866-dd6c-4332-b63a-be6850a5a785] allocate_for_instance() {{(pid=61839) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 982.977680] env[61839]: DEBUG oslo_concurrency.lockutils [None req-cd190d1b-1c78-4e1f-b1a0-df6c466aa0b3 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Lock "interface-d718d866-dd6c-4332-b63a-be6850a5a785-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 1.107s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 983.105353] env[61839]: DEBUG oslo_vmware.api [None req-3eb64946-db7f-4575-9761-49a11077e028 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Task: {'id': task-1314876, 'name': MoveVirtualDisk_Task} progress is 18%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.111441] env[61839]: INFO nova.compute.manager [None req-a7050108-7622-4d4a-b278-16e5711e81d2 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 73b83239-bbc8-41d1-aec3-2b4519c320af] Rebuilding instance [ 983.114448] env[61839]: DEBUG nova.network.neutron [None req-8de975af-2807-4306-bdbb-44d27e8fab2e tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4] Updating instance_info_cache with network_info: [{"id": "bc315481-8651-4be3-bdd5-269b569b2817", "address": "fa:16:3e:ce:61:f0", "network": {"id": "54f10e74-ee7b-499c-a6b6-a27d337719cd", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-79581761-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.143", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5f789f3900a347b59c491e9d141fb9e7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe20ef0e-0991-44d7-887d-08dddac0b56b", "external-id": "nsx-vlan-transportzone-991", "segmentation_id": 991, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbc315481-86", "ovs_interfaceid": "bc315481-8651-4be3-bdd5-269b569b2817", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 983.161934] env[61839]: DEBUG nova.compute.manager [None req-a7050108-7622-4d4a-b278-16e5711e81d2 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 73b83239-bbc8-41d1-aec3-2b4519c320af] Checking state {{(pid=61839) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 983.163378] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55d75292-90b5-40d0-ae3d-5c1f88bb869c {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.168591] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 983.168940] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 983.316105] env[61839]: DEBUG oslo_vmware.api [None req-89dd6587-ff08-49b7-ad7f-f8fceec9456a tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Task: {'id': task-1314879, 'name': DeleteDatastoreFile_Task} progress is 0%. 
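The "Running periodic task ComputeManager._check_instance_build_time" style entries above are produced by oslo.service's periodic-task machinery: decorated methods are collected on the class and executed on a schedule by run_periodic_tasks. A minimal sketch; the 60-second spacing is an assumed value, not something visible in this log:

    from oslo_config import cfg
    from oslo_service import periodic_task

    class Manager(periodic_task.PeriodicTasks):
        def __init__(self):
            super().__init__(cfg.CONF)

        @periodic_task.periodic_task(spacing=60)  # assumed interval
        def _check_instance_build_time(self, context):
            pass  # each tick produces a "Running periodic task ..." entry

        @periodic_task.periodic_task(spacing=60)  # assumed interval
        def _sync_scheduler_instance_info(self, context):
            pass

    Manager().run_periodic_tasks(context=None)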
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.581037] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a09bae88-7ca6-454c-87de-086a44c03aaf tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Acquiring lock "3c832102-cacc-4dd8-a336-2aa1d8bd8116" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 983.581037] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a09bae88-7ca6-454c-87de-086a44c03aaf tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Lock "3c832102-cacc-4dd8-a336-2aa1d8bd8116" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 983.581037] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a09bae88-7ca6-454c-87de-086a44c03aaf tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Acquiring lock "3c832102-cacc-4dd8-a336-2aa1d8bd8116-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 983.581037] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a09bae88-7ca6-454c-87de-086a44c03aaf tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Lock "3c832102-cacc-4dd8-a336-2aa1d8bd8116-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 983.581037] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a09bae88-7ca6-454c-87de-086a44c03aaf tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Lock "3c832102-cacc-4dd8-a336-2aa1d8bd8116-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 983.583862] env[61839]: INFO nova.compute.manager [None req-a09bae88-7ca6-454c-87de-086a44c03aaf tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 3c832102-cacc-4dd8-a336-2aa1d8bd8116] Terminating instance [ 983.588017] env[61839]: DEBUG nova.compute.manager [None req-a09bae88-7ca6-454c-87de-086a44c03aaf tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 3c832102-cacc-4dd8-a336-2aa1d8bd8116] Start destroying the instance on the hypervisor.
{{(pid=61839) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 983.588017] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-a09bae88-7ca6-454c-87de-086a44c03aaf tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 3c832102-cacc-4dd8-a336-2aa1d8bd8116] Destroying instance {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 983.588017] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aec4477c-54ea-409c-a728-a760bc90bbaa {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.604146] env[61839]: DEBUG oslo_vmware.api [None req-3eb64946-db7f-4575-9761-49a11077e028 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Task: {'id': task-1314876, 'name': MoveVirtualDisk_Task} progress is 38%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.610024] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-a09bae88-7ca6-454c-87de-086a44c03aaf tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 3c832102-cacc-4dd8-a336-2aa1d8bd8116] Powering off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 983.610024] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4b701a20-6877-4140-98df-d98aa271c8b0 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.617683] env[61839]: DEBUG oslo_concurrency.lockutils [None req-8de975af-2807-4306-bdbb-44d27e8fab2e tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Releasing lock "refresh_cache-fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 983.622024] env[61839]: DEBUG oslo_vmware.api [None req-a09bae88-7ca6-454c-87de-086a44c03aaf tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Waiting for the task: (returnval){ [ 983.622024] env[61839]: value = "task-1314880" [ 983.622024] env[61839]: _type = "Task" [ 983.622024] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 983.634396] env[61839]: DEBUG oslo_vmware.api [None req-a09bae88-7ca6-454c-87de-086a44c03aaf tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': task-1314880, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.678676] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 983.678892] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Starting heal instance info cache {{(pid=61839) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 983.688869] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-a7050108-7622-4d4a-b278-16e5711e81d2 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 73b83239-bbc8-41d1-aec3-2b4519c320af] Powering off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 983.689251] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ae3c2ca0-6e0e-48e3-83dc-a7825ac4ec3a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.700309] env[61839]: DEBUG oslo_vmware.api [None req-a7050108-7622-4d4a-b278-16e5711e81d2 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Waiting for the task: (returnval){ [ 983.700309] env[61839]: value = "task-1314881" [ 983.700309] env[61839]: _type = "Task" [ 983.700309] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 983.718067] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-a7050108-7622-4d4a-b278-16e5711e81d2 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 73b83239-bbc8-41d1-aec3-2b4519c320af] VM already powered off {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 983.718249] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-a7050108-7622-4d4a-b278-16e5711e81d2 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 73b83239-bbc8-41d1-aec3-2b4519c320af] Destroying instance {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 983.719167] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20b6f99e-3d69-447e-a7e7-79dc21c242af {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.730819] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-a7050108-7622-4d4a-b278-16e5711e81d2 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 73b83239-bbc8-41d1-aec3-2b4519c320af] Unregistering the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 983.731066] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2dc63627-e2aa-46dc-b4b4-7ce9a2ba8142 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.796559] env[61839]: DEBUG oslo_concurrency.lockutils [None req-c67b0a78-92e0-4ec2-bcc9-0c54d6296d00 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Acquiring 
lock "603191b6-a4b0-451b-b98b-f3dbfb684300" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 983.796869] env[61839]: DEBUG oslo_concurrency.lockutils [None req-c67b0a78-92e0-4ec2-bcc9-0c54d6296d00 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Lock "603191b6-a4b0-451b-b98b-f3dbfb684300" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 983.797076] env[61839]: DEBUG oslo_concurrency.lockutils [None req-c67b0a78-92e0-4ec2-bcc9-0c54d6296d00 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Acquiring lock "603191b6-a4b0-451b-b98b-f3dbfb684300-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 983.797288] env[61839]: DEBUG oslo_concurrency.lockutils [None req-c67b0a78-92e0-4ec2-bcc9-0c54d6296d00 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Lock "603191b6-a4b0-451b-b98b-f3dbfb684300-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 983.797548] env[61839]: DEBUG oslo_concurrency.lockutils [None req-c67b0a78-92e0-4ec2-bcc9-0c54d6296d00 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Lock "603191b6-a4b0-451b-b98b-f3dbfb684300-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 983.801022] env[61839]: INFO nova.compute.manager [None req-c67b0a78-92e0-4ec2-bcc9-0c54d6296d00 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 603191b6-a4b0-451b-b98b-f3dbfb684300] Terminating instance [ 983.802142] env[61839]: DEBUG nova.compute.manager [None req-c67b0a78-92e0-4ec2-bcc9-0c54d6296d00 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 603191b6-a4b0-451b-b98b-f3dbfb684300] Start destroying the instance on the hypervisor. 
{{(pid=61839) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 983.802354] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-c67b0a78-92e0-4ec2-bcc9-0c54d6296d00 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 603191b6-a4b0-451b-b98b-f3dbfb684300] Destroying instance {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 983.803276] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95f70092-f9c6-4b55-887f-a9d354e5d627 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.817996] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-c67b0a78-92e0-4ec2-bcc9-0c54d6296d00 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 603191b6-a4b0-451b-b98b-f3dbfb684300] Powering off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 983.822182] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9a73087b-1728-4a5c-9639-09b4ad5d1cb6 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.825871] env[61839]: DEBUG oslo_vmware.api [None req-89dd6587-ff08-49b7-ad7f-f8fceec9456a tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Task: {'id': task-1314879, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.835478] env[61839]: DEBUG oslo_vmware.api [None req-c67b0a78-92e0-4ec2-bcc9-0c54d6296d00 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Waiting for the task: (returnval){ [ 983.835478] env[61839]: value = "task-1314883" [ 983.835478] env[61839]: _type = "Task" [ 983.835478] env[61839]: } to complete. 
{{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 983.846544] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-a7050108-7622-4d4a-b278-16e5711e81d2 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 73b83239-bbc8-41d1-aec3-2b4519c320af] Unregistered the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 983.846799] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-a7050108-7622-4d4a-b278-16e5711e81d2 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 73b83239-bbc8-41d1-aec3-2b4519c320af] Deleting contents of the VM from datastore datastore1 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 983.846999] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-a7050108-7622-4d4a-b278-16e5711e81d2 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Deleting the datastore file [datastore1] 73b83239-bbc8-41d1-aec3-2b4519c320af {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 983.850608] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-73a0fa4b-3f96-4498-b51b-e8a37352e406 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.852707] env[61839]: DEBUG oslo_vmware.api [None req-c67b0a78-92e0-4ec2-bcc9-0c54d6296d00 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Task: {'id': task-1314883, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.861189] env[61839]: DEBUG oslo_vmware.api [None req-a7050108-7622-4d4a-b278-16e5711e81d2 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Waiting for the task: (returnval){ [ 983.861189] env[61839]: value = "task-1314884" [ 983.861189] env[61839]: _type = "Task" [ 983.861189] env[61839]: } to complete. 
{{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 983.868251] env[61839]: DEBUG nova.network.neutron [None req-3df55af6-5aa0-4404-b9ac-92a7809ebceb tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 23ee24d5-bccd-497d-a53f-b9723fd9c707] Updating instance_info_cache with network_info: [{"id": "56222a14-6e55-4e9b-a963-5d868763ad21", "address": "fa:16:3e:5e:f1:1e", "network": {"id": "2334f355-8fd7-4df2-ba1c-a28b5fde97f2", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-202913707-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8935bcc7ee644cb7a2a33557a708189c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "37434b93-dfdc-4a3f-bf5a-9f2cbe25a754", "external-id": "nsx-vlan-transportzone-676", "segmentation_id": 676, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap56222a14-6e", "ovs_interfaceid": "56222a14-6e55-4e9b-a963-5d868763ad21", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 983.875441] env[61839]: DEBUG oslo_vmware.api [None req-a7050108-7622-4d4a-b278-16e5711e81d2 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1314884, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.005631] env[61839]: DEBUG nova.compute.manager [None req-3560b6a3-38e1-4082-80dc-ba1e60a79e5a tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: a87f3a17-0a97-4b47-bc95-eee5975f8203] Start spawning the instance on the hypervisor. 
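update_instance_cache_with_nw_info persists the network_info structure dumped above: a list of VIF dicts, each carrying its network, subnets, fixed IPs, and any floating IPs. A trimmed copy of the logged entry and a walk over it, just to show the nesting:

    # Trimmed to the fields used below; values copied from the log entry.
    network_info = [{
        "id": "56222a14-6e55-4e9b-a963-5d868763ad21",
        "address": "fa:16:3e:5e:f1:1e",
        "network": {
            "subnets": [{
                "cidr": "192.168.128.0/28",
                "ips": [{"address": "192.168.128.9", "type": "fixed",
                         "floating_ips": []}],
            }],
        },
    }]

    for vif in network_info:
        for subnet in vif["network"]["subnets"]:
            for ip in subnet["ips"]:
                floating = [f["address"] for f in ip.get("floating_ips", [])]
                print(vif["id"], vif["address"], ip["address"], floating)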
{{(pid=61839) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 984.034693] env[61839]: DEBUG nova.virt.hardware [None req-3560b6a3-38e1-4082-80dc-ba1e60a79e5a tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-18T16:51:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-18T16:50:57Z,direct_url=,disk_format='vmdk',id=e497cc62-282a-4a70-9770-22d80d8a1013,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a98e034276e44534a9feace637762da7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-18T16:50:58Z,virtual_size=,visibility=), allow threads: False {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 984.035057] env[61839]: DEBUG nova.virt.hardware [None req-3560b6a3-38e1-4082-80dc-ba1e60a79e5a tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Flavor limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 984.035300] env[61839]: DEBUG nova.virt.hardware [None req-3560b6a3-38e1-4082-80dc-ba1e60a79e5a tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Image limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 984.035444] env[61839]: DEBUG nova.virt.hardware [None req-3560b6a3-38e1-4082-80dc-ba1e60a79e5a tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Flavor pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 984.035597] env[61839]: DEBUG nova.virt.hardware [None req-3560b6a3-38e1-4082-80dc-ba1e60a79e5a tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Image pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 984.035751] env[61839]: DEBUG nova.virt.hardware [None req-3560b6a3-38e1-4082-80dc-ba1e60a79e5a tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 984.035972] env[61839]: DEBUG nova.virt.hardware [None req-3560b6a3-38e1-4082-80dc-ba1e60a79e5a tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 984.036165] env[61839]: DEBUG nova.virt.hardware [None req-3560b6a3-38e1-4082-80dc-ba1e60a79e5a tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 984.036345] env[61839]: DEBUG nova.virt.hardware [None 
req-3560b6a3-38e1-4082-80dc-ba1e60a79e5a tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Got 1 possible topologies {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 984.036517] env[61839]: DEBUG nova.virt.hardware [None req-3560b6a3-38e1-4082-80dc-ba1e60a79e5a tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 984.036722] env[61839]: DEBUG nova.virt.hardware [None req-3560b6a3-38e1-4082-80dc-ba1e60a79e5a tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 984.037777] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abee3e92-3496-40a6-a6a1-7c28856b022f {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.048386] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85a714b7-9553-4ef7-be80-65ac5fdf8b4c {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.105226] env[61839]: DEBUG oslo_vmware.api [None req-3eb64946-db7f-4575-9761-49a11077e028 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Task: {'id': task-1314876, 'name': MoveVirtualDisk_Task} progress is 57%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.122636] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-8de975af-2807-4306-bdbb-44d27e8fab2e tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4] Powering on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 984.123237] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6a6df9b5-c2b3-4781-b062-343225475cb5 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.137618] env[61839]: DEBUG oslo_vmware.api [None req-a09bae88-7ca6-454c-87de-086a44c03aaf tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': task-1314880, 'name': PowerOffVM_Task, 'duration_secs': 0.422272} completed successfully. 
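The topology lines above ("Build topologies for 1 vcpu(s) 1:1:1" through "Sorted desired topologies") enumerate every (sockets, cores, threads) split of the vCPU count that fits the flavor/image limits. The following is an approximation of that enumeration for illustration, not Nova's actual nova.virt.hardware code:

    def possible_topologies(vcpus, max_sockets, max_cores, max_threads):
        # All factorizations sockets * cores * threads == vcpus within limits.
        topos = []
        for sockets in range(1, min(vcpus, max_sockets) + 1):
            if vcpus % sockets:
                continue
            rest = vcpus // sockets
            for cores in range(1, min(rest, max_cores) + 1):
                if rest % cores:
                    continue
                threads = rest // cores
                if threads <= max_threads:
                    topos.append((sockets, cores, threads))
        return topos

    # One vCPU under the logged 65536/65536/65536 limits admits exactly one
    # topology, matching the single VirtCPUTopology entry above:
    print(possible_topologies(1, 65536, 65536, 65536))  # [(1, 1, 1)]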
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 984.139201] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-a09bae88-7ca6-454c-87de-086a44c03aaf tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 3c832102-cacc-4dd8-a336-2aa1d8bd8116] Powered off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 984.139397] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-a09bae88-7ca6-454c-87de-086a44c03aaf tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 3c832102-cacc-4dd8-a336-2aa1d8bd8116] Unregistering the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 984.139736] env[61839]: DEBUG oslo_vmware.api [None req-8de975af-2807-4306-bdbb-44d27e8fab2e tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Waiting for the task: (returnval){ [ 984.139736] env[61839]: value = "task-1314885" [ 984.139736] env[61839]: _type = "Task" [ 984.139736] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 984.139947] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0fda38ba-e226-46df-9806-9a307ffa0709 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.152327] env[61839]: DEBUG oslo_vmware.api [None req-8de975af-2807-4306-bdbb-44d27e8fab2e tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1314885, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.185049] env[61839]: DEBUG oslo_concurrency.lockutils [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Acquiring lock "refresh_cache-86525ea7-af75-4b10-85a1-c0fbab73ea5f" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 984.185049] env[61839]: DEBUG oslo_concurrency.lockutils [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Acquired lock "refresh_cache-86525ea7-af75-4b10-85a1-c0fbab73ea5f" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 984.185274] env[61839]: DEBUG nova.network.neutron [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] [instance: 86525ea7-af75-4b10-85a1-c0fbab73ea5f] Forcefully refreshing network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 984.258337] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-a09bae88-7ca6-454c-87de-086a44c03aaf tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 3c832102-cacc-4dd8-a336-2aa1d8bd8116] Unregistered the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 984.258624] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-a09bae88-7ca6-454c-87de-086a44c03aaf tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 3c832102-cacc-4dd8-a336-2aa1d8bd8116] Deleting contents of the VM from datastore datastore1 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 984.258897] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-a09bae88-7ca6-454c-87de-086a44c03aaf tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Deleting the datastore file [datastore1] 3c832102-cacc-4dd8-a336-2aa1d8bd8116 {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 984.259278] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0b6cd9c0-b265-49c4-929d-26772f6edf63 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.274741] env[61839]: DEBUG oslo_vmware.api [None req-a09bae88-7ca6-454c-87de-086a44c03aaf tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Waiting for the task: (returnval){ [ 984.274741] env[61839]: value = "task-1314887" [ 984.274741] env[61839]: _type = "Task" [ 984.274741] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 984.285535] env[61839]: DEBUG oslo_vmware.api [None req-a09bae88-7ca6-454c-87de-086a44c03aaf tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': task-1314887, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.322038] env[61839]: DEBUG oslo_vmware.api [None req-89dd6587-ff08-49b7-ad7f-f8fceec9456a tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Task: {'id': task-1314879, 'name': DeleteDatastoreFile_Task, 'duration_secs': 1.167894} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 984.322038] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-89dd6587-ff08-49b7-ad7f-f8fceec9456a tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Deleted the datastore file {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 984.322248] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-89dd6587-ff08-49b7-ad7f-f8fceec9456a tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] [instance: a262845a-0ae2-4e0e-9040-01f0ed37c95c] Deleted contents of the VM from datastore datastore1 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 984.322581] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-89dd6587-ff08-49b7-ad7f-f8fceec9456a tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] [instance: a262845a-0ae2-4e0e-9040-01f0ed37c95c] Instance destroyed {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 984.323046] env[61839]: INFO nova.compute.manager [None req-89dd6587-ff08-49b7-ad7f-f8fceec9456a tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] [instance: a262845a-0ae2-4e0e-9040-01f0ed37c95c] Took 2.10 seconds to destroy the instance on the hypervisor. [ 984.323234] env[61839]: DEBUG oslo.service.loopingcall [None req-89dd6587-ff08-49b7-ad7f-f8fceec9456a tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.
{{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 984.323483] env[61839]: DEBUG nova.compute.manager [-] [instance: a262845a-0ae2-4e0e-9040-01f0ed37c95c] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 984.323608] env[61839]: DEBUG nova.network.neutron [-] [instance: a262845a-0ae2-4e0e-9040-01f0ed37c95c] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 984.348945] env[61839]: DEBUG oslo_vmware.api [None req-c67b0a78-92e0-4ec2-bcc9-0c54d6296d00 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Task: {'id': task-1314883, 'name': PowerOffVM_Task, 'duration_secs': 0.48812} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 984.349291] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-c67b0a78-92e0-4ec2-bcc9-0c54d6296d00 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 603191b6-a4b0-451b-b98b-f3dbfb684300] Powered off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 984.349476] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-c67b0a78-92e0-4ec2-bcc9-0c54d6296d00 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 603191b6-a4b0-451b-b98b-f3dbfb684300] Unregistering the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 984.349770] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-67eb11df-402f-4016-9945-7eda8b471929 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.354801] env[61839]: DEBUG nova.network.neutron [-] [instance: a262845a-0ae2-4e0e-9040-01f0ed37c95c] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 984.376119] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3df55af6-5aa0-4404-b9ac-92a7809ebceb tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Releasing lock "refresh_cache-23ee24d5-bccd-497d-a53f-b9723fd9c707" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 984.376436] env[61839]: DEBUG nova.objects.instance [None req-3df55af6-5aa0-4404-b9ac-92a7809ebceb tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Lazy-loading 'migration_context' on Instance uuid 23ee24d5-bccd-497d-a53f-b9723fd9c707 {{(pid=61839) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 984.377664] env[61839]: DEBUG oslo_vmware.api [None req-a7050108-7622-4d4a-b278-16e5711e81d2 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1314884, 'name': DeleteDatastoreFile_Task} progress is 0%. 
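"Waiting for function ... _deallocate_network_with_retries to return." marks an oslo.service looping call: the network deallocation is wrapped in a retrying loop and the caller blocks until it finishes. A sketch using the fixed-interval variant; the real retry policy (intervals, back-off, attempt count) is not visible in this log and the interval below is an assumed value:

    from oslo_service import loopingcall

    def _deallocate_network_with_retries():
        # Body elided; the real function retries the Neutron deallocation.
        # Raising LoopingCallDone ends the loop and unblocks wait().
        raise loopingcall.LoopingCallDone()

    timer = loopingcall.FixedIntervalLoopingCall(_deallocate_network_with_retries)
    timer.start(interval=2).wait()  # corresponds to the "Waiting for function" entry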
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.467682] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-c67b0a78-92e0-4ec2-bcc9-0c54d6296d00 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 603191b6-a4b0-451b-b98b-f3dbfb684300] Unregistered the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 984.467682] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-c67b0a78-92e0-4ec2-bcc9-0c54d6296d00 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 603191b6-a4b0-451b-b98b-f3dbfb684300] Deleting contents of the VM from datastore datastore2 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 984.467855] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-c67b0a78-92e0-4ec2-bcc9-0c54d6296d00 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Deleting the datastore file [datastore2] 603191b6-a4b0-451b-b98b-f3dbfb684300 {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 984.468585] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c4cba03f-6081-4e53-8caa-9c96f55d6187 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.478186] env[61839]: DEBUG oslo_vmware.api [None req-c67b0a78-92e0-4ec2-bcc9-0c54d6296d00 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Waiting for the task: (returnval){ [ 984.478186] env[61839]: value = "task-1314889" [ 984.478186] env[61839]: _type = "Task" [ 984.478186] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 984.489614] env[61839]: DEBUG oslo_vmware.api [None req-c67b0a78-92e0-4ec2-bcc9-0c54d6296d00 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Task: {'id': task-1314889, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.610661] env[61839]: DEBUG oslo_vmware.api [None req-3eb64946-db7f-4575-9761-49a11077e028 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Task: {'id': task-1314876, 'name': MoveVirtualDisk_Task} progress is 80%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.654714] env[61839]: DEBUG oslo_vmware.api [None req-8de975af-2807-4306-bdbb-44d27e8fab2e tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1314885, 'name': PowerOnVM_Task, 'duration_secs': 0.512128} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 984.655057] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-8de975af-2807-4306-bdbb-44d27e8fab2e tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4] Powered on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 984.655275] env[61839]: DEBUG nova.compute.manager [None req-8de975af-2807-4306-bdbb-44d27e8fab2e tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4] Checking state {{(pid=61839) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 984.656710] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a5aff78-1acd-4ad8-9773-64aef41ed2c2 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.671910] env[61839]: DEBUG oslo_concurrency.lockutils [None req-edb44f49-3fe2-4367-947c-969ef125b33b tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Acquiring lock "694a5d4b-3673-406b-a24a-d37fad33e549" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 984.672195] env[61839]: DEBUG oslo_concurrency.lockutils [None req-edb44f49-3fe2-4367-947c-969ef125b33b tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Lock "694a5d4b-3673-406b-a24a-d37fad33e549" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 984.762206] env[61839]: DEBUG nova.network.neutron [None req-3560b6a3-38e1-4082-80dc-ba1e60a79e5a tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: a87f3a17-0a97-4b47-bc95-eee5975f8203] Successfully updated port: 08ce136a-85a7-43c8-924c-0bef574f8bfc {{(pid=61839) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 984.788329] env[61839]: DEBUG oslo_vmware.api [None req-a09bae88-7ca6-454c-87de-086a44c03aaf tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': task-1314887, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.858618] env[61839]: DEBUG nova.network.neutron [-] [instance: a262845a-0ae2-4e0e-9040-01f0ed37c95c] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 984.875727] env[61839]: DEBUG oslo_vmware.api [None req-a7050108-7622-4d4a-b278-16e5711e81d2 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1314884, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.879936] env[61839]: DEBUG nova.objects.base [None req-3df55af6-5aa0-4404-b9ac-92a7809ebceb tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Object Instance<23ee24d5-bccd-497d-a53f-b9723fd9c707> lazy-loaded attributes: info_cache,migration_context {{(pid=61839) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 984.880953] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37fc2993-28b5-4261-8d47-d0648d0812a1 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.906602] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f212f84f-289b-465c-9e08-650a77714e46 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.915589] env[61839]: DEBUG oslo_vmware.api [None req-3df55af6-5aa0-4404-b9ac-92a7809ebceb tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Waiting for the task: (returnval){ [ 984.915589] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]520099eb-29eb-6012-bfca-77436bd9de0d" [ 984.915589] env[61839]: _type = "Task" [ 984.915589] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 984.927275] env[61839]: DEBUG oslo_vmware.api [None req-3df55af6-5aa0-4404-b9ac-92a7809ebceb tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]520099eb-29eb-6012-bfca-77436bd9de0d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.938494] env[61839]: DEBUG nova.compute.manager [req-4728f5bd-0ec3-453c-9ac8-76d5bbaae998 req-a42c00fe-b93a-46a8-8e31-057ea4971a7e service nova] [instance: a87f3a17-0a97-4b47-bc95-eee5975f8203] Received event network-vif-plugged-08ce136a-85a7-43c8-924c-0bef574f8bfc {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 984.938789] env[61839]: DEBUG oslo_concurrency.lockutils [req-4728f5bd-0ec3-453c-9ac8-76d5bbaae998 req-a42c00fe-b93a-46a8-8e31-057ea4971a7e service nova] Acquiring lock "a87f3a17-0a97-4b47-bc95-eee5975f8203-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 984.939021] env[61839]: DEBUG oslo_concurrency.lockutils [req-4728f5bd-0ec3-453c-9ac8-76d5bbaae998 req-a42c00fe-b93a-46a8-8e31-057ea4971a7e service nova] Lock "a87f3a17-0a97-4b47-bc95-eee5975f8203-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 984.939215] env[61839]: DEBUG oslo_concurrency.lockutils [req-4728f5bd-0ec3-453c-9ac8-76d5bbaae998 req-a42c00fe-b93a-46a8-8e31-057ea4971a7e service nova] Lock "a87f3a17-0a97-4b47-bc95-eee5975f8203-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 984.939399] env[61839]: DEBUG nova.compute.manager [req-4728f5bd-0ec3-453c-9ac8-76d5bbaae998 req-a42c00fe-b93a-46a8-8e31-057ea4971a7e service nova] [instance: a87f3a17-0a97-4b47-bc95-eee5975f8203] No waiting events found dispatching network-vif-plugged-08ce136a-85a7-43c8-924c-0bef574f8bfc {{(pid=61839) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 984.939571] env[61839]: WARNING nova.compute.manager [req-4728f5bd-0ec3-453c-9ac8-76d5bbaae998 req-a42c00fe-b93a-46a8-8e31-057ea4971a7e service nova] [instance: a87f3a17-0a97-4b47-bc95-eee5975f8203] Received unexpected event network-vif-plugged-08ce136a-85a7-43c8-924c-0bef574f8bfc for instance with vm_state building and task_state spawning. [ 984.990498] env[61839]: DEBUG oslo_vmware.api [None req-c67b0a78-92e0-4ec2-bcc9-0c54d6296d00 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Task: {'id': task-1314889, 'name': DeleteDatastoreFile_Task} progress is 0%.
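The network-vif-plugged sequence above shows the external-event handshake: Neutron reports the event, and the compute manager tries to pop a waiter registered for it. "No waiting events found dispatching ..." plus the WARNING mean no one was waiting, so the event is dropped. A deliberately simplified model of that registry, for orientation only (this is not Nova's implementation):

    import threading

    class InstanceEventRegistry:
        def __init__(self):
            self._waiters = {}  # (instance_uuid, event_name) -> threading.Event

        def prepare(self, instance_uuid, event_name):
            # The spawning side registers before triggering the operation,
            # then blocks on the returned Event with a timeout.
            ev = threading.Event()
            self._waiters[(instance_uuid, event_name)] = ev
            return ev

        def pop(self, instance_uuid, event_name):
            # The event-receiving side pops and signals the waiter; a miss
            # corresponds to the "unexpected event" WARNING above.
            ev = self._waiters.pop((instance_uuid, event_name), None)
            if ev is None:
                print('Received unexpected event %s' % event_name)
            else:
                ev.set()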
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.052268] env[61839]: DEBUG oslo_concurrency.lockutils [None req-51647074-c578-4686-af75-4fafc2fc5db7 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Acquiring lock "interface-d718d866-dd6c-4332-b63a-be6850a5a785-None" by "nova.compute.manager.ComputeManager.attach_interface.<locals>.do_attach_interface" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 985.052771] env[61839]: DEBUG oslo_concurrency.lockutils [None req-51647074-c578-4686-af75-4fafc2fc5db7 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Lock "interface-d718d866-dd6c-4332-b63a-be6850a5a785-None" acquired by "nova.compute.manager.ComputeManager.attach_interface.<locals>.do_attach_interface" :: waited 0.001s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 985.053312] env[61839]: DEBUG nova.objects.instance [None req-51647074-c578-4686-af75-4fafc2fc5db7 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Lazy-loading 'flavor' on Instance uuid d718d866-dd6c-4332-b63a-be6850a5a785 {{(pid=61839) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 985.104133] env[61839]: DEBUG oslo_vmware.api [None req-3eb64946-db7f-4575-9761-49a11077e028 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Task: {'id': task-1314876, 'name': MoveVirtualDisk_Task} progress is 100%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.176051] env[61839]: INFO nova.compute.manager [None req-edb44f49-3fe2-4367-947c-969ef125b33b tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: 694a5d4b-3673-406b-a24a-d37fad33e549] Detaching volume 6d6015d8-0b91-403e-ae0f-19b4e71ccee2 [ 985.211561] env[61839]: INFO nova.virt.block_device [None req-edb44f49-3fe2-4367-947c-969ef125b33b tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: 694a5d4b-3673-406b-a24a-d37fad33e549] Attempting to driver detach volume 6d6015d8-0b91-403e-ae0f-19b4e71ccee2 from mountpoint /dev/sdb [ 985.211957] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-edb44f49-3fe2-4367-947c-969ef125b33b tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: 694a5d4b-3673-406b-a24a-d37fad33e549] Volume detach.
Driver type: vmdk {{(pid=61839) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 985.212310] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-edb44f49-3fe2-4367-947c-969ef125b33b tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: 694a5d4b-3673-406b-a24a-d37fad33e549] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-281409', 'volume_id': '6d6015d8-0b91-403e-ae0f-19b4e71ccee2', 'name': 'volume-6d6015d8-0b91-403e-ae0f-19b4e71ccee2', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attached', 'instance': '694a5d4b-3673-406b-a24a-d37fad33e549', 'attached_at': '', 'detached_at': '', 'volume_id': '6d6015d8-0b91-403e-ae0f-19b4e71ccee2', 'serial': '6d6015d8-0b91-403e-ae0f-19b4e71ccee2'} {{(pid=61839) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 985.213714] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f165e4a-2e64-4d3f-853c-384eaddd6227 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.249941] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fa266ff-3c8d-43be-a268-5c399f489083 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.263938] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5908dc2f-b026-46eb-a05d-51f2282e2379 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.268460] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3560b6a3-38e1-4082-80dc-ba1e60a79e5a tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Acquiring lock "refresh_cache-a87f3a17-0a97-4b47-bc95-eee5975f8203" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 985.268684] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3560b6a3-38e1-4082-80dc-ba1e60a79e5a tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Acquired lock "refresh_cache-a87f3a17-0a97-4b47-bc95-eee5975f8203" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 985.268929] env[61839]: DEBUG nova.network.neutron [None req-3560b6a3-38e1-4082-80dc-ba1e60a79e5a tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: a87f3a17-0a97-4b47-bc95-eee5975f8203] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 985.307620] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce9eaae7-adce-4cfc-a6b0-72a5e2d80724 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.322760] env[61839]: DEBUG oslo_vmware.api [None req-a09bae88-7ca6-454c-87de-086a44c03aaf tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': task-1314887, 'name': DeleteDatastoreFile_Task} progress is 0%. 
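"Volume detach. Driver type: vmdk" reflects a dispatch on connection_info['driver_volume_type'], after which _detach_volume_vmdk receives the data dict dumped above. A sketch of that dispatch shape with stub handlers standing in for Nova's real methods (only the dict values are taken from the log):

    def _detach_volume_vmdk(connection_info, instance):
        pass  # real logic reconfigures the VM to drop the disk, as logged

    def _detach_volume_iscsi(connection_info, instance):
        pass  # stub; iSCSI path not exercised in this log

    def detach_volume(connection_info, instance):
        driver_type = connection_info['driver_volume_type']
        if driver_type == 'vmdk':
            _detach_volume_vmdk(connection_info, instance)
        elif driver_type == 'iscsi':
            _detach_volume_iscsi(connection_info, instance)
        else:
            raise NotImplementedError(driver_type)

    detach_volume({'driver_volume_type': 'vmdk',
                   'data': {'volume': 'vm-281409',
                            'volume_id': '6d6015d8-0b91-403e-ae0f-19b4e71ccee2'}},
                  instance='694a5d4b-3673-406b-a24a-d37fad33e549')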
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.339903] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-edb44f49-3fe2-4367-947c-969ef125b33b tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] The volume has not been displaced from its original location: [datastore1] volume-6d6015d8-0b91-403e-ae0f-19b4e71ccee2/volume-6d6015d8-0b91-403e-ae0f-19b4e71ccee2.vmdk. No consolidation needed. {{(pid=61839) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 985.345831] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-edb44f49-3fe2-4367-947c-969ef125b33b tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: 694a5d4b-3673-406b-a24a-d37fad33e549] Reconfiguring VM instance instance-00000040 to detach disk 2001 {{(pid=61839) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 985.349483] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ac0349d2-d8d7-4248-95db-79942c0b2f2f {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.371521] env[61839]: INFO nova.compute.manager [-] [instance: a262845a-0ae2-4e0e-9040-01f0ed37c95c] Took 1.05 seconds to deallocate network for instance. [ 985.386188] env[61839]: DEBUG oslo_vmware.api [None req-edb44f49-3fe2-4367-947c-969ef125b33b tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Waiting for the task: (returnval){ [ 985.386188] env[61839]: value = "task-1314890" [ 985.386188] env[61839]: _type = "Task" [ 985.386188] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 985.395402] env[61839]: DEBUG oslo_vmware.api [None req-a7050108-7622-4d4a-b278-16e5711e81d2 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1314884, 'name': DeleteDatastoreFile_Task, 'duration_secs': 1.473323} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 985.396357] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-a7050108-7622-4d4a-b278-16e5711e81d2 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Deleted the datastore file {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 985.396731] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-a7050108-7622-4d4a-b278-16e5711e81d2 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 73b83239-bbc8-41d1-aec3-2b4519c320af] Deleted contents of the VM from datastore datastore1 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 985.397067] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-a7050108-7622-4d4a-b278-16e5711e81d2 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 73b83239-bbc8-41d1-aec3-2b4519c320af] Instance destroyed {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 985.408866] env[61839]: DEBUG oslo_vmware.api [None req-edb44f49-3fe2-4367-947c-969ef125b33b tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Task: {'id': task-1314890, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.433546] env[61839]: DEBUG oslo_vmware.api [None req-3df55af6-5aa0-4404-b9ac-92a7809ebceb tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]520099eb-29eb-6012-bfca-77436bd9de0d, 'name': SearchDatastore_Task, 'duration_secs': 0.012868} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 985.434207] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3df55af6-5aa0-4404-b9ac-92a7809ebceb tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 985.434207] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3df55af6-5aa0-4404-b9ac-92a7809ebceb tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 985.489840] env[61839]: DEBUG oslo_vmware.api [None req-c67b0a78-92e0-4ec2-bcc9-0c54d6296d00 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Task: {'id': task-1314889, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.665323} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 985.490252] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-c67b0a78-92e0-4ec2-bcc9-0c54d6296d00 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Deleted the datastore file {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 985.490480] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-c67b0a78-92e0-4ec2-bcc9-0c54d6296d00 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 603191b6-a4b0-451b-b98b-f3dbfb684300] Deleted contents of the VM from datastore datastore2 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 985.490704] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-c67b0a78-92e0-4ec2-bcc9-0c54d6296d00 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 603191b6-a4b0-451b-b98b-f3dbfb684300] Instance destroyed {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 985.490849] env[61839]: INFO nova.compute.manager [None req-c67b0a78-92e0-4ec2-bcc9-0c54d6296d00 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 603191b6-a4b0-451b-b98b-f3dbfb684300] Took 1.69 seconds to destroy the instance on the hypervisor. [ 985.491182] env[61839]: DEBUG oslo.service.loopingcall [None req-c67b0a78-92e0-4ec2-bcc9-0c54d6296d00 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 985.491399] env[61839]: DEBUG nova.compute.manager [-] [instance: 603191b6-a4b0-451b-b98b-f3dbfb684300] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 985.491497] env[61839]: DEBUG nova.network.neutron [-] [instance: 603191b6-a4b0-451b-b98b-f3dbfb684300] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 985.609798] env[61839]: DEBUG oslo_vmware.api [None req-3eb64946-db7f-4575-9761-49a11077e028 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Task: {'id': task-1314876, 'name': MoveVirtualDisk_Task, 'duration_secs': 3.286404} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 985.610089] env[61839]: INFO nova.virt.vmwareapi.ds_util [None req-3eb64946-db7f-4575-9761-49a11077e028 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_9a9a8aa9-bf08-45d3-9f3f-656fb9b22c21/OSTACK_IMG_9a9a8aa9-bf08-45d3-9f3f-656fb9b22c21.vmdk to [datastore1] devstack-image-cache_base/5fe38bf0-d665-4992-a6d0-c15bcea2316d/5fe38bf0-d665-4992-a6d0-c15bcea2316d.vmdk.
[ 985.610306] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-3eb64946-db7f-4575-9761-49a11077e028 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] [instance: 86525ea7-af75-4b10-85a1-c0fbab73ea5f] Cleaning up location [datastore1] OSTACK_IMG_9a9a8aa9-bf08-45d3-9f3f-656fb9b22c21 {{(pid=61839) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 985.610492] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-3eb64946-db7f-4575-9761-49a11077e028 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_9a9a8aa9-bf08-45d3-9f3f-656fb9b22c21 {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 985.610758] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-785e18c6-4340-465d-9337-30130912c556 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.618631] env[61839]: DEBUG oslo_vmware.api [None req-3eb64946-db7f-4575-9761-49a11077e028 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Waiting for the task: (returnval){ [ 985.618631] env[61839]: value = "task-1314891" [ 985.618631] env[61839]: _type = "Task" [ 985.618631] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 985.627274] env[61839]: DEBUG oslo_vmware.api [None req-3eb64946-db7f-4575-9761-49a11077e028 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Task: {'id': task-1314891, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.629960] env[61839]: DEBUG nova.network.neutron [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] [instance: 86525ea7-af75-4b10-85a1-c0fbab73ea5f] Updating instance_info_cache with network_info: [{"id": "ef8176cf-7494-44f4-a600-7dedff162419", "address": "fa:16:3e:c1:30:b3", "network": {"id": "8bda8ac4-b34c-4577-ae5e-07845e9e7428", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-74282814-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c7a300fe2748456bb4a522a4d7c0d0f4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d1e1e320-ec56-4fcc-b6e9-30aa210d3b36", "external-id": "nsx-vlan-transportzone-447", "segmentation_id": 447, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapef8176cf-74", "ovs_interfaceid": "ef8176cf-7494-44f4-a600-7dedff162419", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 985.643150] env[61839]: DEBUG nova.objects.instance [None req-51647074-c578-4686-af75-4fafc2fc5db7 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Lazy-loading 'pci_requests' on Instance uuid d718d866-dd6c-4332-b63a-be6850a5a785 {{(pid=61839) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 985.692101] env[61839]: DEBUG oslo_concurrency.lockutils [None req-c6766b65-c08e-4c6d-b0c1-01cca2ee36b8 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Acquiring lock "fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 985.692374] env[61839]: DEBUG oslo_concurrency.lockutils [None req-c6766b65-c08e-4c6d-b0c1-01cca2ee36b8 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Lock "fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 985.692582] env[61839]: DEBUG oslo_concurrency.lockutils [None req-c6766b65-c08e-4c6d-b0c1-01cca2ee36b8 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Acquiring lock "fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 985.692768] env[61839]: DEBUG oslo_concurrency.lockutils [None req-c6766b65-c08e-4c6d-b0c1-01cca2ee36b8 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Lock "fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 985.692978] env[61839]: DEBUG oslo_concurrency.lockutils [None req-c6766b65-c08e-4c6d-b0c1-01cca2ee36b8 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Lock "fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 985.695171] env[61839]: INFO nova.compute.manager [None req-c6766b65-c08e-4c6d-b0c1-01cca2ee36b8 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4] Terminating instance [ 985.697856] env[61839]: DEBUG nova.compute.manager [None req-c6766b65-c08e-4c6d-b0c1-01cca2ee36b8 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4] Start destroying the instance on the hypervisor. {{(pid=61839) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 985.698089] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-c6766b65-c08e-4c6d-b0c1-01cca2ee36b8 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4] Destroying instance {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 985.698963] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f4d35a3-808e-40ac-8497-fcc1a2838c3c {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.706961] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6766b65-c08e-4c6d-b0c1-01cca2ee36b8 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4] Powering off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 985.707267] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6f74d7eb-9029-4786-9b4b-53d40c0d9569 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.715081] env[61839]: DEBUG oslo_vmware.api [None req-c6766b65-c08e-4c6d-b0c1-01cca2ee36b8 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Waiting for the task: (returnval){ [ 985.715081] env[61839]: value = "task-1314892" [ 985.715081] env[61839]: _type = "Task" [ 985.715081] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 985.723912] env[61839]: DEBUG oslo_vmware.api [None req-c6766b65-c08e-4c6d-b0c1-01cca2ee36b8 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1314892, 'name': PowerOffVM_Task} progress is 0%.
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.819454] env[61839]: DEBUG oslo_vmware.api [None req-a09bae88-7ca6-454c-87de-086a44c03aaf tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': task-1314887, 'name': DeleteDatastoreFile_Task, 'duration_secs': 1.245341} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 985.819454] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-a09bae88-7ca6-454c-87de-086a44c03aaf tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Deleted the datastore file {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 985.819454] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-a09bae88-7ca6-454c-87de-086a44c03aaf tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 3c832102-cacc-4dd8-a336-2aa1d8bd8116] Deleted contents of the VM from datastore datastore1 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 985.819454] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-a09bae88-7ca6-454c-87de-086a44c03aaf tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 3c832102-cacc-4dd8-a336-2aa1d8bd8116] Instance destroyed {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 985.819454] env[61839]: INFO nova.compute.manager [None req-a09bae88-7ca6-454c-87de-086a44c03aaf tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 3c832102-cacc-4dd8-a336-2aa1d8bd8116] Took 2.23 seconds to destroy the instance on the hypervisor. [ 985.819454] env[61839]: DEBUG oslo.service.loopingcall [None req-a09bae88-7ca6-454c-87de-086a44c03aaf tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 985.819454] env[61839]: DEBUG nova.compute.manager [-] [instance: 3c832102-cacc-4dd8-a336-2aa1d8bd8116] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 985.819454] env[61839]: DEBUG nova.network.neutron [-] [instance: 3c832102-cacc-4dd8-a336-2aa1d8bd8116] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 985.859072] env[61839]: DEBUG nova.network.neutron [None req-3560b6a3-38e1-4082-80dc-ba1e60a79e5a tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: a87f3a17-0a97-4b47-bc95-eee5975f8203] Instance cache missing network info.
{{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 985.886380] env[61839]: DEBUG oslo_concurrency.lockutils [None req-89dd6587-ff08-49b7-ad7f-f8fceec9456a tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 985.901548] env[61839]: DEBUG oslo_vmware.api [None req-edb44f49-3fe2-4367-947c-969ef125b33b tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Task: {'id': task-1314890, 'name': ReconfigVM_Task, 'duration_secs': 0.239301} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 985.904023] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-edb44f49-3fe2-4367-947c-969ef125b33b tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: 694a5d4b-3673-406b-a24a-d37fad33e549] Reconfigured VM instance instance-00000040 to detach disk 2001 {{(pid=61839) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 985.909183] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-91f552e5-eb9e-4cc4-8261-9a8321ea2c16 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.932972] env[61839]: DEBUG oslo_vmware.api [None req-edb44f49-3fe2-4367-947c-969ef125b33b tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Waiting for the task: (returnval){ [ 985.932972] env[61839]: value = "task-1314893" [ 985.932972] env[61839]: _type = "Task" [ 985.932972] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 985.946329] env[61839]: DEBUG oslo_vmware.api [None req-edb44f49-3fe2-4367-947c-969ef125b33b tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Task: {'id': task-1314893, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.067238] env[61839]: DEBUG nova.network.neutron [None req-3560b6a3-38e1-4082-80dc-ba1e60a79e5a tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: a87f3a17-0a97-4b47-bc95-eee5975f8203] Updating instance_info_cache with network_info: [{"id": "08ce136a-85a7-43c8-924c-0bef574f8bfc", "address": "fa:16:3e:ae:e3:30", "network": {"id": "47b25b31-1262-485a-a847-2918f7fce488", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-914746239-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "25686a503d044467a1d641f14e14c65c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8c58d99d-ec12-4fc3-ab39-042b3f8cbb89", "external-id": "nsx-vlan-transportzone-44", "segmentation_id": 44, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap08ce136a-85", "ovs_interfaceid": "08ce136a-85a7-43c8-924c-0bef574f8bfc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 986.128435] env[61839]: DEBUG oslo_vmware.api [None req-3eb64946-db7f-4575-9761-49a11077e028 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Task: {'id': task-1314891, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.162776} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 986.130957] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-3eb64946-db7f-4575-9761-49a11077e028 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Deleted the datastore file {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 986.131315] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3eb64946-db7f-4575-9761-49a11077e028 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Releasing lock "[datastore1] devstack-image-cache_base/5fe38bf0-d665-4992-a6d0-c15bcea2316d/5fe38bf0-d665-4992-a6d0-c15bcea2316d.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 986.131524] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-3eb64946-db7f-4575-9761-49a11077e028 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/5fe38bf0-d665-4992-a6d0-c15bcea2316d/5fe38bf0-d665-4992-a6d0-c15bcea2316d.vmdk to [datastore1] 86525ea7-af75-4b10-85a1-c0fbab73ea5f/86525ea7-af75-4b10-85a1-c0fbab73ea5f.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 986.132267] env[61839]: DEBUG oslo_concurrency.lockutils [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Releasing lock "refresh_cache-86525ea7-af75-4b10-85a1-c0fbab73ea5f" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 986.132373] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] [instance: 86525ea7-af75-4b10-85a1-c0fbab73ea5f] Updated the network info_cache for instance {{(pid=61839) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9999}} [ 986.132563] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-eefb8869-dfe7-4a28-9c25-4fef6c423b61 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.138656] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 986.138855] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 986.139456] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 986.139657] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 986.139803] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] 
CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61839) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 986.139976] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 986.145372] env[61839]: DEBUG nova.objects.base [None req-51647074-c578-4686-af75-4fafc2fc5db7 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Object Instance lazy-loaded attributes: flavor,pci_requests {{(pid=61839) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 986.145594] env[61839]: DEBUG nova.network.neutron [None req-51647074-c578-4686-af75-4fafc2fc5db7 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: d718d866-dd6c-4332-b63a-be6850a5a785] allocate_for_instance() {{(pid=61839) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 986.147706] env[61839]: DEBUG oslo_vmware.api [None req-3eb64946-db7f-4575-9761-49a11077e028 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Waiting for the task: (returnval){ [ 986.147706] env[61839]: value = "task-1314894" [ 986.147706] env[61839]: _type = "Task" [ 986.147706] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 986.161328] env[61839]: DEBUG oslo_vmware.api [None req-3eb64946-db7f-4575-9761-49a11077e028 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Task: {'id': task-1314894, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.199516] env[61839]: DEBUG nova.policy [None req-51647074-c578-4686-af75-4fafc2fc5db7 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '76a29e808031416ab8895e89c337be6e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7e03467b7fba46a9aac1562a1cb8368e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61839) authorize /opt/stack/nova/nova/policy.py:201}} [ 986.228262] env[61839]: DEBUG oslo_vmware.api [None req-c6766b65-c08e-4c6d-b0c1-01cca2ee36b8 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1314892, 'name': PowerOffVM_Task, 'duration_secs': 0.179412} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 986.228545] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6766b65-c08e-4c6d-b0c1-01cca2ee36b8 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4] Powered off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 986.228718] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-c6766b65-c08e-4c6d-b0c1-01cca2ee36b8 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4] Unregistering the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 986.229010] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0d3c1b83-53c5-4ec0-b430-38d964f6f8ac {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.241140] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-476ea4f3-fb5a-4c33-8c76-482ca5f9e6d1 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.249036] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f17d3c2-987f-4141-824c-d74bfee20ff1 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.297697] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c358b4fa-7f83-4cf2-abd6-e1d242b562be {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.305983] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d9cc4a4-0279-415a-88aa-7fb4ed837fd2 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.319861] env[61839]: DEBUG nova.compute.provider_tree [None req-3df55af6-5aa0-4404-b9ac-92a7809ebceb tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 986.432747] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-c6766b65-c08e-4c6d-b0c1-01cca2ee36b8 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4] Unregistered the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 986.433079] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-c6766b65-c08e-4c6d-b0c1-01cca2ee36b8 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4] Deleting contents of the VM from datastore datastore2 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 986.433520] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-c6766b65-c08e-4c6d-b0c1-01cca2ee36b8 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Deleting the datastore file [datastore2] 
fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4 {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 986.433897] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-169595db-76d1-4d1e-abe8-2c81bfdf41b4 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.447306] env[61839]: DEBUG oslo_vmware.api [None req-edb44f49-3fe2-4367-947c-969ef125b33b tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Task: {'id': task-1314893, 'name': ReconfigVM_Task, 'duration_secs': 0.149446} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 986.449344] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-edb44f49-3fe2-4367-947c-969ef125b33b tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: 694a5d4b-3673-406b-a24a-d37fad33e549] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-281409', 'volume_id': '6d6015d8-0b91-403e-ae0f-19b4e71ccee2', 'name': 'volume-6d6015d8-0b91-403e-ae0f-19b4e71ccee2', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attached', 'instance': '694a5d4b-3673-406b-a24a-d37fad33e549', 'attached_at': '', 'detached_at': '', 'volume_id': '6d6015d8-0b91-403e-ae0f-19b4e71ccee2', 'serial': '6d6015d8-0b91-403e-ae0f-19b4e71ccee2'} {{(pid=61839) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 986.451685] env[61839]: DEBUG nova.network.neutron [-] [instance: 603191b6-a4b0-451b-b98b-f3dbfb684300] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 986.453156] env[61839]: DEBUG oslo_vmware.api [None req-c6766b65-c08e-4c6d-b0c1-01cca2ee36b8 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Waiting for the task: (returnval){ [ 986.453156] env[61839]: value = "task-1314896" [ 986.453156] env[61839]: _type = "Task" [ 986.453156] env[61839]: } to complete. 
{{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 986.463216] env[61839]: DEBUG nova.virt.hardware [None req-a7050108-7622-4d4a-b278-16e5711e81d2 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-18T16:51:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-18T16:50:57Z,direct_url=,disk_format='vmdk',id=e497cc62-282a-4a70-9770-22d80d8a1013,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a98e034276e44534a9feace637762da7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-18T16:50:58Z,virtual_size=,visibility=), allow threads: False {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 986.463532] env[61839]: DEBUG nova.virt.hardware [None req-a7050108-7622-4d4a-b278-16e5711e81d2 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Flavor limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 986.464326] env[61839]: DEBUG nova.virt.hardware [None req-a7050108-7622-4d4a-b278-16e5711e81d2 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Image limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 986.466059] env[61839]: DEBUG nova.virt.hardware [None req-a7050108-7622-4d4a-b278-16e5711e81d2 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Flavor pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 986.466059] env[61839]: DEBUG nova.virt.hardware [None req-a7050108-7622-4d4a-b278-16e5711e81d2 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Image pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 986.466059] env[61839]: DEBUG nova.virt.hardware [None req-a7050108-7622-4d4a-b278-16e5711e81d2 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 986.466059] env[61839]: DEBUG nova.virt.hardware [None req-a7050108-7622-4d4a-b278-16e5711e81d2 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 986.466059] env[61839]: DEBUG nova.virt.hardware [None req-a7050108-7622-4d4a-b278-16e5711e81d2 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 986.466059] env[61839]: DEBUG 
nova.virt.hardware [None req-a7050108-7622-4d4a-b278-16e5711e81d2 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Got 1 possible topologies {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 986.466059] env[61839]: DEBUG nova.virt.hardware [None req-a7050108-7622-4d4a-b278-16e5711e81d2 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 986.466059] env[61839]: DEBUG nova.virt.hardware [None req-a7050108-7622-4d4a-b278-16e5711e81d2 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 986.467018] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9e48549-b9d4-4e85-806f-c69b6ff14a4a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.471555] env[61839]: DEBUG nova.network.neutron [None req-51647074-c578-4686-af75-4fafc2fc5db7 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: d718d866-dd6c-4332-b63a-be6850a5a785] Successfully created port: 7aee83a4-620a-48c2-a47b-7d47e05a7a07 {{(pid=61839) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 986.476588] env[61839]: DEBUG oslo_vmware.api [None req-c6766b65-c08e-4c6d-b0c1-01cca2ee36b8 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1314896, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.482981] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-613016a2-ba15-434b-9cc7-1b7ff190fe79 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.499766] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-a7050108-7622-4d4a-b278-16e5711e81d2 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 73b83239-bbc8-41d1-aec3-2b4519c320af] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6c:55:bb', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'eed34ae1-5f7f-4deb-9db8-85eaa1e60c29', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3dbee357-54cc-4fa4-826b-24aa98397b45', 'vif_model': 'vmxnet3'}] {{(pid=61839) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 986.507222] env[61839]: DEBUG oslo.service.loopingcall [None req-a7050108-7622-4d4a-b278-16e5711e81d2 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 986.507510] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 73b83239-bbc8-41d1-aec3-2b4519c320af] Creating VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 986.507736] env[61839]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3c9e5f78-701d-48b2-a4cd-1e39cef3a235 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.528459] env[61839]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 986.528459] env[61839]: value = "task-1314897" [ 986.528459] env[61839]: _type = "Task" [ 986.528459] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 986.539666] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314897, 'name': CreateVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.569041] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3560b6a3-38e1-4082-80dc-ba1e60a79e5a tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Releasing lock "refresh_cache-a87f3a17-0a97-4b47-bc95-eee5975f8203" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 986.569432] env[61839]: DEBUG nova.compute.manager [None req-3560b6a3-38e1-4082-80dc-ba1e60a79e5a tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: a87f3a17-0a97-4b47-bc95-eee5975f8203] Instance network_info: |[{"id": "08ce136a-85a7-43c8-924c-0bef574f8bfc", "address": "fa:16:3e:ae:e3:30", "network": {"id": "47b25b31-1262-485a-a847-2918f7fce488", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-914746239-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "25686a503d044467a1d641f14e14c65c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8c58d99d-ec12-4fc3-ab39-042b3f8cbb89", "external-id": "nsx-vlan-transportzone-44", "segmentation_id": 44, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap08ce136a-85", "ovs_interfaceid": "08ce136a-85a7-43c8-924c-0bef574f8bfc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 986.569886] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-3560b6a3-38e1-4082-80dc-ba1e60a79e5a tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: a87f3a17-0a97-4b47-bc95-eee5975f8203] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ae:e3:30', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8c58d99d-ec12-4fc3-ab39-042b3f8cbb89', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'08ce136a-85a7-43c8-924c-0bef574f8bfc', 'vif_model': 'vmxnet3'}] {{(pid=61839) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 986.577507] env[61839]: DEBUG oslo.service.loopingcall [None req-3560b6a3-38e1-4082-80dc-ba1e60a79e5a tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 986.577805] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a87f3a17-0a97-4b47-bc95-eee5975f8203] Creating VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 986.578105] env[61839]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-cb2dc467-ad70-4e2f-86bc-8603a262f897 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.599575] env[61839]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 986.599575] env[61839]: value = "task-1314898" [ 986.599575] env[61839]: _type = "Task" [ 986.599575] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 986.608454] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314898, 'name': CreateVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.643477] env[61839]: DEBUG oslo_concurrency.lockutils [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 986.662641] env[61839]: DEBUG oslo_vmware.api [None req-3eb64946-db7f-4575-9761-49a11077e028 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Task: {'id': task-1314894, 'name': CopyVirtualDisk_Task} progress is 9%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.705376] env[61839]: DEBUG nova.network.neutron [-] [instance: 3c832102-cacc-4dd8-a336-2aa1d8bd8116] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 986.823782] env[61839]: DEBUG nova.scheduler.client.report [None req-3df55af6-5aa0-4404-b9ac-92a7809ebceb tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 986.956809] env[61839]: INFO nova.compute.manager [-] [instance: 603191b6-a4b0-451b-b98b-f3dbfb684300] Took 1.47 seconds to deallocate network for instance. 
[ 986.978013] env[61839]: DEBUG oslo_vmware.api [None req-c6766b65-c08e-4c6d-b0c1-01cca2ee36b8 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1314896, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.356975} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 986.978013] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-c6766b65-c08e-4c6d-b0c1-01cca2ee36b8 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Deleted the datastore file {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 986.978013] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-c6766b65-c08e-4c6d-b0c1-01cca2ee36b8 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4] Deleted contents of the VM from datastore datastore2 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 986.978013] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-c6766b65-c08e-4c6d-b0c1-01cca2ee36b8 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4] Instance destroyed {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 986.978013] env[61839]: INFO nova.compute.manager [None req-c6766b65-c08e-4c6d-b0c1-01cca2ee36b8 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4] Took 1.28 seconds to destroy the instance on the hypervisor. [ 986.978267] env[61839]: DEBUG oslo.service.loopingcall [None req-c6766b65-c08e-4c6d-b0c1-01cca2ee36b8 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 986.978498] env[61839]: DEBUG nova.compute.manager [-] [instance: fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 986.978660] env[61839]: DEBUG nova.network.neutron [-] [instance: fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 987.011438] env[61839]: DEBUG nova.objects.instance [None req-edb44f49-3fe2-4367-947c-969ef125b33b tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Lazy-loading 'flavor' on Instance uuid 694a5d4b-3673-406b-a24a-d37fad33e549 {{(pid=61839) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 987.042986] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314897, 'name': CreateVM_Task, 'duration_secs': 0.403882} completed successfully.
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 987.043225] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 73b83239-bbc8-41d1-aec3-2b4519c320af] Created VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 987.043949] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a7050108-7622-4d4a-b278-16e5711e81d2 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 987.044166] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a7050108-7622-4d4a-b278-16e5711e81d2 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 987.044519] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a7050108-7622-4d4a-b278-16e5711e81d2 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 987.044801] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4308a59f-4be0-4624-8dfd-eba6daf4e738 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.051434] env[61839]: DEBUG nova.compute.manager [req-a3e7c34f-179a-40e6-b686-82139e3fe05b req-a66fdd88-7730-42c9-97ee-76f696cefd3a service nova] [instance: a87f3a17-0a97-4b47-bc95-eee5975f8203] Received event network-changed-08ce136a-85a7-43c8-924c-0bef574f8bfc {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 987.051557] env[61839]: DEBUG nova.compute.manager [req-a3e7c34f-179a-40e6-b686-82139e3fe05b req-a66fdd88-7730-42c9-97ee-76f696cefd3a service nova] [instance: a87f3a17-0a97-4b47-bc95-eee5975f8203] Refreshing instance network info cache due to event network-changed-08ce136a-85a7-43c8-924c-0bef574f8bfc. 
{{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 987.051725] env[61839]: DEBUG oslo_concurrency.lockutils [req-a3e7c34f-179a-40e6-b686-82139e3fe05b req-a66fdd88-7730-42c9-97ee-76f696cefd3a service nova] Acquiring lock "refresh_cache-a87f3a17-0a97-4b47-bc95-eee5975f8203" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 987.051878] env[61839]: DEBUG oslo_concurrency.lockutils [req-a3e7c34f-179a-40e6-b686-82139e3fe05b req-a66fdd88-7730-42c9-97ee-76f696cefd3a service nova] Acquired lock "refresh_cache-a87f3a17-0a97-4b47-bc95-eee5975f8203" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 987.052064] env[61839]: DEBUG nova.network.neutron [req-a3e7c34f-179a-40e6-b686-82139e3fe05b req-a66fdd88-7730-42c9-97ee-76f696cefd3a service nova] [instance: a87f3a17-0a97-4b47-bc95-eee5975f8203] Refreshing network info cache for port 08ce136a-85a7-43c8-924c-0bef574f8bfc {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 987.055230] env[61839]: DEBUG oslo_vmware.api [None req-a7050108-7622-4d4a-b278-16e5711e81d2 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Waiting for the task: (returnval){ [ 987.055230] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52c81f9c-05e1-2846-5f23-717d03b26580" [ 987.055230] env[61839]: _type = "Task" [ 987.055230] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 987.067312] env[61839]: DEBUG oslo_vmware.api [None req-a7050108-7622-4d4a-b278-16e5711e81d2 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52c81f9c-05e1-2846-5f23-717d03b26580, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.113020] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314898, 'name': CreateVM_Task, 'duration_secs': 0.386934} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 987.113967] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a87f3a17-0a97-4b47-bc95-eee5975f8203] Created VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 987.114131] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3560b6a3-38e1-4082-80dc-ba1e60a79e5a tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 987.161171] env[61839]: DEBUG oslo_vmware.api [None req-3eb64946-db7f-4575-9761-49a11077e028 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Task: {'id': task-1314894, 'name': CopyVirtualDisk_Task} progress is 26%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.208321] env[61839]: INFO nova.compute.manager [-] [instance: 3c832102-cacc-4dd8-a336-2aa1d8bd8116] Took 1.39 seconds to deallocate network for instance. 
[ 987.340406] env[61839]: DEBUG oslo_concurrency.lockutils [None req-17b98ee1-69c2-41fe-9855-70a82fddaa5f tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Acquiring lock "d4a8c153-7585-4c78-8aa4-56077e0a7af6" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 987.341333] env[61839]: DEBUG oslo_concurrency.lockutils [None req-17b98ee1-69c2-41fe-9855-70a82fddaa5f tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Lock "d4a8c153-7585-4c78-8aa4-56077e0a7af6" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.001s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 987.469690] env[61839]: DEBUG oslo_concurrency.lockutils [None req-c67b0a78-92e0-4ec2-bcc9-0c54d6296d00 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 987.575138] env[61839]: DEBUG oslo_vmware.api [None req-a7050108-7622-4d4a-b278-16e5711e81d2 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52c81f9c-05e1-2846-5f23-717d03b26580, 'name': SearchDatastore_Task, 'duration_secs': 0.021603} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 987.575138] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a7050108-7622-4d4a-b278-16e5711e81d2 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 987.575138] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-a7050108-7622-4d4a-b278-16e5711e81d2 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 73b83239-bbc8-41d1-aec3-2b4519c320af] Processing image e497cc62-282a-4a70-9770-22d80d8a1013 {{(pid=61839) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 987.575138] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a7050108-7622-4d4a-b278-16e5711e81d2 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 987.575138] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a7050108-7622-4d4a-b278-16e5711e81d2 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 987.575138] env[61839]: DEBUG nova.virt.vmwareapi.ds_util 
[None req-a7050108-7622-4d4a-b278-16e5711e81d2 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 987.575138] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3560b6a3-38e1-4082-80dc-ba1e60a79e5a tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 987.575346] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3560b6a3-38e1-4082-80dc-ba1e60a79e5a tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 987.575527] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-dc294375-3d1c-481d-bdfb-795b6ed9e5b3 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.578134] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dbba8840-e670-464b-baec-e6a3333f3537 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.586761] env[61839]: DEBUG oslo_vmware.api [None req-3560b6a3-38e1-4082-80dc-ba1e60a79e5a tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Waiting for the task: (returnval){ [ 987.586761] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52726e45-ab26-180f-ab04-48e1bc31be28" [ 987.586761] env[61839]: _type = "Task" [ 987.586761] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 987.591778] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-a7050108-7622-4d4a-b278-16e5711e81d2 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 987.591903] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-a7050108-7622-4d4a-b278-16e5711e81d2 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61839) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 987.593245] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-14939fc4-2e9f-4c71-90e2-ba60673e533c {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.602489] env[61839]: DEBUG oslo_vmware.api [None req-3560b6a3-38e1-4082-80dc-ba1e60a79e5a tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52726e45-ab26-180f-ab04-48e1bc31be28, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.607830] env[61839]: DEBUG oslo_vmware.api [None req-a7050108-7622-4d4a-b278-16e5711e81d2 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Waiting for the task: (returnval){ [ 987.607830] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52d0d49a-0396-443a-7a63-2be4f2c21425" [ 987.607830] env[61839]: _type = "Task" [ 987.607830] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 987.619472] env[61839]: DEBUG oslo_vmware.api [None req-a7050108-7622-4d4a-b278-16e5711e81d2 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52d0d49a-0396-443a-7a63-2be4f2c21425, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.662914] env[61839]: DEBUG oslo_vmware.api [None req-3eb64946-db7f-4575-9761-49a11077e028 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Task: {'id': task-1314894, 'name': CopyVirtualDisk_Task} progress is 46%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.716478] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a09bae88-7ca6-454c-87de-086a44c03aaf tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 987.791284] env[61839]: DEBUG nova.network.neutron [req-a3e7c34f-179a-40e6-b686-82139e3fe05b req-a66fdd88-7730-42c9-97ee-76f696cefd3a service nova] [instance: a87f3a17-0a97-4b47-bc95-eee5975f8203] Updated VIF entry in instance network info cache for port 08ce136a-85a7-43c8-924c-0bef574f8bfc. 
{{(pid=61839) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 987.791284] env[61839]: DEBUG nova.network.neutron [req-a3e7c34f-179a-40e6-b686-82139e3fe05b req-a66fdd88-7730-42c9-97ee-76f696cefd3a service nova] [instance: a87f3a17-0a97-4b47-bc95-eee5975f8203] Updating instance_info_cache with network_info: [{"id": "08ce136a-85a7-43c8-924c-0bef574f8bfc", "address": "fa:16:3e:ae:e3:30", "network": {"id": "47b25b31-1262-485a-a847-2918f7fce488", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-914746239-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "25686a503d044467a1d641f14e14c65c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8c58d99d-ec12-4fc3-ab39-042b3f8cbb89", "external-id": "nsx-vlan-transportzone-44", "segmentation_id": 44, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap08ce136a-85", "ovs_interfaceid": "08ce136a-85a7-43c8-924c-0bef574f8bfc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 987.836587] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3df55af6-5aa0-4404-b9ac-92a7809ebceb tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.402s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 987.839701] env[61839]: DEBUG oslo_concurrency.lockutils [None req-89dd6587-ff08-49b7-ad7f-f8fceec9456a tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.954s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 987.839948] env[61839]: DEBUG nova.objects.instance [None req-89dd6587-ff08-49b7-ad7f-f8fceec9456a tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Lazy-loading 'resources' on Instance uuid a262845a-0ae2-4e0e-9040-01f0ed37c95c {{(pid=61839) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 987.844445] env[61839]: DEBUG nova.compute.utils [None req-17b98ee1-69c2-41fe-9855-70a82fddaa5f tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Using /dev/sd instead of None {{(pid=61839) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 988.019866] env[61839]: DEBUG oslo_concurrency.lockutils [None req-edb44f49-3fe2-4367-947c-969ef125b33b tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Lock "694a5d4b-3673-406b-a24a-d37fad33e549" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.348s {{(pid=61839) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 988.030155] env[61839]: DEBUG nova.network.neutron [None req-51647074-c578-4686-af75-4fafc2fc5db7 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: d718d866-dd6c-4332-b63a-be6850a5a785] Successfully updated port: 7aee83a4-620a-48c2-a47b-7d47e05a7a07 {{(pid=61839) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 988.032520] env[61839]: DEBUG nova.network.neutron [-] [instance: fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 988.100394] env[61839]: DEBUG oslo_vmware.api [None req-3560b6a3-38e1-4082-80dc-ba1e60a79e5a tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52726e45-ab26-180f-ab04-48e1bc31be28, 'name': SearchDatastore_Task, 'duration_secs': 0.014783} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 988.100712] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3560b6a3-38e1-4082-80dc-ba1e60a79e5a tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 988.100960] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-3560b6a3-38e1-4082-80dc-ba1e60a79e5a tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: a87f3a17-0a97-4b47-bc95-eee5975f8203] Processing image e497cc62-282a-4a70-9770-22d80d8a1013 {{(pid=61839) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 988.101218] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3560b6a3-38e1-4082-80dc-ba1e60a79e5a tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 988.120686] env[61839]: DEBUG oslo_vmware.api [None req-a7050108-7622-4d4a-b278-16e5711e81d2 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52d0d49a-0396-443a-7a63-2be4f2c21425, 'name': SearchDatastore_Task, 'duration_secs': 0.014375} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 988.121568] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-15fc9aad-e20e-4f4f-a62b-16826bd7f44c {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.131985] env[61839]: DEBUG oslo_vmware.api [None req-a7050108-7622-4d4a-b278-16e5711e81d2 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Waiting for the task: (returnval){ [ 988.131985] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]5249bf20-9db3-acdd-73c9-7bcf09b8784f" [ 988.131985] env[61839]: _type = "Task" [ 988.131985] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 988.142565] env[61839]: DEBUG oslo_vmware.api [None req-a7050108-7622-4d4a-b278-16e5711e81d2 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]5249bf20-9db3-acdd-73c9-7bcf09b8784f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.162429] env[61839]: DEBUG oslo_vmware.api [None req-3eb64946-db7f-4575-9761-49a11077e028 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Task: {'id': task-1314894, 'name': CopyVirtualDisk_Task} progress is 69%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.293064] env[61839]: DEBUG oslo_concurrency.lockutils [req-a3e7c34f-179a-40e6-b686-82139e3fe05b req-a66fdd88-7730-42c9-97ee-76f696cefd3a service nova] Releasing lock "refresh_cache-a87f3a17-0a97-4b47-bc95-eee5975f8203" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 988.293376] env[61839]: DEBUG nova.compute.manager [req-a3e7c34f-179a-40e6-b686-82139e3fe05b req-a66fdd88-7730-42c9-97ee-76f696cefd3a service nova] [instance: 603191b6-a4b0-451b-b98b-f3dbfb684300] Received event network-vif-deleted-fc601f8f-cb33-41b2-9f00-9476cd3cbf01 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 988.293623] env[61839]: DEBUG nova.compute.manager [req-a3e7c34f-179a-40e6-b686-82139e3fe05b req-a66fdd88-7730-42c9-97ee-76f696cefd3a service nova] [instance: 3c832102-cacc-4dd8-a336-2aa1d8bd8116] Received event network-vif-deleted-de313fb8-8012-41f5-b060-5f843422a301 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 988.349330] env[61839]: DEBUG oslo_concurrency.lockutils [None req-17b98ee1-69c2-41fe-9855-70a82fddaa5f tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Lock "d4a8c153-7585-4c78-8aa4-56077e0a7af6" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.008s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 988.400372] env[61839]: INFO nova.scheduler.client.report [None req-3df55af6-5aa0-4404-b9ac-92a7809ebceb tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Deleted allocation for migration 80376647-f9cc-4390-b3a3-e5cadc03cd80 [ 988.532091] env[61839]: DEBUG 
oslo_concurrency.lockutils [None req-51647074-c578-4686-af75-4fafc2fc5db7 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Acquiring lock "refresh_cache-d718d866-dd6c-4332-b63a-be6850a5a785" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 988.532329] env[61839]: DEBUG oslo_concurrency.lockutils [None req-51647074-c578-4686-af75-4fafc2fc5db7 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Acquired lock "refresh_cache-d718d866-dd6c-4332-b63a-be6850a5a785" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 988.532521] env[61839]: DEBUG nova.network.neutron [None req-51647074-c578-4686-af75-4fafc2fc5db7 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: d718d866-dd6c-4332-b63a-be6850a5a785] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 988.537143] env[61839]: INFO nova.compute.manager [-] [instance: fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4] Took 1.56 seconds to deallocate network for instance. [ 988.581355] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-124cbdff-c6c7-4069-a2e9-b92aed131c1b {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.591930] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e27eb8b-fa4b-4e91-9418-9d51e6b346ac {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.624373] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fd9557c-52a0-4fd6-b16b-b9fd4a8b6e24 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.637361] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c333670-8bba-4d6d-9b41-8ec19b05134d {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.649386] env[61839]: DEBUG oslo_vmware.api [None req-a7050108-7622-4d4a-b278-16e5711e81d2 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]5249bf20-9db3-acdd-73c9-7bcf09b8784f, 'name': SearchDatastore_Task, 'duration_secs': 0.024443} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 988.657723] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a7050108-7622-4d4a-b278-16e5711e81d2 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 988.658067] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-a7050108-7622-4d4a-b278-16e5711e81d2 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk to [datastore2] 73b83239-bbc8-41d1-aec3-2b4519c320af/73b83239-bbc8-41d1-aec3-2b4519c320af.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 988.659035] env[61839]: DEBUG nova.compute.provider_tree [None req-89dd6587-ff08-49b7-ad7f-f8fceec9456a tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 988.660951] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3560b6a3-38e1-4082-80dc-ba1e60a79e5a tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 988.661315] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-3560b6a3-38e1-4082-80dc-ba1e60a79e5a tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 988.661575] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9018eb9b-fd88-4235-b211-1e5c7bc80318 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.669574] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d772a8bc-ec34-465a-9651-ef096b1c3672 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.681217] env[61839]: DEBUG oslo_vmware.api [None req-3eb64946-db7f-4575-9761-49a11077e028 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Task: {'id': task-1314894, 'name': CopyVirtualDisk_Task} progress is 88%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.682528] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-3560b6a3-38e1-4082-80dc-ba1e60a79e5a tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 988.682762] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-3560b6a3-38e1-4082-80dc-ba1e60a79e5a tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61839) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 988.684520] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-83c96c84-e479-42b5-9f9c-3ea245ef428b {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.686975] env[61839]: DEBUG oslo_vmware.api [None req-a7050108-7622-4d4a-b278-16e5711e81d2 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Waiting for the task: (returnval){ [ 988.686975] env[61839]: value = "task-1314899" [ 988.686975] env[61839]: _type = "Task" [ 988.686975] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 988.693146] env[61839]: DEBUG oslo_vmware.api [None req-3560b6a3-38e1-4082-80dc-ba1e60a79e5a tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Waiting for the task: (returnval){ [ 988.693146] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52de5076-bbfe-bcd4-838c-d5d05d0cc7ca" [ 988.693146] env[61839]: _type = "Task" [ 988.693146] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 988.696254] env[61839]: DEBUG oslo_vmware.api [None req-a7050108-7622-4d4a-b278-16e5711e81d2 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1314899, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.704418] env[61839]: DEBUG oslo_vmware.api [None req-3560b6a3-38e1-4082-80dc-ba1e60a79e5a tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52de5076-bbfe-bcd4-838c-d5d05d0cc7ca, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.909111] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3df55af6-5aa0-4404-b9ac-92a7809ebceb tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Lock "23ee24d5-bccd-497d-a53f-b9723fd9c707" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 7.144s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 989.046442] env[61839]: DEBUG oslo_concurrency.lockutils [None req-c6766b65-c08e-4c6d-b0c1-01cca2ee36b8 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 989.070103] env[61839]: WARNING nova.network.neutron [None req-51647074-c578-4686-af75-4fafc2fc5db7 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: d718d866-dd6c-4332-b63a-be6850a5a785] 41c98894-de91-45eb-a390-6217e0f9dca5 already exists in list: networks containing: ['41c98894-de91-45eb-a390-6217e0f9dca5']. ignoring it [ 989.162785] env[61839]: DEBUG nova.scheduler.client.report [None req-89dd6587-ff08-49b7-ad7f-f8fceec9456a tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 989.171569] env[61839]: DEBUG oslo_vmware.api [None req-3eb64946-db7f-4575-9761-49a11077e028 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Task: {'id': task-1314894, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.753265} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 989.171830] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-3eb64946-db7f-4575-9761-49a11077e028 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/5fe38bf0-d665-4992-a6d0-c15bcea2316d/5fe38bf0-d665-4992-a6d0-c15bcea2316d.vmdk to [datastore1] 86525ea7-af75-4b10-85a1-c0fbab73ea5f/86525ea7-af75-4b10-85a1-c0fbab73ea5f.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 989.172639] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5baddb18-d075-4e64-bb86-8aa53e97ff3b {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.196229] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-3eb64946-db7f-4575-9761-49a11077e028 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] [instance: 86525ea7-af75-4b10-85a1-c0fbab73ea5f] Reconfiguring VM instance instance-0000003a to attach disk [datastore1] 86525ea7-af75-4b10-85a1-c0fbab73ea5f/86525ea7-af75-4b10-85a1-c0fbab73ea5f.vmdk or device None with type streamOptimized {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 989.199299] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-49feec93-d7d7-44d4-88a3-c77ba3daf4f0 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.228762] env[61839]: DEBUG oslo_vmware.api [None req-a7050108-7622-4d4a-b278-16e5711e81d2 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1314899, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.229054] env[61839]: DEBUG oslo_vmware.api [None req-3560b6a3-38e1-4082-80dc-ba1e60a79e5a tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52de5076-bbfe-bcd4-838c-d5d05d0cc7ca, 'name': SearchDatastore_Task, 'duration_secs': 0.027431} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 989.230875] env[61839]: DEBUG oslo_vmware.api [None req-3eb64946-db7f-4575-9761-49a11077e028 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Waiting for the task: (returnval){ [ 989.230875] env[61839]: value = "task-1314900" [ 989.230875] env[61839]: _type = "Task" [ 989.230875] env[61839]: } to complete. 
{{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 989.231098] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6e1ce4f8-e1f4-4b33-81fb-7c458e7e151b {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.239980] env[61839]: DEBUG oslo_vmware.api [None req-3560b6a3-38e1-4082-80dc-ba1e60a79e5a tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Waiting for the task: (returnval){ [ 989.239980] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]520cdb6e-83f2-2405-159e-2e2f41310a4d" [ 989.239980] env[61839]: _type = "Task" [ 989.239980] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 989.243219] env[61839]: DEBUG oslo_vmware.api [None req-3eb64946-db7f-4575-9761-49a11077e028 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Task: {'id': task-1314900, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.251682] env[61839]: DEBUG oslo_vmware.api [None req-3560b6a3-38e1-4082-80dc-ba1e60a79e5a tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]520cdb6e-83f2-2405-159e-2e2f41310a4d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.329797] env[61839]: DEBUG nova.compute.manager [req-e7fa1039-ec8b-4c8a-b68f-4561243ae2c4 req-275a49fe-d816-41df-878b-3500dbf2f649 service nova] [instance: d718d866-dd6c-4332-b63a-be6850a5a785] Received event network-vif-plugged-7aee83a4-620a-48c2-a47b-7d47e05a7a07 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 989.329797] env[61839]: DEBUG oslo_concurrency.lockutils [req-e7fa1039-ec8b-4c8a-b68f-4561243ae2c4 req-275a49fe-d816-41df-878b-3500dbf2f649 service nova] Acquiring lock "d718d866-dd6c-4332-b63a-be6850a5a785-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 989.329959] env[61839]: DEBUG oslo_concurrency.lockutils [req-e7fa1039-ec8b-4c8a-b68f-4561243ae2c4 req-275a49fe-d816-41df-878b-3500dbf2f649 service nova] Lock "d718d866-dd6c-4332-b63a-be6850a5a785-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 989.330154] env[61839]: DEBUG oslo_concurrency.lockutils [req-e7fa1039-ec8b-4c8a-b68f-4561243ae2c4 req-275a49fe-d816-41df-878b-3500dbf2f649 service nova] Lock "d718d866-dd6c-4332-b63a-be6850a5a785-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 989.330403] env[61839]: DEBUG nova.compute.manager [req-e7fa1039-ec8b-4c8a-b68f-4561243ae2c4 req-275a49fe-d816-41df-878b-3500dbf2f649 service nova] [instance: d718d866-dd6c-4332-b63a-be6850a5a785] No waiting events found dispatching 
network-vif-plugged-7aee83a4-620a-48c2-a47b-7d47e05a7a07 {{(pid=61839) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 989.330591] env[61839]: WARNING nova.compute.manager [req-e7fa1039-ec8b-4c8a-b68f-4561243ae2c4 req-275a49fe-d816-41df-878b-3500dbf2f649 service nova] [instance: d718d866-dd6c-4332-b63a-be6850a5a785] Received unexpected event network-vif-plugged-7aee83a4-620a-48c2-a47b-7d47e05a7a07 for instance with vm_state active and task_state None. [ 989.399993] env[61839]: DEBUG nova.network.neutron [None req-51647074-c578-4686-af75-4fafc2fc5db7 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: d718d866-dd6c-4332-b63a-be6850a5a785] Updating instance_info_cache with network_info: [{"id": "5d041d96-4a6e-44d5-a31a-e597194524e0", "address": "fa:16:3e:a9:bd:d2", "network": {"id": "41c98894-de91-45eb-a390-6217e0f9dca5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1040806357-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.245", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7e03467b7fba46a9aac1562a1cb8368e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "572b7281-aad3-45fa-9cb2-fc1c70569948", "external-id": "nsx-vlan-transportzone-722", "segmentation_id": 722, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5d041d96-4a", "ovs_interfaceid": "5d041d96-4a6e-44d5-a31a-e597194524e0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "7aee83a4-620a-48c2-a47b-7d47e05a7a07", "address": "fa:16:3e:11:b9:34", "network": {"id": "41c98894-de91-45eb-a390-6217e0f9dca5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1040806357-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7e03467b7fba46a9aac1562a1cb8368e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "572b7281-aad3-45fa-9cb2-fc1c70569948", "external-id": "nsx-vlan-transportzone-722", "segmentation_id": 722, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7aee83a4-62", "ovs_interfaceid": "7aee83a4-620a-48c2-a47b-7d47e05a7a07", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 989.415892] env[61839]: DEBUG oslo_concurrency.lockutils [None req-17b98ee1-69c2-41fe-9855-70a82fddaa5f tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Acquiring lock 
"d4a8c153-7585-4c78-8aa4-56077e0a7af6" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 989.416180] env[61839]: DEBUG oslo_concurrency.lockutils [None req-17b98ee1-69c2-41fe-9855-70a82fddaa5f tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Lock "d4a8c153-7585-4c78-8aa4-56077e0a7af6" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 989.416528] env[61839]: INFO nova.compute.manager [None req-17b98ee1-69c2-41fe-9855-70a82fddaa5f tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] [instance: d4a8c153-7585-4c78-8aa4-56077e0a7af6] Attaching volume 6d100a91-2536-4c04-8112-17210c05edfb to /dev/sdb [ 989.451874] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c06a5f8d-999c-4698-a887-04ba2714087c {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.459707] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbaf7963-149f-40c9-bce7-572cefdd23e6 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.479660] env[61839]: DEBUG nova.virt.block_device [None req-17b98ee1-69c2-41fe-9855-70a82fddaa5f tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] [instance: d4a8c153-7585-4c78-8aa4-56077e0a7af6] Updating existing volume attachment record: aba06497-77db-405a-8b08-89ea861e36ef {{(pid=61839) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 989.667956] env[61839]: DEBUG oslo_concurrency.lockutils [None req-89dd6587-ff08-49b7-ad7f-f8fceec9456a tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.828s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 989.670449] env[61839]: DEBUG oslo_concurrency.lockutils [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 3.027s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 989.670635] env[61839]: DEBUG oslo_concurrency.lockutils [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 989.670795] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61839) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 989.671165] env[61839]: DEBUG oslo_concurrency.lockutils [None req-c67b0a78-92e0-4ec2-bcc9-0c54d6296d00 tempest-ServerRescueNegativeTestJSON-712447946 
tempest-ServerRescueNegativeTestJSON-712447946-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.202s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 989.671406] env[61839]: DEBUG nova.objects.instance [None req-c67b0a78-92e0-4ec2-bcc9-0c54d6296d00 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Lazy-loading 'resources' on Instance uuid 603191b6-a4b0-451b-b98b-f3dbfb684300 {{(pid=61839) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 989.673899] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26a7d6cb-f902-4d89-b165-2cab22d85b7f {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.683743] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5465bcdb-a85d-4224-9d7a-c5ccf78e39ba {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.700783] env[61839]: INFO nova.scheduler.client.report [None req-89dd6587-ff08-49b7-ad7f-f8fceec9456a tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Deleted allocations for instance a262845a-0ae2-4e0e-9040-01f0ed37c95c [ 989.705536] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11fb3594-e096-45a4-a2cb-087532f4e315 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.717796] env[61839]: DEBUG oslo_vmware.api [None req-a7050108-7622-4d4a-b278-16e5711e81d2 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1314899, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.721375] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ea871fb-4b0b-4f7d-8dab-3406acd4643d {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.754309] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180053MB free_disk=134GB free_vcpus=48 pci_devices=None {{(pid=61839) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 989.754478] env[61839]: DEBUG oslo_concurrency.lockutils [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 989.769368] env[61839]: DEBUG oslo_vmware.api [None req-3eb64946-db7f-4575-9761-49a11077e028 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Task: {'id': task-1314900, 'name': ReconfigVM_Task, 'duration_secs': 0.304896} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 989.773147] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-3eb64946-db7f-4575-9761-49a11077e028 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] [instance: 86525ea7-af75-4b10-85a1-c0fbab73ea5f] Reconfigured VM instance instance-0000003a to attach disk [datastore1] 86525ea7-af75-4b10-85a1-c0fbab73ea5f/86525ea7-af75-4b10-85a1-c0fbab73ea5f.vmdk or device None with type streamOptimized {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 989.773909] env[61839]: DEBUG oslo_vmware.api [None req-3560b6a3-38e1-4082-80dc-ba1e60a79e5a tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]520cdb6e-83f2-2405-159e-2e2f41310a4d, 'name': SearchDatastore_Task, 'duration_secs': 0.035798} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 989.774464] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9f240b7d-a01e-4030-8ccb-375e284ca292 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.776438] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3560b6a3-38e1-4082-80dc-ba1e60a79e5a tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 989.776722] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-3560b6a3-38e1-4082-80dc-ba1e60a79e5a tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk to [datastore2] a87f3a17-0a97-4b47-bc95-eee5975f8203/a87f3a17-0a97-4b47-bc95-eee5975f8203.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 989.777026] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-774fec65-9673-4c27-8e1b-4a14874384da {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.784702] env[61839]: DEBUG nova.compute.manager [req-057b5fc0-85b8-4c9b-bad8-d3dcf00c6969 req-8199214d-ec65-4f79-b1ea-798ee3b9ac5d service nova] [instance: fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4] Received event network-vif-deleted-bc315481-8651-4be3-bdd5-269b569b2817 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 989.788102] env[61839]: DEBUG oslo_vmware.api [None req-3560b6a3-38e1-4082-80dc-ba1e60a79e5a tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Waiting for the task: (returnval){ [ 989.788102] env[61839]: value = "task-1314903" [ 989.788102] env[61839]: _type = "Task" [ 989.788102] env[61839]: } to complete. 
{{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 989.788770] env[61839]: DEBUG oslo_vmware.api [None req-3eb64946-db7f-4575-9761-49a11077e028 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Waiting for the task: (returnval){ [ 989.788770] env[61839]: value = "task-1314902" [ 989.788770] env[61839]: _type = "Task" [ 989.788770] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 989.805432] env[61839]: DEBUG oslo_vmware.api [None req-3eb64946-db7f-4575-9761-49a11077e028 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Task: {'id': task-1314902, 'name': Rename_Task} progress is 5%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.806079] env[61839]: DEBUG oslo_vmware.api [None req-3560b6a3-38e1-4082-80dc-ba1e60a79e5a tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': task-1314903, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.903398] env[61839]: DEBUG oslo_concurrency.lockutils [None req-51647074-c578-4686-af75-4fafc2fc5db7 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Releasing lock "refresh_cache-d718d866-dd6c-4332-b63a-be6850a5a785" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 989.904122] env[61839]: DEBUG oslo_concurrency.lockutils [None req-51647074-c578-4686-af75-4fafc2fc5db7 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Acquiring lock "d718d866-dd6c-4332-b63a-be6850a5a785" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 989.904286] env[61839]: DEBUG oslo_concurrency.lockutils [None req-51647074-c578-4686-af75-4fafc2fc5db7 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Acquired lock "d718d866-dd6c-4332-b63a-be6850a5a785" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 989.905621] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5d5765c-2177-4f0f-bd26-5f00019c3bd7 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.923350] env[61839]: DEBUG nova.virt.hardware [None req-51647074-c578-4686-af75-4fafc2fc5db7 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-18T16:51:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61839) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:563}} [ 989.923634] env[61839]: DEBUG nova.virt.hardware [None req-51647074-c578-4686-af75-4fafc2fc5db7 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Flavor limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 989.923806] env[61839]: DEBUG nova.virt.hardware [None req-51647074-c578-4686-af75-4fafc2fc5db7 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Image limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 989.923997] env[61839]: DEBUG nova.virt.hardware [None req-51647074-c578-4686-af75-4fafc2fc5db7 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Flavor pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 989.924169] env[61839]: DEBUG nova.virt.hardware [None req-51647074-c578-4686-af75-4fafc2fc5db7 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Image pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 989.924323] env[61839]: DEBUG nova.virt.hardware [None req-51647074-c578-4686-af75-4fafc2fc5db7 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 989.924534] env[61839]: DEBUG nova.virt.hardware [None req-51647074-c578-4686-af75-4fafc2fc5db7 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 989.924696] env[61839]: DEBUG nova.virt.hardware [None req-51647074-c578-4686-af75-4fafc2fc5db7 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 989.924866] env[61839]: DEBUG nova.virt.hardware [None req-51647074-c578-4686-af75-4fafc2fc5db7 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Got 1 possible topologies {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 989.925118] env[61839]: DEBUG nova.virt.hardware [None req-51647074-c578-4686-af75-4fafc2fc5db7 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 989.925316] env[61839]: DEBUG nova.virt.hardware [None req-51647074-c578-4686-af75-4fafc2fc5db7 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 989.931586] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-51647074-c578-4686-af75-4fafc2fc5db7 
tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: d718d866-dd6c-4332-b63a-be6850a5a785] Reconfiguring VM to attach interface {{(pid=61839) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 989.931943] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-27e3f414-b54c-4c91-8b48-97a83a76eba5 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.951676] env[61839]: DEBUG oslo_vmware.api [None req-51647074-c578-4686-af75-4fafc2fc5db7 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Waiting for the task: (returnval){ [ 989.951676] env[61839]: value = "task-1314904" [ 989.951676] env[61839]: _type = "Task" [ 989.951676] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 989.960234] env[61839]: DEBUG oslo_vmware.api [None req-51647074-c578-4686-af75-4fafc2fc5db7 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': task-1314904, 'name': ReconfigVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.216907] env[61839]: DEBUG oslo_vmware.api [None req-a7050108-7622-4d4a-b278-16e5711e81d2 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1314899, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.163527} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 990.220243] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-a7050108-7622-4d4a-b278-16e5711e81d2 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk to [datastore2] 73b83239-bbc8-41d1-aec3-2b4519c320af/73b83239-bbc8-41d1-aec3-2b4519c320af.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 990.220539] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-a7050108-7622-4d4a-b278-16e5711e81d2 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 73b83239-bbc8-41d1-aec3-2b4519c320af] Extending root virtual disk to 1048576 {{(pid=61839) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 990.221157] env[61839]: DEBUG oslo_concurrency.lockutils [None req-89dd6587-ff08-49b7-ad7f-f8fceec9456a tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Lock "a262845a-0ae2-4e0e-9040-01f0ed37c95c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.086s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 990.222534] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5b997eb7-147f-4b55-8672-22dcbdce8777 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.232532] env[61839]: DEBUG oslo_vmware.api [None req-a7050108-7622-4d4a-b278-16e5711e81d2 tempest-ServerActionsTestOtherA-731553444 
tempest-ServerActionsTestOtherA-731553444-project-member] Waiting for the task: (returnval){ [ 990.232532] env[61839]: value = "task-1314905" [ 990.232532] env[61839]: _type = "Task" [ 990.232532] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 990.250467] env[61839]: DEBUG oslo_vmware.api [None req-a7050108-7622-4d4a-b278-16e5711e81d2 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1314905, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.321750] env[61839]: DEBUG oslo_vmware.api [None req-3560b6a3-38e1-4082-80dc-ba1e60a79e5a tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': task-1314903, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.322175] env[61839]: DEBUG oslo_vmware.api [None req-3eb64946-db7f-4575-9761-49a11077e028 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Task: {'id': task-1314902, 'name': Rename_Task, 'duration_secs': 0.161386} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 990.326287] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-3eb64946-db7f-4575-9761-49a11077e028 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] [instance: 86525ea7-af75-4b10-85a1-c0fbab73ea5f] Powering on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 990.326809] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7a124ad3-0e41-43e3-b8a2-31e20d8a16c5 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.335947] env[61839]: DEBUG oslo_vmware.api [None req-3eb64946-db7f-4575-9761-49a11077e028 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Waiting for the task: (returnval){ [ 990.335947] env[61839]: value = "task-1314906" [ 990.335947] env[61839]: _type = "Task" [ 990.335947] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 990.349220] env[61839]: DEBUG oslo_vmware.api [None req-3eb64946-db7f-4575-9761-49a11077e028 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Task: {'id': task-1314906, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.429396] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6c33be5-0eb5-4e32-b4f7-1e3a729d769e {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.438393] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7aef2fa9-d167-49c1-be66-866464596d35 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.474605] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fa93acd-d4dc-45e0-97b9-cf0e89868f71 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.487242] env[61839]: DEBUG oslo_vmware.api [None req-51647074-c578-4686-af75-4fafc2fc5db7 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': task-1314904, 'name': ReconfigVM_Task} progress is 99%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.489030] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab3e4792-566e-40e3-a9c9-bf6630bcbab8 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.504142] env[61839]: DEBUG nova.compute.provider_tree [None req-c67b0a78-92e0-4ec2-bcc9-0c54d6296d00 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 990.691868] env[61839]: DEBUG oslo_concurrency.lockutils [None req-4adac468-11aa-40c8-bff0-c8fe43509589 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Acquiring lock "694a5d4b-3673-406b-a24a-d37fad33e549" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 990.692204] env[61839]: DEBUG oslo_concurrency.lockutils [None req-4adac468-11aa-40c8-bff0-c8fe43509589 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Lock "694a5d4b-3673-406b-a24a-d37fad33e549" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 990.692431] env[61839]: DEBUG oslo_concurrency.lockutils [None req-4adac468-11aa-40c8-bff0-c8fe43509589 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Acquiring lock "694a5d4b-3673-406b-a24a-d37fad33e549-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 990.692623] env[61839]: DEBUG oslo_concurrency.lockutils [None req-4adac468-11aa-40c8-bff0-c8fe43509589 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] 
Lock "694a5d4b-3673-406b-a24a-d37fad33e549-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 990.692802] env[61839]: DEBUG oslo_concurrency.lockutils [None req-4adac468-11aa-40c8-bff0-c8fe43509589 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Lock "694a5d4b-3673-406b-a24a-d37fad33e549-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 990.695523] env[61839]: INFO nova.compute.manager [None req-4adac468-11aa-40c8-bff0-c8fe43509589 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: 694a5d4b-3673-406b-a24a-d37fad33e549] Terminating instance [ 990.697742] env[61839]: DEBUG nova.compute.manager [None req-4adac468-11aa-40c8-bff0-c8fe43509589 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: 694a5d4b-3673-406b-a24a-d37fad33e549] Start destroying the instance on the hypervisor. {{(pid=61839) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 990.698015] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-4adac468-11aa-40c8-bff0-c8fe43509589 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: 694a5d4b-3673-406b-a24a-d37fad33e549] Destroying instance {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 990.698916] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4326ddd-0d1c-483e-9d83-2a2c99f5ff33 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.708065] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-4adac468-11aa-40c8-bff0-c8fe43509589 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: 694a5d4b-3673-406b-a24a-d37fad33e549] Powering off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 990.708323] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9218df21-04f9-4e77-89d3-144f93a8564a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.715255] env[61839]: DEBUG oslo_vmware.api [None req-4adac468-11aa-40c8-bff0-c8fe43509589 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Waiting for the task: (returnval){ [ 990.715255] env[61839]: value = "task-1314907" [ 990.715255] env[61839]: _type = "Task" [ 990.715255] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 990.724877] env[61839]: DEBUG oslo_vmware.api [None req-4adac468-11aa-40c8-bff0-c8fe43509589 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Task: {'id': task-1314907, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.741338] env[61839]: DEBUG oslo_vmware.api [None req-a7050108-7622-4d4a-b278-16e5711e81d2 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1314905, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.155431} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 990.741625] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-a7050108-7622-4d4a-b278-16e5711e81d2 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 73b83239-bbc8-41d1-aec3-2b4519c320af] Extended root virtual disk {{(pid=61839) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 990.742430] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdd9d079-aa62-48d6-bcfd-58c33c34227d {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.765015] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-a7050108-7622-4d4a-b278-16e5711e81d2 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 73b83239-bbc8-41d1-aec3-2b4519c320af] Reconfiguring VM instance instance-00000060 to attach disk [datastore2] 73b83239-bbc8-41d1-aec3-2b4519c320af/73b83239-bbc8-41d1-aec3-2b4519c320af.vmdk or device None with type sparse {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 990.765374] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-693d0c16-40a8-41b7-bcba-ea0a1e459085 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.795350] env[61839]: DEBUG oslo_vmware.api [None req-a7050108-7622-4d4a-b278-16e5711e81d2 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Waiting for the task: (returnval){ [ 990.795350] env[61839]: value = "task-1314908" [ 990.795350] env[61839]: _type = "Task" [ 990.795350] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 990.804639] env[61839]: DEBUG oslo_vmware.api [None req-3560b6a3-38e1-4082-80dc-ba1e60a79e5a tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': task-1314903, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.558219} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 990.805382] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-3560b6a3-38e1-4082-80dc-ba1e60a79e5a tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk to [datastore2] a87f3a17-0a97-4b47-bc95-eee5975f8203/a87f3a17-0a97-4b47-bc95-eee5975f8203.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 990.805667] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-3560b6a3-38e1-4082-80dc-ba1e60a79e5a tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: a87f3a17-0a97-4b47-bc95-eee5975f8203] Extending root virtual disk to 1048576 {{(pid=61839) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 990.805941] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b9c1cea2-451e-4fbc-85c1-bb516ad2747a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.811692] env[61839]: DEBUG oslo_vmware.api [None req-a7050108-7622-4d4a-b278-16e5711e81d2 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1314908, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.818206] env[61839]: DEBUG oslo_vmware.api [None req-3560b6a3-38e1-4082-80dc-ba1e60a79e5a tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Waiting for the task: (returnval){ [ 990.818206] env[61839]: value = "task-1314909" [ 990.818206] env[61839]: _type = "Task" [ 990.818206] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 990.829655] env[61839]: DEBUG oslo_vmware.api [None req-3560b6a3-38e1-4082-80dc-ba1e60a79e5a tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': task-1314909, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.851359] env[61839]: DEBUG oslo_vmware.api [None req-3eb64946-db7f-4575-9761-49a11077e028 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Task: {'id': task-1314906, 'name': PowerOnVM_Task} progress is 89%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.981366] env[61839]: DEBUG oslo_vmware.api [None req-51647074-c578-4686-af75-4fafc2fc5db7 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': task-1314904, 'name': ReconfigVM_Task, 'duration_secs': 0.649671} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 990.981868] env[61839]: DEBUG oslo_concurrency.lockutils [None req-51647074-c578-4686-af75-4fafc2fc5db7 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Releasing lock "d718d866-dd6c-4332-b63a-be6850a5a785" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 990.982100] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-51647074-c578-4686-af75-4fafc2fc5db7 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: d718d866-dd6c-4332-b63a-be6850a5a785] Reconfigured VM to attach interface {{(pid=61839) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 991.007505] env[61839]: DEBUG nova.scheduler.client.report [None req-c67b0a78-92e0-4ec2-bcc9-0c54d6296d00 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 991.226797] env[61839]: DEBUG oslo_vmware.api [None req-4adac468-11aa-40c8-bff0-c8fe43509589 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Task: {'id': task-1314907, 'name': PowerOffVM_Task, 'duration_secs': 0.174029} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 991.227130] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-4adac468-11aa-40c8-bff0-c8fe43509589 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: 694a5d4b-3673-406b-a24a-d37fad33e549] Powered off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 991.227299] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-4adac468-11aa-40c8-bff0-c8fe43509589 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: 694a5d4b-3673-406b-a24a-d37fad33e549] Unregistering the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 991.227713] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6a20b4a4-73a1-4141-8abf-a053f7f7fd31 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.286837] env[61839]: DEBUG oslo_concurrency.lockutils [None req-dff80fa7-071d-4022-95f6-38022e3c7b75 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Acquiring lock "23ee24d5-bccd-497d-a53f-b9723fd9c707" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 991.287212] env[61839]: DEBUG oslo_concurrency.lockutils [None req-dff80fa7-071d-4022-95f6-38022e3c7b75 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Lock "23ee24d5-bccd-497d-a53f-b9723fd9c707" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 991.287466] env[61839]: DEBUG oslo_concurrency.lockutils [None req-dff80fa7-071d-4022-95f6-38022e3c7b75 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Acquiring lock "23ee24d5-bccd-497d-a53f-b9723fd9c707-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 991.287696] env[61839]: DEBUG oslo_concurrency.lockutils [None req-dff80fa7-071d-4022-95f6-38022e3c7b75 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Lock "23ee24d5-bccd-497d-a53f-b9723fd9c707-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 991.287898] env[61839]: DEBUG oslo_concurrency.lockutils [None req-dff80fa7-071d-4022-95f6-38022e3c7b75 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Lock "23ee24d5-bccd-497d-a53f-b9723fd9c707-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 991.290759] env[61839]: INFO nova.compute.manager [None req-dff80fa7-071d-4022-95f6-38022e3c7b75 
tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 23ee24d5-bccd-497d-a53f-b9723fd9c707] Terminating instance [ 991.293066] env[61839]: DEBUG nova.compute.manager [None req-dff80fa7-071d-4022-95f6-38022e3c7b75 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 23ee24d5-bccd-497d-a53f-b9723fd9c707] Start destroying the instance on the hypervisor. {{(pid=61839) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 991.293285] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-dff80fa7-071d-4022-95f6-38022e3c7b75 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 23ee24d5-bccd-497d-a53f-b9723fd9c707] Destroying instance {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 991.294312] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86e4cb7d-3add-4a8a-9d71-3bc9e18a18cc {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.302746] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-4adac468-11aa-40c8-bff0-c8fe43509589 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: 694a5d4b-3673-406b-a24a-d37fad33e549] Unregistered the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 991.303044] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-4adac468-11aa-40c8-bff0-c8fe43509589 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: 694a5d4b-3673-406b-a24a-d37fad33e549] Deleting contents of the VM from datastore datastore1 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 991.303307] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-4adac468-11aa-40c8-bff0-c8fe43509589 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Deleting the datastore file [datastore1] 694a5d4b-3673-406b-a24a-d37fad33e549 {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 991.304031] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-48231113-2a24-4d8e-b42b-730778c1f35e {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.312468] env[61839]: DEBUG oslo_vmware.api [None req-a7050108-7622-4d4a-b278-16e5711e81d2 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1314908, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.312785] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-dff80fa7-071d-4022-95f6-38022e3c7b75 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 23ee24d5-bccd-497d-a53f-b9723fd9c707] Powering off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 991.313487] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-56743cbc-2956-4118-a052-7e1b18df1821 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.317971] env[61839]: DEBUG oslo_vmware.api [None req-4adac468-11aa-40c8-bff0-c8fe43509589 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Waiting for the task: (returnval){ [ 991.317971] env[61839]: value = "task-1314911" [ 991.317971] env[61839]: _type = "Task" [ 991.317971] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 991.322813] env[61839]: DEBUG oslo_vmware.api [None req-dff80fa7-071d-4022-95f6-38022e3c7b75 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Waiting for the task: (returnval){ [ 991.322813] env[61839]: value = "task-1314912" [ 991.322813] env[61839]: _type = "Task" [ 991.322813] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 991.331982] env[61839]: DEBUG oslo_vmware.api [None req-4adac468-11aa-40c8-bff0-c8fe43509589 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Task: {'id': task-1314911, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.336546] env[61839]: DEBUG oslo_vmware.api [None req-3560b6a3-38e1-4082-80dc-ba1e60a79e5a tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': task-1314909, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.09018} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 991.339425] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-3560b6a3-38e1-4082-80dc-ba1e60a79e5a tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: a87f3a17-0a97-4b47-bc95-eee5975f8203] Extended root virtual disk {{(pid=61839) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 991.339771] env[61839]: DEBUG oslo_vmware.api [None req-dff80fa7-071d-4022-95f6-38022e3c7b75 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Task: {'id': task-1314912, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.340466] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1907de3-6f17-446d-9933-79e20fd8d2f1 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.364029] env[61839]: DEBUG oslo_vmware.api [None req-3eb64946-db7f-4575-9761-49a11077e028 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Task: {'id': task-1314906, 'name': PowerOnVM_Task, 'duration_secs': 0.587982} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 991.373051] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-3560b6a3-38e1-4082-80dc-ba1e60a79e5a tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: a87f3a17-0a97-4b47-bc95-eee5975f8203] Reconfiguring VM instance instance-00000062 to attach disk [datastore2] a87f3a17-0a97-4b47-bc95-eee5975f8203/a87f3a17-0a97-4b47-bc95-eee5975f8203.vmdk or device None with type sparse {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 991.374249] env[61839]: DEBUG oslo_concurrency.lockutils [None req-03fce1b6-c3ef-40c6-9a3d-6a8cc4836ddc tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Acquiring lock "75344275-bdf2-4526-a101-e62ec270dd72" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 991.374489] env[61839]: DEBUG oslo_concurrency.lockutils [None req-03fce1b6-c3ef-40c6-9a3d-6a8cc4836ddc tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Lock "75344275-bdf2-4526-a101-e62ec270dd72" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 991.374683] env[61839]: DEBUG oslo_concurrency.lockutils [None req-03fce1b6-c3ef-40c6-9a3d-6a8cc4836ddc tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Acquiring lock "75344275-bdf2-4526-a101-e62ec270dd72-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 991.374868] env[61839]: DEBUG oslo_concurrency.lockutils [None req-03fce1b6-c3ef-40c6-9a3d-6a8cc4836ddc tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Lock "75344275-bdf2-4526-a101-e62ec270dd72-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 991.375081] env[61839]: DEBUG oslo_concurrency.lockutils [None req-03fce1b6-c3ef-40c6-9a3d-6a8cc4836ddc tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Lock "75344275-bdf2-4526-a101-e62ec270dd72-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} 
[ 991.376683] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-3eb64946-db7f-4575-9761-49a11077e028 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] [instance: 86525ea7-af75-4b10-85a1-c0fbab73ea5f] Powered on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 991.379499] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-410ec46c-60ed-4077-b23f-b09659ebe770 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.395205] env[61839]: INFO nova.compute.manager [None req-03fce1b6-c3ef-40c6-9a3d-6a8cc4836ddc tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] [instance: 75344275-bdf2-4526-a101-e62ec270dd72] Terminating instance [ 991.397687] env[61839]: DEBUG oslo_concurrency.lockutils [None req-03fce1b6-c3ef-40c6-9a3d-6a8cc4836ddc tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Acquiring lock "refresh_cache-75344275-bdf2-4526-a101-e62ec270dd72" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 991.397916] env[61839]: DEBUG oslo_concurrency.lockutils [None req-03fce1b6-c3ef-40c6-9a3d-6a8cc4836ddc tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Acquired lock "refresh_cache-75344275-bdf2-4526-a101-e62ec270dd72" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 991.398069] env[61839]: DEBUG nova.network.neutron [None req-03fce1b6-c3ef-40c6-9a3d-6a8cc4836ddc tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] [instance: 75344275-bdf2-4526-a101-e62ec270dd72] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 991.406170] env[61839]: DEBUG oslo_vmware.api [None req-3560b6a3-38e1-4082-80dc-ba1e60a79e5a tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Waiting for the task: (returnval){ [ 991.406170] env[61839]: value = "task-1314913" [ 991.406170] env[61839]: _type = "Task" [ 991.406170] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 991.417480] env[61839]: DEBUG oslo_vmware.api [None req-3560b6a3-38e1-4082-80dc-ba1e60a79e5a tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': task-1314913, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.486955] env[61839]: DEBUG oslo_concurrency.lockutils [None req-51647074-c578-4686-af75-4fafc2fc5db7 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Lock "interface-d718d866-dd6c-4332-b63a-be6850a5a785-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 6.434s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 991.512449] env[61839]: DEBUG oslo_concurrency.lockutils [None req-c67b0a78-92e0-4ec2-bcc9-0c54d6296d00 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.841s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 991.514897] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a09bae88-7ca6-454c-87de-086a44c03aaf tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.798s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 991.515389] env[61839]: DEBUG nova.objects.instance [None req-a09bae88-7ca6-454c-87de-086a44c03aaf tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Lazy-loading 'resources' on Instance uuid 3c832102-cacc-4dd8-a336-2aa1d8bd8116 {{(pid=61839) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 991.517181] env[61839]: DEBUG nova.compute.manager [None req-3eb64946-db7f-4575-9761-49a11077e028 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] [instance: 86525ea7-af75-4b10-85a1-c0fbab73ea5f] Checking state {{(pid=61839) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 991.518301] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-722f525e-0270-431e-815d-10389b4386a2 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.532724] env[61839]: INFO nova.scheduler.client.report [None req-c67b0a78-92e0-4ec2-bcc9-0c54d6296d00 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Deleted allocations for instance 603191b6-a4b0-451b-b98b-f3dbfb684300 [ 991.542184] env[61839]: DEBUG nova.compute.manager [req-d7d38a01-c0a2-46fe-bb0e-fd61f0dba7ad req-a6578107-2ec0-4122-b818-9e02a019dd99 service nova] [instance: d718d866-dd6c-4332-b63a-be6850a5a785] Received event network-changed-7aee83a4-620a-48c2-a47b-7d47e05a7a07 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 991.543282] env[61839]: DEBUG nova.compute.manager [req-d7d38a01-c0a2-46fe-bb0e-fd61f0dba7ad req-a6578107-2ec0-4122-b818-9e02a019dd99 service nova] [instance: d718d866-dd6c-4332-b63a-be6850a5a785] Refreshing instance network info cache due to event network-changed-7aee83a4-620a-48c2-a47b-7d47e05a7a07. 
{{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 991.543282] env[61839]: DEBUG oslo_concurrency.lockutils [req-d7d38a01-c0a2-46fe-bb0e-fd61f0dba7ad req-a6578107-2ec0-4122-b818-9e02a019dd99 service nova] Acquiring lock "refresh_cache-d718d866-dd6c-4332-b63a-be6850a5a785" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 991.543282] env[61839]: DEBUG oslo_concurrency.lockutils [req-d7d38a01-c0a2-46fe-bb0e-fd61f0dba7ad req-a6578107-2ec0-4122-b818-9e02a019dd99 service nova] Acquired lock "refresh_cache-d718d866-dd6c-4332-b63a-be6850a5a785" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 991.543282] env[61839]: DEBUG nova.network.neutron [req-d7d38a01-c0a2-46fe-bb0e-fd61f0dba7ad req-a6578107-2ec0-4122-b818-9e02a019dd99 service nova] [instance: d718d866-dd6c-4332-b63a-be6850a5a785] Refreshing network info cache for port 7aee83a4-620a-48c2-a47b-7d47e05a7a07 {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 991.810686] env[61839]: DEBUG oslo_vmware.api [None req-a7050108-7622-4d4a-b278-16e5711e81d2 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1314908, 'name': ReconfigVM_Task, 'duration_secs': 0.831717} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 991.811038] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-a7050108-7622-4d4a-b278-16e5711e81d2 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 73b83239-bbc8-41d1-aec3-2b4519c320af] Reconfigured VM instance instance-00000060 to attach disk [datastore2] 73b83239-bbc8-41d1-aec3-2b4519c320af/73b83239-bbc8-41d1-aec3-2b4519c320af.vmdk or device None with type sparse {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 991.811738] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-abb72007-df5b-40d9-8209-4d9da638ab79 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.823311] env[61839]: DEBUG oslo_vmware.api [None req-a7050108-7622-4d4a-b278-16e5711e81d2 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Waiting for the task: (returnval){ [ 991.823311] env[61839]: value = "task-1314915" [ 991.823311] env[61839]: _type = "Task" [ 991.823311] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 991.831394] env[61839]: DEBUG oslo_vmware.api [None req-4adac468-11aa-40c8-bff0-c8fe43509589 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Task: {'id': task-1314911, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.192685} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 991.837900] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-4adac468-11aa-40c8-bff0-c8fe43509589 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Deleted the datastore file {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 991.838175] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-4adac468-11aa-40c8-bff0-c8fe43509589 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: 694a5d4b-3673-406b-a24a-d37fad33e549] Deleted contents of the VM from datastore datastore1 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 991.838255] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-4adac468-11aa-40c8-bff0-c8fe43509589 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: 694a5d4b-3673-406b-a24a-d37fad33e549] Instance destroyed {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 991.838434] env[61839]: INFO nova.compute.manager [None req-4adac468-11aa-40c8-bff0-c8fe43509589 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: 694a5d4b-3673-406b-a24a-d37fad33e549] Took 1.14 seconds to destroy the instance on the hypervisor. [ 991.838720] env[61839]: DEBUG oslo.service.loopingcall [None req-4adac468-11aa-40c8-bff0-c8fe43509589 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 991.839465] env[61839]: DEBUG nova.compute.manager [-] [instance: 694a5d4b-3673-406b-a24a-d37fad33e549] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 991.839592] env[61839]: DEBUG nova.network.neutron [-] [instance: 694a5d4b-3673-406b-a24a-d37fad33e549] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 991.845543] env[61839]: DEBUG oslo_vmware.api [None req-a7050108-7622-4d4a-b278-16e5711e81d2 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1314915, 'name': Rename_Task} progress is 10%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.851433] env[61839]: DEBUG oslo_vmware.api [None req-dff80fa7-071d-4022-95f6-38022e3c7b75 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Task: {'id': task-1314912, 'name': PowerOffVM_Task, 'duration_secs': 0.363044} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 991.851790] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-dff80fa7-071d-4022-95f6-38022e3c7b75 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 23ee24d5-bccd-497d-a53f-b9723fd9c707] Powered off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 991.852020] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-dff80fa7-071d-4022-95f6-38022e3c7b75 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 23ee24d5-bccd-497d-a53f-b9723fd9c707] Unregistering the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 991.852430] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e2abfe80-7b03-4fc2-ba76-efb6f1fb5be5 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.905534] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0d0cf358-0b30-4267-bdcc-e309211a3bd4 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Acquiring lock "8e9bea05-d6d7-40a8-997d-8c952f596f75" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 991.905937] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0d0cf358-0b30-4267-bdcc-e309211a3bd4 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Lock "8e9bea05-d6d7-40a8-997d-8c952f596f75" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 991.923031] env[61839]: DEBUG oslo_vmware.api [None req-3560b6a3-38e1-4082-80dc-ba1e60a79e5a tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': task-1314913, 'name': ReconfigVM_Task, 'duration_secs': 0.447748} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 991.923031] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-3560b6a3-38e1-4082-80dc-ba1e60a79e5a tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: a87f3a17-0a97-4b47-bc95-eee5975f8203] Reconfigured VM instance instance-00000062 to attach disk [datastore2] a87f3a17-0a97-4b47-bc95-eee5975f8203/a87f3a17-0a97-4b47-bc95-eee5975f8203.vmdk or device None with type sparse {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 991.923031] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-829354e9-e72d-495c-aefb-e0fc2733bfb4 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.928754] env[61839]: DEBUG nova.network.neutron [None req-03fce1b6-c3ef-40c6-9a3d-6a8cc4836ddc tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] [instance: 75344275-bdf2-4526-a101-e62ec270dd72] Instance cache missing network info. 
{{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 991.933052] env[61839]: DEBUG oslo_vmware.api [None req-3560b6a3-38e1-4082-80dc-ba1e60a79e5a tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Waiting for the task: (returnval){ [ 991.933052] env[61839]: value = "task-1314917" [ 991.933052] env[61839]: _type = "Task" [ 991.933052] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 991.944587] env[61839]: DEBUG oslo_vmware.api [None req-3560b6a3-38e1-4082-80dc-ba1e60a79e5a tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': task-1314917, 'name': Rename_Task} progress is 5%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.971667] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-dff80fa7-071d-4022-95f6-38022e3c7b75 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 23ee24d5-bccd-497d-a53f-b9723fd9c707] Unregistered the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 991.972284] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-dff80fa7-071d-4022-95f6-38022e3c7b75 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 23ee24d5-bccd-497d-a53f-b9723fd9c707] Deleting contents of the VM from datastore datastore2 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 991.972504] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-dff80fa7-071d-4022-95f6-38022e3c7b75 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Deleting the datastore file [datastore2] 23ee24d5-bccd-497d-a53f-b9723fd9c707 {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 991.972789] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-be7a0a72-d8eb-4117-ab84-9c5ec9872df7 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.981250] env[61839]: DEBUG oslo_vmware.api [None req-dff80fa7-071d-4022-95f6-38022e3c7b75 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Waiting for the task: (returnval){ [ 991.981250] env[61839]: value = "task-1314918" [ 991.981250] env[61839]: _type = "Task" [ 991.981250] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 991.992222] env[61839]: DEBUG oslo_vmware.api [None req-dff80fa7-071d-4022-95f6-38022e3c7b75 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Task: {'id': task-1314918, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.025712] env[61839]: DEBUG nova.network.neutron [None req-03fce1b6-c3ef-40c6-9a3d-6a8cc4836ddc tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] [instance: 75344275-bdf2-4526-a101-e62ec270dd72] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 992.040028] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3eb64946-db7f-4575-9761-49a11077e028 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Lock "86525ea7-af75-4b10-85a1-c0fbab73ea5f" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 33.145s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 992.054288] env[61839]: DEBUG oslo_concurrency.lockutils [None req-c67b0a78-92e0-4ec2-bcc9-0c54d6296d00 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Lock "603191b6-a4b0-451b-b98b-f3dbfb684300" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.257s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 992.278481] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a3d4684-f5ab-46d7-ac54-f9ec38b59719 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.286710] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e320a0bc-edb4-4379-b87a-7a4927536520 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.319758] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9508d98-3660-4a50-a56a-5eb601f62025 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.332557] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-342fd89e-5784-4aa2-8049-9b85e6f174ae {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.339176] env[61839]: DEBUG oslo_vmware.api [None req-a7050108-7622-4d4a-b278-16e5711e81d2 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1314915, 'name': Rename_Task, 'duration_secs': 0.185707} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 992.340864] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-a7050108-7622-4d4a-b278-16e5711e81d2 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 73b83239-bbc8-41d1-aec3-2b4519c320af] Powering on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 992.340864] env[61839]: DEBUG oslo_concurrency.lockutils [None req-09fb80b3-5be1-4023-9fa0-7b2ce3a7a3f4 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Acquiring lock "5c29c188-a34b-4751-9f8b-166af7b15088" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 992.340864] env[61839]: DEBUG oslo_concurrency.lockutils [None req-09fb80b3-5be1-4023-9fa0-7b2ce3a7a3f4 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Lock "5c29c188-a34b-4751-9f8b-166af7b15088" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 992.340864] env[61839]: DEBUG oslo_concurrency.lockutils [None req-09fb80b3-5be1-4023-9fa0-7b2ce3a7a3f4 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Acquiring lock "5c29c188-a34b-4751-9f8b-166af7b15088-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 992.341059] env[61839]: DEBUG oslo_concurrency.lockutils [None req-09fb80b3-5be1-4023-9fa0-7b2ce3a7a3f4 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Lock "5c29c188-a34b-4751-9f8b-166af7b15088-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 992.341129] env[61839]: DEBUG oslo_concurrency.lockutils [None req-09fb80b3-5be1-4023-9fa0-7b2ce3a7a3f4 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Lock "5c29c188-a34b-4751-9f8b-166af7b15088-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 992.342571] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1fb627eb-40fb-4bc9-be22-5ff1155c8092 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.345356] env[61839]: INFO nova.compute.manager [None req-09fb80b3-5be1-4023-9fa0-7b2ce3a7a3f4 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 5c29c188-a34b-4751-9f8b-166af7b15088] Terminating instance [ 992.354634] env[61839]: DEBUG nova.compute.provider_tree [None req-a09bae88-7ca6-454c-87de-086a44c03aaf tempest-ServersTestJSON-2052072083 
tempest-ServersTestJSON-2052072083-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 992.356197] env[61839]: DEBUG nova.compute.manager [None req-09fb80b3-5be1-4023-9fa0-7b2ce3a7a3f4 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 5c29c188-a34b-4751-9f8b-166af7b15088] Start destroying the instance on the hypervisor. {{(pid=61839) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 992.356420] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-09fb80b3-5be1-4023-9fa0-7b2ce3a7a3f4 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 5c29c188-a34b-4751-9f8b-166af7b15088] Destroying instance {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 992.358100] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc752152-2313-4041-a87f-2ad62eb772af {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.363200] env[61839]: DEBUG oslo_vmware.api [None req-a7050108-7622-4d4a-b278-16e5711e81d2 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Waiting for the task: (returnval){ [ 992.363200] env[61839]: value = "task-1314919" [ 992.363200] env[61839]: _type = "Task" [ 992.363200] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 992.369216] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-09fb80b3-5be1-4023-9fa0-7b2ce3a7a3f4 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 5c29c188-a34b-4751-9f8b-166af7b15088] Powering off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 992.369798] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-af921867-d45e-4bad-871a-4c3c100d78d7 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.374585] env[61839]: DEBUG oslo_vmware.api [None req-a7050108-7622-4d4a-b278-16e5711e81d2 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1314919, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.376506] env[61839]: DEBUG oslo_vmware.api [None req-09fb80b3-5be1-4023-9fa0-7b2ce3a7a3f4 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Waiting for the task: (returnval){ [ 992.376506] env[61839]: value = "task-1314920" [ 992.376506] env[61839]: _type = "Task" [ 992.376506] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 992.385212] env[61839]: DEBUG oslo_vmware.api [None req-09fb80b3-5be1-4023-9fa0-7b2ce3a7a3f4 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Task: {'id': task-1314920, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.409794] env[61839]: DEBUG nova.compute.manager [None req-0d0cf358-0b30-4267-bdcc-e309211a3bd4 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 8e9bea05-d6d7-40a8-997d-8c952f596f75] Starting instance... {{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 992.445206] env[61839]: DEBUG oslo_vmware.api [None req-3560b6a3-38e1-4082-80dc-ba1e60a79e5a tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': task-1314917, 'name': Rename_Task, 'duration_secs': 0.180368} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 992.445785] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-3560b6a3-38e1-4082-80dc-ba1e60a79e5a tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: a87f3a17-0a97-4b47-bc95-eee5975f8203] Powering on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 992.446124] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1aa96278-4e9d-4100-9f24-f8824123087c {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.455237] env[61839]: DEBUG oslo_vmware.api [None req-3560b6a3-38e1-4082-80dc-ba1e60a79e5a tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Waiting for the task: (returnval){ [ 992.455237] env[61839]: value = "task-1314921" [ 992.455237] env[61839]: _type = "Task" [ 992.455237] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 992.466125] env[61839]: DEBUG oslo_vmware.api [None req-3560b6a3-38e1-4082-80dc-ba1e60a79e5a tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': task-1314921, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.473886] env[61839]: DEBUG nova.network.neutron [req-d7d38a01-c0a2-46fe-bb0e-fd61f0dba7ad req-a6578107-2ec0-4122-b818-9e02a019dd99 service nova] [instance: d718d866-dd6c-4332-b63a-be6850a5a785] Updated VIF entry in instance network info cache for port 7aee83a4-620a-48c2-a47b-7d47e05a7a07. 
{{(pid=61839) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 992.474460] env[61839]: DEBUG nova.network.neutron [req-d7d38a01-c0a2-46fe-bb0e-fd61f0dba7ad req-a6578107-2ec0-4122-b818-9e02a019dd99 service nova] [instance: d718d866-dd6c-4332-b63a-be6850a5a785] Updating instance_info_cache with network_info: [{"id": "5d041d96-4a6e-44d5-a31a-e597194524e0", "address": "fa:16:3e:a9:bd:d2", "network": {"id": "41c98894-de91-45eb-a390-6217e0f9dca5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1040806357-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.245", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7e03467b7fba46a9aac1562a1cb8368e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "572b7281-aad3-45fa-9cb2-fc1c70569948", "external-id": "nsx-vlan-transportzone-722", "segmentation_id": 722, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5d041d96-4a", "ovs_interfaceid": "5d041d96-4a6e-44d5-a31a-e597194524e0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "7aee83a4-620a-48c2-a47b-7d47e05a7a07", "address": "fa:16:3e:11:b9:34", "network": {"id": "41c98894-de91-45eb-a390-6217e0f9dca5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1040806357-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7e03467b7fba46a9aac1562a1cb8368e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "572b7281-aad3-45fa-9cb2-fc1c70569948", "external-id": "nsx-vlan-transportzone-722", "segmentation_id": 722, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7aee83a4-62", "ovs_interfaceid": "7aee83a4-620a-48c2-a47b-7d47e05a7a07", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 992.492204] env[61839]: DEBUG oslo_vmware.api [None req-dff80fa7-071d-4022-95f6-38022e3c7b75 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Task: {'id': task-1314918, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.162877} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 992.492537] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-dff80fa7-071d-4022-95f6-38022e3c7b75 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Deleted the datastore file {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 992.492735] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-dff80fa7-071d-4022-95f6-38022e3c7b75 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 23ee24d5-bccd-497d-a53f-b9723fd9c707] Deleted contents of the VM from datastore datastore2 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 992.492914] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-dff80fa7-071d-4022-95f6-38022e3c7b75 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 23ee24d5-bccd-497d-a53f-b9723fd9c707] Instance destroyed {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 992.493109] env[61839]: INFO nova.compute.manager [None req-dff80fa7-071d-4022-95f6-38022e3c7b75 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 23ee24d5-bccd-497d-a53f-b9723fd9c707] Took 1.20 seconds to destroy the instance on the hypervisor. [ 992.493370] env[61839]: DEBUG oslo.service.loopingcall [None req-dff80fa7-071d-4022-95f6-38022e3c7b75 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 992.493569] env[61839]: DEBUG nova.compute.manager [-] [instance: 23ee24d5-bccd-497d-a53f-b9723fd9c707] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 992.493690] env[61839]: DEBUG nova.network.neutron [-] [instance: 23ee24d5-bccd-497d-a53f-b9723fd9c707] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 992.528708] env[61839]: DEBUG oslo_concurrency.lockutils [None req-03fce1b6-c3ef-40c6-9a3d-6a8cc4836ddc tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Releasing lock "refresh_cache-75344275-bdf2-4526-a101-e62ec270dd72" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 992.529895] env[61839]: DEBUG nova.compute.manager [None req-03fce1b6-c3ef-40c6-9a3d-6a8cc4836ddc tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] [instance: 75344275-bdf2-4526-a101-e62ec270dd72] Start destroying the instance on the hypervisor. 
{{(pid=61839) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 992.530037] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-03fce1b6-c3ef-40c6-9a3d-6a8cc4836ddc tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] [instance: 75344275-bdf2-4526-a101-e62ec270dd72] Destroying instance {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 992.531318] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6c2d5ad-e6c3-43b4-9cbc-97a1b45698e8 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.541546] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-03fce1b6-c3ef-40c6-9a3d-6a8cc4836ddc tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] [instance: 75344275-bdf2-4526-a101-e62ec270dd72] Powering off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 992.542355] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ccfdc2dd-cb67-4515-82f4-b61e3a677d1c {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.552860] env[61839]: DEBUG oslo_vmware.api [None req-03fce1b6-c3ef-40c6-9a3d-6a8cc4836ddc tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Waiting for the task: (returnval){ [ 992.552860] env[61839]: value = "task-1314922" [ 992.552860] env[61839]: _type = "Task" [ 992.552860] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 992.563189] env[61839]: DEBUG oslo_vmware.api [None req-03fce1b6-c3ef-40c6-9a3d-6a8cc4836ddc tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Task: {'id': task-1314922, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.857992] env[61839]: DEBUG nova.network.neutron [-] [instance: 694a5d4b-3673-406b-a24a-d37fad33e549] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 992.861909] env[61839]: DEBUG nova.scheduler.client.report [None req-a09bae88-7ca6-454c-87de-086a44c03aaf tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 992.879503] env[61839]: DEBUG oslo_vmware.api [None req-a7050108-7622-4d4a-b278-16e5711e81d2 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1314919, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.891092] env[61839]: DEBUG oslo_vmware.api [None req-09fb80b3-5be1-4023-9fa0-7b2ce3a7a3f4 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Task: {'id': task-1314920, 'name': PowerOffVM_Task, 'duration_secs': 0.238753} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 992.891670] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-09fb80b3-5be1-4023-9fa0-7b2ce3a7a3f4 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 5c29c188-a34b-4751-9f8b-166af7b15088] Powered off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 992.891855] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-09fb80b3-5be1-4023-9fa0-7b2ce3a7a3f4 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 5c29c188-a34b-4751-9f8b-166af7b15088] Unregistering the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 992.892572] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fd65a0f8-5830-4422-81bb-0572b9741e4e {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.937795] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0d0cf358-0b30-4267-bdcc-e309211a3bd4 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 992.956364] env[61839]: DEBUG nova.compute.manager [req-86224a62-2f1f-41d4-b7ca-13e59142e2dc req-9697cf67-4b66-4b06-bb8a-58f1fc201c47 service nova] [instance: 23ee24d5-bccd-497d-a53f-b9723fd9c707] Received event network-vif-deleted-56222a14-6e55-4e9b-a963-5d868763ad21 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 992.956364] env[61839]: INFO nova.compute.manager [req-86224a62-2f1f-41d4-b7ca-13e59142e2dc req-9697cf67-4b66-4b06-bb8a-58f1fc201c47 service nova] [instance: 23ee24d5-bccd-497d-a53f-b9723fd9c707] Neutron deleted interface 56222a14-6e55-4e9b-a963-5d868763ad21; detaching it from the instance and deleting it from the info cache [ 992.956579] env[61839]: DEBUG nova.network.neutron [req-86224a62-2f1f-41d4-b7ca-13e59142e2dc req-9697cf67-4b66-4b06-bb8a-58f1fc201c47 service nova] [instance: 23ee24d5-bccd-497d-a53f-b9723fd9c707] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 992.973418] env[61839]: DEBUG oslo_vmware.api [None req-3560b6a3-38e1-4082-80dc-ba1e60a79e5a tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': task-1314921, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.977487] env[61839]: DEBUG oslo_concurrency.lockutils [req-d7d38a01-c0a2-46fe-bb0e-fd61f0dba7ad req-a6578107-2ec0-4122-b818-9e02a019dd99 service nova] Releasing lock "refresh_cache-d718d866-dd6c-4332-b63a-be6850a5a785" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 992.992516] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-09fb80b3-5be1-4023-9fa0-7b2ce3a7a3f4 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 5c29c188-a34b-4751-9f8b-166af7b15088] Unregistered the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 992.992775] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-09fb80b3-5be1-4023-9fa0-7b2ce3a7a3f4 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 5c29c188-a34b-4751-9f8b-166af7b15088] Deleting contents of the VM from datastore datastore1 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 992.993067] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-09fb80b3-5be1-4023-9fa0-7b2ce3a7a3f4 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Deleting the datastore file [datastore1] 5c29c188-a34b-4751-9f8b-166af7b15088 {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 992.993390] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fff7dcdb-fcb4-4447-84f8-44aff588e2cd {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.001713] env[61839]: DEBUG oslo_vmware.api [None req-09fb80b3-5be1-4023-9fa0-7b2ce3a7a3f4 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Waiting for the task: (returnval){ [ 993.001713] env[61839]: value = "task-1314924" [ 993.001713] env[61839]: _type = "Task" [ 993.001713] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 993.011531] env[61839]: DEBUG oslo_vmware.api [None req-09fb80b3-5be1-4023-9fa0-7b2ce3a7a3f4 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Task: {'id': task-1314924, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 993.062694] env[61839]: DEBUG oslo_vmware.api [None req-03fce1b6-c3ef-40c6-9a3d-6a8cc4836ddc tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Task: {'id': task-1314922, 'name': PowerOffVM_Task, 'duration_secs': 0.106962} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 993.062982] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-03fce1b6-c3ef-40c6-9a3d-6a8cc4836ddc tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] [instance: 75344275-bdf2-4526-a101-e62ec270dd72] Powered off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 993.063173] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-03fce1b6-c3ef-40c6-9a3d-6a8cc4836ddc tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] [instance: 75344275-bdf2-4526-a101-e62ec270dd72] Unregistering the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 993.063458] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ace82a28-8c96-47ad-9529-4fa49b862434 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.092600] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-03fce1b6-c3ef-40c6-9a3d-6a8cc4836ddc tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] [instance: 75344275-bdf2-4526-a101-e62ec270dd72] Unregistered the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 993.093983] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-03fce1b6-c3ef-40c6-9a3d-6a8cc4836ddc tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] [instance: 75344275-bdf2-4526-a101-e62ec270dd72] Deleting contents of the VM from datastore datastore2 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 993.093983] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-03fce1b6-c3ef-40c6-9a3d-6a8cc4836ddc tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Deleting the datastore file [datastore2] 75344275-bdf2-4526-a101-e62ec270dd72 {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 993.093983] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a20b3cda-2b53-498b-8193-ef3cdcba4d3e {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.102377] env[61839]: DEBUG oslo_vmware.api [None req-03fce1b6-c3ef-40c6-9a3d-6a8cc4836ddc tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Waiting for the task: (returnval){ [ 993.102377] env[61839]: value = "task-1314926" [ 993.102377] env[61839]: _type = "Task" [ 993.102377] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 993.113117] env[61839]: DEBUG oslo_vmware.api [None req-03fce1b6-c3ef-40c6-9a3d-6a8cc4836ddc tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Task: {'id': task-1314926, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 993.293680] env[61839]: DEBUG oslo_concurrency.lockutils [None req-f9f5c2a8-403c-41f6-a435-d8a3d0ddf2bb tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Acquiring lock "interface-d718d866-dd6c-4332-b63a-be6850a5a785-a9bbfd1a-7c62-42c2-ba9b-587b125ef832" by "nova.compute.manager.ComputeManager.attach_interface.<locals>.do_attach_interface" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 993.294283] env[61839]: DEBUG oslo_concurrency.lockutils [None req-f9f5c2a8-403c-41f6-a435-d8a3d0ddf2bb tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Lock "interface-d718d866-dd6c-4332-b63a-be6850a5a785-a9bbfd1a-7c62-42c2-ba9b-587b125ef832" acquired by "nova.compute.manager.ComputeManager.attach_interface.<locals>.do_attach_interface" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 993.294429] env[61839]: DEBUG nova.objects.instance [None req-f9f5c2a8-403c-41f6-a435-d8a3d0ddf2bb tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Lazy-loading 'flavor' on Instance uuid d718d866-dd6c-4332-b63a-be6850a5a785 {{(pid=61839) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}}
[ 993.330398] env[61839]: DEBUG nova.network.neutron [-] [instance: 23ee24d5-bccd-497d-a53f-b9723fd9c707] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 993.361325] env[61839]: INFO nova.compute.manager [-] [instance: 694a5d4b-3673-406b-a24a-d37fad33e549] Took 1.52 seconds to deallocate network for instance.
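The acquire/release pairs above, and throughout this window, are emitted by oslo.concurrency's lockutils (the lockutils.py:402/407/421 trailers), which nova uses to serialize per-instance work; qualnames such as "ComputeManager.attach_interface.<locals>.do_attach_interface" are the decorated nested functions. A minimal sketch of the same pattern, assuming bare lockutils.synchronized decorators (nova routes this through its own wrapper, and do_terminate_instance here is a stand-in, not nova's actual body):

from oslo_concurrency import lockutils


def terminate_instance(instance_uuid):
    # Outer lock named after the instance UUID, matching lines like
    # Acquiring lock "5c29c188-..." by "...terminate_instance.<locals>.do_terminate_instance"
    @lockutils.synchronized(instance_uuid)
    def do_terminate_instance():
        # A second, finer-grained "<uuid>-events" lock guards the pending
        # external-event queue, matching the "-events" acquire/release pairs.
        @lockutils.synchronized(instance_uuid + '-events')
        def _clear_events():
            pass  # drop queued network-vif-* events for this instance

        _clear_events()
        # ... power off, unregister, delete backing files, deallocate network ...

    do_terminate_instance()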
[ 993.370349] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a09bae88-7ca6-454c-87de-086a44c03aaf tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.855s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 993.371830] env[61839]: DEBUG oslo_concurrency.lockutils [None req-c6766b65-c08e-4c6d-b0c1-01cca2ee36b8 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.325s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 993.372041] env[61839]: DEBUG oslo_concurrency.lockutils [None req-c6766b65-c08e-4c6d-b0c1-01cca2ee36b8 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 993.373738] env[61839]: DEBUG oslo_concurrency.lockutils [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 3.619s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 993.379167] env[61839]: DEBUG oslo_vmware.api [None req-a7050108-7622-4d4a-b278-16e5711e81d2 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1314919, 'name': PowerOnVM_Task, 'duration_secs': 0.644071} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 993.379534] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-a7050108-7622-4d4a-b278-16e5711e81d2 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 73b83239-bbc8-41d1-aec3-2b4519c320af] Powered on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 993.379760] env[61839]: DEBUG nova.compute.manager [None req-a7050108-7622-4d4a-b278-16e5711e81d2 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 73b83239-bbc8-41d1-aec3-2b4519c320af] Checking state {{(pid=61839) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 993.380604] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e68bf2d-3f1c-4028-a681-ff3cd6f66e37 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.393634] env[61839]: INFO nova.scheduler.client.report [None req-c6766b65-c08e-4c6d-b0c1-01cca2ee36b8 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Deleted allocations for instance fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4 [ 993.393634] env[61839]: INFO nova.scheduler.client.report [None req-a09bae88-7ca6-454c-87de-086a44c03aaf tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Deleted allocations for instance 3c832102-cacc-4dd8-a336-2aa1d8bd8116 [ 993.464998] env[61839]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f996a301-18d7-4028-8cc3-1562fb76ef34 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.471153] env[61839]: DEBUG oslo_vmware.api [None req-3560b6a3-38e1-4082-80dc-ba1e60a79e5a tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': task-1314921, 'name': PowerOnVM_Task, 'duration_secs': 0.567831} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 993.472941] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-3560b6a3-38e1-4082-80dc-ba1e60a79e5a tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: a87f3a17-0a97-4b47-bc95-eee5975f8203] Powered on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 993.472941] env[61839]: INFO nova.compute.manager [None req-3560b6a3-38e1-4082-80dc-ba1e60a79e5a tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: a87f3a17-0a97-4b47-bc95-eee5975f8203] Took 9.47 seconds to spawn the instance on the hypervisor. 
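Each recurring "Task: {'id': task-..., 'name': PowerOnVM_Task} progress is N%" entry is oslo.vmware's wait_for_task loop polling the vCenter task object (api.py:397/434/444 in the trailers). A sketch of the calling pattern, assuming an oslo.vmware session; the endpoint, credentials, and vm_ref lookup are placeholders rather than values from this log:

from oslo_vmware import api

# Placeholder endpoint/credentials; task_poll_interval=0.5 matches the
# sub-second poll cadence visible in the timestamps above.
session = api.VMwareAPISession('vc.example.test', 'user', 'secret',
                               api_retry_count=10, task_poll_interval=0.5)

vm_ref = None  # a VirtualMachine managed-object ref, resolved elsewhere
task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
# wait_for_task blocks, logging "progress is N%" on each poll, and returns
# the task info once the task state is 'success' (or raises on 'error').
task_info = session.wait_for_task(task)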
[ 993.472941] env[61839]: DEBUG nova.compute.manager [None req-3560b6a3-38e1-4082-80dc-ba1e60a79e5a tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: a87f3a17-0a97-4b47-bc95-eee5975f8203] Checking state {{(pid=61839) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 993.473559] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a4940db-0bdc-4c59-8d1b-0c82af6af463 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.478463] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1610661f-4e4d-4eb5-9f0a-a3d22f17551c {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.510341] env[61839]: DEBUG nova.compute.manager [req-86224a62-2f1f-41d4-b7ca-13e59142e2dc req-9697cf67-4b66-4b06-bb8a-58f1fc201c47 service nova] [instance: 23ee24d5-bccd-497d-a53f-b9723fd9c707] Detach interface failed, port_id=56222a14-6e55-4e9b-a963-5d868763ad21, reason: Instance 23ee24d5-bccd-497d-a53f-b9723fd9c707 could not be found. {{(pid=61839) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 993.519301] env[61839]: DEBUG oslo_vmware.api [None req-09fb80b3-5be1-4023-9fa0-7b2ce3a7a3f4 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Task: {'id': task-1314924, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.225163} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 993.519553] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-09fb80b3-5be1-4023-9fa0-7b2ce3a7a3f4 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Deleted the datastore file {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 993.519744] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-09fb80b3-5be1-4023-9fa0-7b2ce3a7a3f4 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 5c29c188-a34b-4751-9f8b-166af7b15088] Deleted contents of the VM from datastore datastore1 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 993.519928] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-09fb80b3-5be1-4023-9fa0-7b2ce3a7a3f4 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 5c29c188-a34b-4751-9f8b-166af7b15088] Instance destroyed {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 993.520123] env[61839]: INFO nova.compute.manager [None req-09fb80b3-5be1-4023-9fa0-7b2ce3a7a3f4 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] [instance: 5c29c188-a34b-4751-9f8b-166af7b15088] Took 1.16 seconds to destroy the instance on the hypervisor. [ 993.520377] env[61839]: DEBUG oslo.service.loopingcall [None req-09fb80b3-5be1-4023-9fa0-7b2ce3a7a3f4 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 993.520570] env[61839]: DEBUG nova.compute.manager [-] [instance: 5c29c188-a34b-4751-9f8b-166af7b15088] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 993.520664] env[61839]: DEBUG nova.network.neutron [-] [instance: 5c29c188-a34b-4751-9f8b-166af7b15088] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 993.613242] env[61839]: DEBUG oslo_vmware.api [None req-03fce1b6-c3ef-40c6-9a3d-6a8cc4836ddc tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Task: {'id': task-1314926, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.103652} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 993.613242] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-03fce1b6-c3ef-40c6-9a3d-6a8cc4836ddc tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Deleted the datastore file {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 993.613242] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-03fce1b6-c3ef-40c6-9a3d-6a8cc4836ddc tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] [instance: 75344275-bdf2-4526-a101-e62ec270dd72] Deleted contents of the VM from datastore datastore2 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 993.613455] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-03fce1b6-c3ef-40c6-9a3d-6a8cc4836ddc tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] [instance: 75344275-bdf2-4526-a101-e62ec270dd72] Instance destroyed {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 993.613494] env[61839]: INFO nova.compute.manager [None req-03fce1b6-c3ef-40c6-9a3d-6a8cc4836ddc tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] [instance: 75344275-bdf2-4526-a101-e62ec270dd72] Took 1.08 seconds to destroy the instance on the hypervisor. [ 993.613723] env[61839]: DEBUG oslo.service.loopingcall [None req-03fce1b6-c3ef-40c6-9a3d-6a8cc4836ddc tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 993.613916] env[61839]: DEBUG nova.compute.manager [-] [instance: 75344275-bdf2-4526-a101-e62ec270dd72] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 993.614019] env[61839]: DEBUG nova.network.neutron [-] [instance: 75344275-bdf2-4526-a101-e62ec270dd72] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 993.629939] env[61839]: DEBUG nova.network.neutron [-] [instance: 75344275-bdf2-4526-a101-e62ec270dd72] Instance cache missing network info. 
{{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 993.704377] env[61839]: DEBUG nova.compute.manager [req-8dbfa670-7280-439b-bf09-faf4c5fcbe9f req-c4206149-d422-4326-961e-479e0a0cee68 service nova] [instance: 694a5d4b-3673-406b-a24a-d37fad33e549] Received event network-vif-deleted-4c2c0fb5-064b-4c53-9498-77b9ddc16884 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}}
[ 993.832857] env[61839]: INFO nova.compute.manager [-] [instance: 23ee24d5-bccd-497d-a53f-b9723fd9c707] Took 1.34 seconds to deallocate network for instance.
[ 993.870822] env[61839]: DEBUG oslo_concurrency.lockutils [None req-4adac468-11aa-40c8-bff0-c8fe43509589 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 993.896962] env[61839]: INFO nova.compute.manager [None req-a7050108-7622-4d4a-b278-16e5711e81d2 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 73b83239-bbc8-41d1-aec3-2b4519c320af] bringing vm to original state: 'stopped'
[ 993.904537] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a09bae88-7ca6-454c-87de-086a44c03aaf tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Lock "3c832102-cacc-4dd8-a336-2aa1d8bd8116" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 10.324s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 993.906380] env[61839]: DEBUG oslo_concurrency.lockutils [None req-c6766b65-c08e-4c6d-b0c1-01cca2ee36b8 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Lock "fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 8.214s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 993.913517] env[61839]: DEBUG nova.objects.instance [None req-f9f5c2a8-403c-41f6-a435-d8a3d0ddf2bb tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Lazy-loading 'pci_requests' on Instance uuid d718d866-dd6c-4332-b63a-be6850a5a785 {{(pid=61839) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}}
[ 994.007066] env[61839]: INFO nova.compute.manager [None req-3560b6a3-38e1-4082-80dc-ba1e60a79e5a tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: a87f3a17-0a97-4b47-bc95-eee5975f8203] Took 25.83 seconds to build instance.
[ 994.028839] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-17b98ee1-69c2-41fe-9855-70a82fddaa5f tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] [instance: d4a8c153-7585-4c78-8aa4-56077e0a7af6] Volume attach.
Driver type: vmdk {{(pid=61839) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 994.029127] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-17b98ee1-69c2-41fe-9855-70a82fddaa5f tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] [instance: d4a8c153-7585-4c78-8aa4-56077e0a7af6] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-281438', 'volume_id': '6d100a91-2536-4c04-8112-17210c05edfb', 'name': 'volume-6d100a91-2536-4c04-8112-17210c05edfb', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'd4a8c153-7585-4c78-8aa4-56077e0a7af6', 'attached_at': '', 'detached_at': '', 'volume_id': '6d100a91-2536-4c04-8112-17210c05edfb', 'serial': '6d100a91-2536-4c04-8112-17210c05edfb'} {{(pid=61839) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 994.030053] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31979c9e-e7cf-4185-b09d-a9f6a77a70bb {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.048239] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1568416d-7650-4605-a12d-bcd7db47003d {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.076136] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-17b98ee1-69c2-41fe-9855-70a82fddaa5f tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] [instance: d4a8c153-7585-4c78-8aa4-56077e0a7af6] Reconfiguring VM instance instance-0000005a to attach disk [datastore2] volume-6d100a91-2536-4c04-8112-17210c05edfb/volume-6d100a91-2536-4c04-8112-17210c05edfb.vmdk or device None with type thin {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 994.077483] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8bfb98ae-6d8d-4de1-b231-307f80ecdd2d {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.100556] env[61839]: DEBUG oslo_vmware.api [None req-17b98ee1-69c2-41fe-9855-70a82fddaa5f tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Waiting for the task: (returnval){ [ 994.100556] env[61839]: value = "task-1314927" [ 994.100556] env[61839]: _type = "Task" [ 994.100556] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 994.109861] env[61839]: DEBUG oslo_vmware.api [None req-17b98ee1-69c2-41fe-9855-70a82fddaa5f tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Task: {'id': task-1314927, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.133792] env[61839]: DEBUG nova.network.neutron [-] [instance: 75344275-bdf2-4526-a101-e62ec270dd72] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 994.341397] env[61839]: DEBUG nova.network.neutron [-] [instance: 5c29c188-a34b-4751-9f8b-166af7b15088] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 994.343861] env[61839]: DEBUG oslo_concurrency.lockutils [None req-dff80fa7-071d-4022-95f6-38022e3c7b75 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 994.410745] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance 5c29c188-a34b-4751-9f8b-166af7b15088 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 994.410905] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance 6f43abec-51e2-40e4-8a0f-5a8617a9a9f8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 994.411047] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance 7f7b3f51-3e96-49f1-a84a-81ae649e6938 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 994.411173] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance 694a5d4b-3673-406b-a24a-d37fad33e549 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 994.411292] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance d718d866-dd6c-4332-b63a-be6850a5a785 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 994.411661] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance d4a8c153-7585-4c78-8aa4-56077e0a7af6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 994.411661] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance 3e27bc4a-a4f3-4929-931a-0c3ecaf10e65 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 994.411661] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance 75344275-bdf2-4526-a101-e62ec270dd72 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 994.411814] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance 86525ea7-af75-4b10-85a1-c0fbab73ea5f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 994.411853] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance 23ee24d5-bccd-497d-a53f-b9723fd9c707 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 994.411949] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance 73b83239-bbc8-41d1-aec3-2b4519c320af actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 994.412072] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance a87f3a17-0a97-4b47-bc95-eee5975f8203 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 994.416419] env[61839]: DEBUG nova.objects.base [None req-f9f5c2a8-403c-41f6-a435-d8a3d0ddf2bb tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Object Instance lazy-loaded attributes: flavor,pci_requests {{(pid=61839) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 994.416628] env[61839]: DEBUG nova.network.neutron [None req-f9f5c2a8-403c-41f6-a435-d8a3d0ddf2bb tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: d718d866-dd6c-4332-b63a-be6850a5a785] allocate_for_instance() {{(pid=61839) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 994.480504] env[61839]: DEBUG nova.policy [None req-f9f5c2a8-403c-41f6-a435-d8a3d0ddf2bb tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '76a29e808031416ab8895e89c337be6e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7e03467b7fba46a9aac1562a1cb8368e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61839) authorize /opt/stack/nova/nova/policy.py:201}} [ 994.509529] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3560b6a3-38e1-4082-80dc-ba1e60a79e5a tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Lock "a87f3a17-0a97-4b47-bc95-eee5975f8203" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 27.639s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 994.593755] env[61839]: DEBUG oslo_concurrency.lockutils [None req-e789b1d9-8865-4e5c-a56e-08cef8f5eef8 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Acquiring lock "3e27bc4a-a4f3-4929-931a-0c3ecaf10e65" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 994.594792] env[61839]: DEBUG oslo_concurrency.lockutils [None req-e789b1d9-8865-4e5c-a56e-08cef8f5eef8 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Lock "3e27bc4a-a4f3-4929-931a-0c3ecaf10e65" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 994.594792] env[61839]: DEBUG oslo_concurrency.lockutils [None req-e789b1d9-8865-4e5c-a56e-08cef8f5eef8 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Acquiring lock "3e27bc4a-a4f3-4929-931a-0c3ecaf10e65-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 994.594792] env[61839]: DEBUG oslo_concurrency.lockutils [None req-e789b1d9-8865-4e5c-a56e-08cef8f5eef8 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Lock 
"3e27bc4a-a4f3-4929-931a-0c3ecaf10e65-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 994.594937] env[61839]: DEBUG oslo_concurrency.lockutils [None req-e789b1d9-8865-4e5c-a56e-08cef8f5eef8 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Lock "3e27bc4a-a4f3-4929-931a-0c3ecaf10e65-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 994.598443] env[61839]: INFO nova.compute.manager [None req-e789b1d9-8865-4e5c-a56e-08cef8f5eef8 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 3e27bc4a-a4f3-4929-931a-0c3ecaf10e65] Terminating instance [ 994.605429] env[61839]: DEBUG nova.compute.manager [None req-e789b1d9-8865-4e5c-a56e-08cef8f5eef8 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 3e27bc4a-a4f3-4929-931a-0c3ecaf10e65] Start destroying the instance on the hypervisor. {{(pid=61839) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 994.605628] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-e789b1d9-8865-4e5c-a56e-08cef8f5eef8 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 3e27bc4a-a4f3-4929-931a-0c3ecaf10e65] Destroying instance {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 994.606421] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e84748e1-a57c-44f7-936b-e6474417e3c4 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.615312] env[61839]: DEBUG oslo_vmware.api [None req-17b98ee1-69c2-41fe-9855-70a82fddaa5f tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Task: {'id': task-1314927, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.617395] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-e789b1d9-8865-4e5c-a56e-08cef8f5eef8 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 3e27bc4a-a4f3-4929-931a-0c3ecaf10e65] Powering off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 994.617622] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1bdd5763-6482-4312-8b16-1362139a4808 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.624546] env[61839]: DEBUG oslo_vmware.api [None req-e789b1d9-8865-4e5c-a56e-08cef8f5eef8 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Waiting for the task: (returnval){ [ 994.624546] env[61839]: value = "task-1314928" [ 994.624546] env[61839]: _type = "Task" [ 994.624546] env[61839]: } to complete. 
{{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 994.634170] env[61839]: DEBUG oslo_vmware.api [None req-e789b1d9-8865-4e5c-a56e-08cef8f5eef8 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': task-1314928, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.637845] env[61839]: INFO nova.compute.manager [-] [instance: 75344275-bdf2-4526-a101-e62ec270dd72] Took 1.02 seconds to deallocate network for instance. [ 994.677695] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a90229fc-90b0-4a6e-95ec-0d256956646d {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.686860] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-986af452-4deb-4a42-b972-f088edd909bc tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] [instance: 86525ea7-af75-4b10-85a1-c0fbab73ea5f] Suspending the VM {{(pid=61839) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 994.687221] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-fcdb8322-68f7-4d46-8265-c5c4be3b2b28 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.696554] env[61839]: DEBUG oslo_vmware.api [None req-986af452-4deb-4a42-b972-f088edd909bc tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Waiting for the task: (returnval){ [ 994.696554] env[61839]: value = "task-1314929" [ 994.696554] env[61839]: _type = "Task" [ 994.696554] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 994.706264] env[61839]: DEBUG oslo_vmware.api [None req-986af452-4deb-4a42-b972-f088edd909bc tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Task: {'id': task-1314929, 'name': SuspendVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.845113] env[61839]: INFO nova.compute.manager [-] [instance: 5c29c188-a34b-4751-9f8b-166af7b15088] Took 1.32 seconds to deallocate network for instance. 
[ 994.908145] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a7050108-7622-4d4a-b278-16e5711e81d2 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Acquiring lock "73b83239-bbc8-41d1-aec3-2b4519c320af" by "nova.compute.manager.ComputeManager.stop_instance.<locals>.do_stop_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 994.908570] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a7050108-7622-4d4a-b278-16e5711e81d2 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Lock "73b83239-bbc8-41d1-aec3-2b4519c320af" acquired by "nova.compute.manager.ComputeManager.stop_instance.<locals>.do_stop_instance" :: waited 0.001s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 994.908930] env[61839]: DEBUG nova.compute.manager [None req-a7050108-7622-4d4a-b278-16e5711e81d2 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 73b83239-bbc8-41d1-aec3-2b4519c320af] Checking state {{(pid=61839) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 994.909981] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-924ad804-43c4-4115-9944-fe5e25fa5da7 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.915423] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance 8e9bea05-d6d7-40a8-997d-8c952f596f75 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}.
{{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 994.915824] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Total usable vcpus: 48, total allocated vcpus: 12 {{(pid=61839) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 994.916126] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2880MB phys_disk=200GB used_disk=12GB total_vcpus=48 used_vcpus=12 pci_stats=[] {{(pid=61839) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 994.921844] env[61839]: DEBUG nova.compute.manager [None req-a7050108-7622-4d4a-b278-16e5711e81d2 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 73b83239-bbc8-41d1-aec3-2b4519c320af] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=61839) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3368}} [ 994.926080] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-a7050108-7622-4d4a-b278-16e5711e81d2 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 73b83239-bbc8-41d1-aec3-2b4519c320af] Powering off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 994.926376] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d00d71de-5f68-4d3e-bda8-b81c39c62ae5 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.935954] env[61839]: DEBUG oslo_vmware.api [None req-a7050108-7622-4d4a-b278-16e5711e81d2 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Waiting for the task: (returnval){ [ 994.935954] env[61839]: value = "task-1314930" [ 994.935954] env[61839]: _type = "Task" [ 994.935954] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 994.951911] env[61839]: DEBUG oslo_vmware.api [None req-a7050108-7622-4d4a-b278-16e5711e81d2 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1314930, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.098977] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa05cea8-e329-44de-a920-4626fe72536d {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.114752] env[61839]: DEBUG oslo_vmware.api [None req-17b98ee1-69c2-41fe-9855-70a82fddaa5f tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Task: {'id': task-1314927, 'name': ReconfigVM_Task, 'duration_secs': 0.783918} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 995.115468] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-17b98ee1-69c2-41fe-9855-70a82fddaa5f tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] [instance: d4a8c153-7585-4c78-8aa4-56077e0a7af6] Reconfigured VM instance instance-0000005a to attach disk [datastore2] volume-6d100a91-2536-4c04-8112-17210c05edfb/volume-6d100a91-2536-4c04-8112-17210c05edfb.vmdk or device None with type thin {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 995.120928] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75f5e409-f343-4ae1-a20b-8c63afec1637 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.124669] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-62fe1fe6-174c-4353-a163-27552370f919 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.165441] env[61839]: DEBUG oslo_concurrency.lockutils [None req-03fce1b6-c3ef-40c6-9a3d-6a8cc4836ddc tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 995.168447] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6df91df-2a5c-46ed-b17b-3b89c063cf3f {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.171796] env[61839]: DEBUG oslo_vmware.api [None req-17b98ee1-69c2-41fe-9855-70a82fddaa5f tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Waiting for the task: (returnval){ [ 995.171796] env[61839]: value = "task-1314931" [ 995.171796] env[61839]: _type = "Task" [ 995.171796] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 995.174577] env[61839]: DEBUG oslo_vmware.api [None req-e789b1d9-8865-4e5c-a56e-08cef8f5eef8 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': task-1314928, 'name': PowerOffVM_Task, 'duration_secs': 0.224316} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 995.178541] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-e789b1d9-8865-4e5c-a56e-08cef8f5eef8 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 3e27bc4a-a4f3-4929-931a-0c3ecaf10e65] Powered off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 995.178866] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-e789b1d9-8865-4e5c-a56e-08cef8f5eef8 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 3e27bc4a-a4f3-4929-931a-0c3ecaf10e65] Unregistering the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 995.181505] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ba7ec7ae-8223-4335-9480-92fafef45449 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.183854] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54b7fb57-87be-4545-9a2b-1b85aab22f0c {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.191304] env[61839]: DEBUG oslo_vmware.api [None req-17b98ee1-69c2-41fe-9855-70a82fddaa5f tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Task: {'id': task-1314931, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.204123] env[61839]: DEBUG nova.compute.provider_tree [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 995.217500] env[61839]: DEBUG oslo_vmware.api [None req-986af452-4deb-4a42-b972-f088edd909bc tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Task: {'id': task-1314929, 'name': SuspendVM_Task} progress is 58%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.281708] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-e789b1d9-8865-4e5c-a56e-08cef8f5eef8 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 3e27bc4a-a4f3-4929-931a-0c3ecaf10e65] Unregistered the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 995.282017] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-e789b1d9-8865-4e5c-a56e-08cef8f5eef8 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 3e27bc4a-a4f3-4929-931a-0c3ecaf10e65] Deleting contents of the VM from datastore datastore2 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 995.282279] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-e789b1d9-8865-4e5c-a56e-08cef8f5eef8 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Deleting the datastore file [datastore2] 3e27bc4a-a4f3-4929-931a-0c3ecaf10e65 {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 995.282582] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-92ea999f-6e28-4166-9d48-8f375c3e7321 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.289859] env[61839]: DEBUG oslo_vmware.api [None req-e789b1d9-8865-4e5c-a56e-08cef8f5eef8 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Waiting for the task: (returnval){ [ 995.289859] env[61839]: value = "task-1314933" [ 995.289859] env[61839]: _type = "Task" [ 995.289859] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 995.301045] env[61839]: DEBUG oslo_vmware.api [None req-e789b1d9-8865-4e5c-a56e-08cef8f5eef8 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': task-1314933, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.351077] env[61839]: DEBUG nova.compute.manager [req-e40788be-bf2b-446f-ac73-0c0f60a24857 req-c28b9c3c-fad3-47b8-80f8-e7898fdab106 service nova] [instance: 5c29c188-a34b-4751-9f8b-166af7b15088] Received event network-vif-deleted-2d6e228a-75ff-4bff-bc8d-bdde3218cf40 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 995.352195] env[61839]: DEBUG oslo_concurrency.lockutils [None req-09fb80b3-5be1-4023-9fa0-7b2ce3a7a3f4 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 995.449768] env[61839]: DEBUG oslo_vmware.api [None req-a7050108-7622-4d4a-b278-16e5711e81d2 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1314930, 'name': PowerOffVM_Task, 'duration_secs': 0.344975} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 995.450098] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-a7050108-7622-4d4a-b278-16e5711e81d2 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 73b83239-bbc8-41d1-aec3-2b4519c320af] Powered off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 995.450309] env[61839]: DEBUG nova.compute.manager [None req-a7050108-7622-4d4a-b278-16e5711e81d2 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 73b83239-bbc8-41d1-aec3-2b4519c320af] Checking state {{(pid=61839) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 995.451254] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d3a23aa-fdd8-4690-91e6-32535808e3a3 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.630404] env[61839]: DEBUG oslo_concurrency.lockutils [None req-7a32e35e-a270-437e-befd-37ff47a7524e tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Acquiring lock "625a8fc1-23fc-4035-855f-3d3a963cdcea" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 995.630666] env[61839]: DEBUG oslo_concurrency.lockutils [None req-7a32e35e-a270-437e-befd-37ff47a7524e tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Lock "625a8fc1-23fc-4035-855f-3d3a963cdcea" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 995.686266] env[61839]: DEBUG oslo_vmware.api [None req-17b98ee1-69c2-41fe-9855-70a82fddaa5f tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Task: {'id': task-1314931, 'name': ReconfigVM_Task, 'duration_secs': 0.190037} completed successfully.
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 995.686637] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-17b98ee1-69c2-41fe-9855-70a82fddaa5f tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] [instance: d4a8c153-7585-4c78-8aa4-56077e0a7af6] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-281438', 'volume_id': '6d100a91-2536-4c04-8112-17210c05edfb', 'name': 'volume-6d100a91-2536-4c04-8112-17210c05edfb', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'd4a8c153-7585-4c78-8aa4-56077e0a7af6', 'attached_at': '', 'detached_at': '', 'volume_id': '6d100a91-2536-4c04-8112-17210c05edfb', 'serial': '6d100a91-2536-4c04-8112-17210c05edfb'} {{(pid=61839) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 995.713293] env[61839]: DEBUG nova.scheduler.client.report [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 995.716666] env[61839]: DEBUG oslo_concurrency.lockutils [None req-74ce37d6-0506-412a-8632-b996d7e53d27 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Acquiring lock "a87f3a17-0a97-4b47-bc95-eee5975f8203" by "nova.compute.manager.ComputeManager.stop_instance.<locals>.do_stop_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 995.716905] env[61839]: DEBUG oslo_concurrency.lockutils [None req-74ce37d6-0506-412a-8632-b996d7e53d27 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Lock "a87f3a17-0a97-4b47-bc95-eee5975f8203" acquired by "nova.compute.manager.ComputeManager.stop_instance.<locals>.do_stop_instance" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 995.717117] env[61839]: DEBUG nova.compute.manager [None req-74ce37d6-0506-412a-8632-b996d7e53d27 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: a87f3a17-0a97-4b47-bc95-eee5975f8203] Checking state {{(pid=61839) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 995.717503] env[61839]: DEBUG oslo_vmware.api [None req-986af452-4deb-4a42-b972-f088edd909bc tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Task: {'id': task-1314929, 'name': SuspendVM_Task, 'duration_secs': 0.725358} completed successfully.
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 995.719074] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ced10dac-00c3-48d3-85a8-eca72cbe7ce1 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.722326] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-986af452-4deb-4a42-b972-f088edd909bc tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] [instance: 86525ea7-af75-4b10-85a1-c0fbab73ea5f] Suspended the VM {{(pid=61839) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 995.722503] env[61839]: DEBUG nova.compute.manager [None req-986af452-4deb-4a42-b972-f088edd909bc tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] [instance: 86525ea7-af75-4b10-85a1-c0fbab73ea5f] Checking state {{(pid=61839) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 995.723307] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c31cd08a-0cb6-4e95-9281-9983b009471d {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.730667] env[61839]: DEBUG nova.compute.manager [None req-74ce37d6-0506-412a-8632-b996d7e53d27 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: a87f3a17-0a97-4b47-bc95-eee5975f8203] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=61839) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3368}} [ 995.731305] env[61839]: DEBUG nova.objects.instance [None req-74ce37d6-0506-412a-8632-b996d7e53d27 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Lazy-loading 'flavor' on Instance uuid a87f3a17-0a97-4b47-bc95-eee5975f8203 {{(pid=61839) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 995.800427] env[61839]: DEBUG oslo_vmware.api [None req-e789b1d9-8865-4e5c-a56e-08cef8f5eef8 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': task-1314933, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.156683} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 995.800566] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-e789b1d9-8865-4e5c-a56e-08cef8f5eef8 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Deleted the datastore file {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 995.800725] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-e789b1d9-8865-4e5c-a56e-08cef8f5eef8 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 3e27bc4a-a4f3-4929-931a-0c3ecaf10e65] Deleted contents of the VM from datastore datastore2 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 995.800905] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-e789b1d9-8865-4e5c-a56e-08cef8f5eef8 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 3e27bc4a-a4f3-4929-931a-0c3ecaf10e65] Instance destroyed {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 995.801210] env[61839]: INFO nova.compute.manager [None req-e789b1d9-8865-4e5c-a56e-08cef8f5eef8 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 3e27bc4a-a4f3-4929-931a-0c3ecaf10e65] Took 1.20 seconds to destroy the instance on the hypervisor. [ 995.801506] env[61839]: DEBUG oslo.service.loopingcall [None req-e789b1d9-8865-4e5c-a56e-08cef8f5eef8 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 995.801718] env[61839]: DEBUG nova.compute.manager [-] [instance: 3e27bc4a-a4f3-4929-931a-0c3ecaf10e65] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 995.801816] env[61839]: DEBUG nova.network.neutron [-] [instance: 3e27bc4a-a4f3-4929-931a-0c3ecaf10e65] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 995.898353] env[61839]: DEBUG nova.compute.manager [req-2c2b0626-fbb6-4541-8f18-e36de752945b req-4bad7793-bf52-473f-87eb-e54d07aadff0 service nova] [instance: d718d866-dd6c-4332-b63a-be6850a5a785] Received event network-vif-plugged-a9bbfd1a-7c62-42c2-ba9b-587b125ef832 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 995.898636] env[61839]: DEBUG oslo_concurrency.lockutils [req-2c2b0626-fbb6-4541-8f18-e36de752945b req-4bad7793-bf52-473f-87eb-e54d07aadff0 service nova] Acquiring lock "d718d866-dd6c-4332-b63a-be6850a5a785-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 995.898806] env[61839]: DEBUG oslo_concurrency.lockutils [req-2c2b0626-fbb6-4541-8f18-e36de752945b req-4bad7793-bf52-473f-87eb-e54d07aadff0 service nova] Lock "d718d866-dd6c-4332-b63a-be6850a5a785-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 995.898981] env[61839]: DEBUG oslo_concurrency.lockutils [req-2c2b0626-fbb6-4541-8f18-e36de752945b
req-4bad7793-bf52-473f-87eb-e54d07aadff0 service nova] Lock "d718d866-dd6c-4332-b63a-be6850a5a785-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 995.899311] env[61839]: DEBUG nova.compute.manager [req-2c2b0626-fbb6-4541-8f18-e36de752945b req-4bad7793-bf52-473f-87eb-e54d07aadff0 service nova] [instance: d718d866-dd6c-4332-b63a-be6850a5a785] No waiting events found dispatching network-vif-plugged-a9bbfd1a-7c62-42c2-ba9b-587b125ef832 {{(pid=61839) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 995.899497] env[61839]: WARNING nova.compute.manager [req-2c2b0626-fbb6-4541-8f18-e36de752945b req-4bad7793-bf52-473f-87eb-e54d07aadff0 service nova] [instance: d718d866-dd6c-4332-b63a-be6850a5a785] Received unexpected event network-vif-plugged-a9bbfd1a-7c62-42c2-ba9b-587b125ef832 for instance with vm_state active and task_state None. [ 995.963736] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a7050108-7622-4d4a-b278-16e5711e81d2 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Lock "73b83239-bbc8-41d1-aec3-2b4519c320af" "released" by "nova.compute.manager.ComputeManager.stop_instance.<locals>.do_stop_instance" :: held 1.055s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 996.000131] env[61839]: DEBUG nova.network.neutron [None req-f9f5c2a8-403c-41f6-a435-d8a3d0ddf2bb tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: d718d866-dd6c-4332-b63a-be6850a5a785] Successfully updated port: a9bbfd1a-7c62-42c2-ba9b-587b125ef832 {{(pid=61839) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 996.132852] env[61839]: DEBUG nova.compute.manager [None req-7a32e35e-a270-437e-befd-37ff47a7524e tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 625a8fc1-23fc-4035-855f-3d3a963cdcea] Starting instance...
{{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 996.219416] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61839) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 996.219639] env[61839]: DEBUG oslo_concurrency.lockutils [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.846s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 996.219936] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0d0cf358-0b30-4267-bdcc-e309211a3bd4 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.282s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 996.221627] env[61839]: INFO nova.compute.claims [None req-0d0cf358-0b30-4267-bdcc-e309211a3bd4 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 8e9bea05-d6d7-40a8-997d-8c952f596f75] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 996.240692] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-74ce37d6-0506-412a-8632-b996d7e53d27 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: a87f3a17-0a97-4b47-bc95-eee5975f8203] Powering off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 996.242634] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-59c9dc4e-87cb-4de1-a2f4-6b13f017e762 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.251541] env[61839]: DEBUG oslo_vmware.api [None req-74ce37d6-0506-412a-8632-b996d7e53d27 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Waiting for the task: (returnval){ [ 996.251541] env[61839]: value = "task-1314934" [ 996.251541] env[61839]: _type = "Task" [ 996.251541] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 996.277376] env[61839]: DEBUG oslo_vmware.api [None req-74ce37d6-0506-412a-8632-b996d7e53d27 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': task-1314934, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.473657] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a7050108-7622-4d4a-b278-16e5711e81d2 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 996.503054] env[61839]: DEBUG oslo_concurrency.lockutils [None req-f9f5c2a8-403c-41f6-a435-d8a3d0ddf2bb tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Acquiring lock "refresh_cache-d718d866-dd6c-4332-b63a-be6850a5a785" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 996.503054] env[61839]: DEBUG oslo_concurrency.lockutils [None req-f9f5c2a8-403c-41f6-a435-d8a3d0ddf2bb tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Acquired lock "refresh_cache-d718d866-dd6c-4332-b63a-be6850a5a785" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 996.503054] env[61839]: DEBUG nova.network.neutron [None req-f9f5c2a8-403c-41f6-a435-d8a3d0ddf2bb tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: d718d866-dd6c-4332-b63a-be6850a5a785] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 996.531734] env[61839]: DEBUG nova.network.neutron [-] [instance: 3e27bc4a-a4f3-4929-931a-0c3ecaf10e65] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 996.659081] env[61839]: DEBUG oslo_concurrency.lockutils [None req-7a32e35e-a270-437e-befd-37ff47a7524e tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 996.730029] env[61839]: DEBUG nova.objects.instance [None req-17b98ee1-69c2-41fe-9855-70a82fddaa5f tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Lazy-loading 'flavor' on Instance uuid d4a8c153-7585-4c78-8aa4-56077e0a7af6 {{(pid=61839) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 996.765817] env[61839]: DEBUG oslo_vmware.api [None req-74ce37d6-0506-412a-8632-b996d7e53d27 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': task-1314934, 'name': PowerOffVM_Task, 'duration_secs': 0.248848} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 996.765817] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-74ce37d6-0506-412a-8632-b996d7e53d27 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: a87f3a17-0a97-4b47-bc95-eee5975f8203] Powered off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 996.765817] env[61839]: DEBUG nova.compute.manager [None req-74ce37d6-0506-412a-8632-b996d7e53d27 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: a87f3a17-0a97-4b47-bc95-eee5975f8203] Checking state {{(pid=61839) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 996.765817] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82bc7d80-86d7-40f1-961c-291e2d18a9bd {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.033725] env[61839]: INFO nova.compute.manager [-] [instance: 3e27bc4a-a4f3-4929-931a-0c3ecaf10e65] Took 1.23 seconds to deallocate network for instance. [ 997.051212] env[61839]: WARNING nova.network.neutron [None req-f9f5c2a8-403c-41f6-a435-d8a3d0ddf2bb tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: d718d866-dd6c-4332-b63a-be6850a5a785] 41c98894-de91-45eb-a390-6217e0f9dca5 already exists in list: networks containing: ['41c98894-de91-45eb-a390-6217e0f9dca5']. ignoring it [ 997.051426] env[61839]: WARNING nova.network.neutron [None req-f9f5c2a8-403c-41f6-a435-d8a3d0ddf2bb tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: d718d866-dd6c-4332-b63a-be6850a5a785] 41c98894-de91-45eb-a390-6217e0f9dca5 already exists in list: networks containing: ['41c98894-de91-45eb-a390-6217e0f9dca5']. 
ignoring it [ 997.236818] env[61839]: DEBUG oslo_concurrency.lockutils [None req-17b98ee1-69c2-41fe-9855-70a82fddaa5f tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Lock "d4a8c153-7585-4c78-8aa4-56077e0a7af6" "released" by "nova.compute.manager.ComputeManager.attach_volume.<locals>.do_attach_volume" :: held 7.821s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 997.279492] env[61839]: DEBUG oslo_concurrency.lockutils [None req-74ce37d6-0506-412a-8632-b996d7e53d27 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Lock "a87f3a17-0a97-4b47-bc95-eee5975f8203" "released" by "nova.compute.manager.ComputeManager.stop_instance.<locals>.do_stop_instance" :: held 1.562s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 997.284373] env[61839]: DEBUG oslo_concurrency.lockutils [None req-92b02466-d43a-4dfe-87c4-d668863cf3fe tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Acquiring lock "73b83239-bbc8-41d1-aec3-2b4519c320af" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 997.285322] env[61839]: DEBUG oslo_concurrency.lockutils [None req-92b02466-d43a-4dfe-87c4-d668863cf3fe tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Lock "73b83239-bbc8-41d1-aec3-2b4519c320af" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 997.285322] env[61839]: DEBUG oslo_concurrency.lockutils [None req-92b02466-d43a-4dfe-87c4-d668863cf3fe tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Acquiring lock "73b83239-bbc8-41d1-aec3-2b4519c320af-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 997.285322] env[61839]: DEBUG oslo_concurrency.lockutils [None req-92b02466-d43a-4dfe-87c4-d668863cf3fe tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Lock "73b83239-bbc8-41d1-aec3-2b4519c320af-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 997.285322] env[61839]: DEBUG oslo_concurrency.lockutils [None req-92b02466-d43a-4dfe-87c4-d668863cf3fe tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Lock "73b83239-bbc8-41d1-aec3-2b4519c320af-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 997.287298] env[61839]: INFO nova.compute.manager [None req-92b02466-d43a-4dfe-87c4-d668863cf3fe tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 73b83239-bbc8-41d1-aec3-2b4519c320af] Terminating instance [ 997.289159] env[61839]: DEBUG nova.compute.manager
[None req-92b02466-d43a-4dfe-87c4-d668863cf3fe tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 73b83239-bbc8-41d1-aec3-2b4519c320af] Start destroying the instance on the hypervisor. {{(pid=61839) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 997.289373] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-92b02466-d43a-4dfe-87c4-d668863cf3fe tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 73b83239-bbc8-41d1-aec3-2b4519c320af] Destroying instance {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 997.290582] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99361b71-439a-4171-a1ea-0ec90893df0f {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.302188] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-92b02466-d43a-4dfe-87c4-d668863cf3fe tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 73b83239-bbc8-41d1-aec3-2b4519c320af] Unregistering the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 997.302680] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-20f2ce4f-d24b-4ba0-9799-5ed4cd22b9d6 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.384705] env[61839]: DEBUG nova.compute.manager [req-8ec7471a-1abc-40b8-a828-0ab6748cee6a req-fe31a8a8-fabc-48e5-b89e-afb281deabd5 service nova] [instance: 3e27bc4a-a4f3-4929-931a-0c3ecaf10e65] Received event network-vif-deleted-d227bf3a-c4a3-4ba4-9df9-4c7ab460feb2 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 997.437790] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26288121-c548-48f5-aaf9-d192e86f89dc {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.445885] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5c8b2d0-6be8-45b0-8f47-ba2210457903 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.479083] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-321766da-6249-4e87-8497-49d6dd25f62a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.497148] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-92b02466-d43a-4dfe-87c4-d668863cf3fe tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 73b83239-bbc8-41d1-aec3-2b4519c320af] Unregistered the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 997.497492] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-92b02466-d43a-4dfe-87c4-d668863cf3fe tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 73b83239-bbc8-41d1-aec3-2b4519c320af] Deleting contents of the VM from datastore datastore2 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 997.497799] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-92b02466-d43a-4dfe-87c4-d668863cf3fe tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Deleting the datastore file [datastore2] 73b83239-bbc8-41d1-aec3-2b4519c320af {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 997.499752] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-acf8b162-adf8-46f4-bea4-43c7ab2a8fc0 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.502923] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c8d4467-c88b-4041-9704-198b66efcd16 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.521158] env[61839]: DEBUG nova.compute.provider_tree [None req-0d0cf358-0b30-4267-bdcc-e309211a3bd4 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 997.524212] env[61839]: DEBUG oslo_vmware.api [None req-92b02466-d43a-4dfe-87c4-d668863cf3fe tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Waiting for the task: (returnval){ [ 997.524212] env[61839]: value = "task-1314936" [ 997.524212] env[61839]: _type = "Task" [ 997.524212] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 997.532769] env[61839]: DEBUG oslo_vmware.api [None req-92b02466-d43a-4dfe-87c4-d668863cf3fe tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1314936, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.540435] env[61839]: DEBUG oslo_concurrency.lockutils [None req-e789b1d9-8865-4e5c-a56e-08cef8f5eef8 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 997.599598] env[61839]: DEBUG nova.network.neutron [None req-f9f5c2a8-403c-41f6-a435-d8a3d0ddf2bb tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: d718d866-dd6c-4332-b63a-be6850a5a785] Updating instance_info_cache with network_info: [{"id": "5d041d96-4a6e-44d5-a31a-e597194524e0", "address": "fa:16:3e:a9:bd:d2", "network": {"id": "41c98894-de91-45eb-a390-6217e0f9dca5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1040806357-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.245", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7e03467b7fba46a9aac1562a1cb8368e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "572b7281-aad3-45fa-9cb2-fc1c70569948", "external-id": "nsx-vlan-transportzone-722", "segmentation_id": 722, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5d041d96-4a", "ovs_interfaceid": "5d041d96-4a6e-44d5-a31a-e597194524e0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "7aee83a4-620a-48c2-a47b-7d47e05a7a07", "address": "fa:16:3e:11:b9:34", "network": {"id": "41c98894-de91-45eb-a390-6217e0f9dca5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1040806357-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7e03467b7fba46a9aac1562a1cb8368e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "572b7281-aad3-45fa-9cb2-fc1c70569948", "external-id": "nsx-vlan-transportzone-722", "segmentation_id": 722, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7aee83a4-62", "ovs_interfaceid": "7aee83a4-620a-48c2-a47b-7d47e05a7a07", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "a9bbfd1a-7c62-42c2-ba9b-587b125ef832", "address": "fa:16:3e:61:7c:34", "network": {"id": "41c98894-de91-45eb-a390-6217e0f9dca5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1040806357-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", 
"type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7e03467b7fba46a9aac1562a1cb8368e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "572b7281-aad3-45fa-9cb2-fc1c70569948", "external-id": "nsx-vlan-transportzone-722", "segmentation_id": 722, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa9bbfd1a-7c", "ovs_interfaceid": "a9bbfd1a-7c62-42c2-ba9b-587b125ef832", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 997.672841] env[61839]: INFO nova.compute.manager [None req-4441f351-3933-454b-aa8a-c8307395c223 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] [instance: 86525ea7-af75-4b10-85a1-c0fbab73ea5f] Resuming [ 997.673532] env[61839]: DEBUG nova.objects.instance [None req-4441f351-3933-454b-aa8a-c8307395c223 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Lazy-loading 'flavor' on Instance uuid 86525ea7-af75-4b10-85a1-c0fbab73ea5f {{(pid=61839) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 997.985058] env[61839]: DEBUG oslo_concurrency.lockutils [None req-8dd885c2-6f9f-4e53-918a-447bb45e31c9 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Acquiring lock "d4a8c153-7585-4c78-8aa4-56077e0a7af6" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 997.985371] env[61839]: DEBUG oslo_concurrency.lockutils [None req-8dd885c2-6f9f-4e53-918a-447bb45e31c9 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Lock "d4a8c153-7585-4c78-8aa4-56077e0a7af6" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 998.025620] env[61839]: DEBUG nova.scheduler.client.report [None req-0d0cf358-0b30-4267-bdcc-e309211a3bd4 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 998.039578] env[61839]: DEBUG oslo_vmware.api [None req-92b02466-d43a-4dfe-87c4-d668863cf3fe tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1314936, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.159866} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 998.039834] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-92b02466-d43a-4dfe-87c4-d668863cf3fe tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Deleted the datastore file {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 998.040042] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-92b02466-d43a-4dfe-87c4-d668863cf3fe tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 73b83239-bbc8-41d1-aec3-2b4519c320af] Deleted contents of the VM from datastore datastore2 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 998.040236] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-92b02466-d43a-4dfe-87c4-d668863cf3fe tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 73b83239-bbc8-41d1-aec3-2b4519c320af] Instance destroyed {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 998.040411] env[61839]: INFO nova.compute.manager [None req-92b02466-d43a-4dfe-87c4-d668863cf3fe tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 73b83239-bbc8-41d1-aec3-2b4519c320af] Took 0.75 seconds to destroy the instance on the hypervisor. [ 998.040657] env[61839]: DEBUG oslo.service.loopingcall [None req-92b02466-d43a-4dfe-87c4-d668863cf3fe tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 998.041425] env[61839]: DEBUG nova.compute.manager [-] [instance: 73b83239-bbc8-41d1-aec3-2b4519c320af] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 998.041518] env[61839]: DEBUG nova.network.neutron [-] [instance: 73b83239-bbc8-41d1-aec3-2b4519c320af] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 998.046908] env[61839]: DEBUG nova.compute.manager [req-bb56fcf6-b46f-4121-9a71-671bc5aeda1e req-3a567e09-a71d-4ed2-9a81-21efadfa23ff service nova] [instance: d718d866-dd6c-4332-b63a-be6850a5a785] Received event network-changed-a9bbfd1a-7c62-42c2-ba9b-587b125ef832 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 998.047174] env[61839]: DEBUG nova.compute.manager [req-bb56fcf6-b46f-4121-9a71-671bc5aeda1e req-3a567e09-a71d-4ed2-9a81-21efadfa23ff service nova] [instance: d718d866-dd6c-4332-b63a-be6850a5a785] Refreshing instance network info cache due to event network-changed-a9bbfd1a-7c62-42c2-ba9b-587b125ef832.
{{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 998.047316] env[61839]: DEBUG oslo_concurrency.lockutils [req-bb56fcf6-b46f-4121-9a71-671bc5aeda1e req-3a567e09-a71d-4ed2-9a81-21efadfa23ff service nova] Acquiring lock "refresh_cache-d718d866-dd6c-4332-b63a-be6850a5a785" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 998.103532] env[61839]: DEBUG oslo_concurrency.lockutils [None req-f9f5c2a8-403c-41f6-a435-d8a3d0ddf2bb tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Releasing lock "refresh_cache-d718d866-dd6c-4332-b63a-be6850a5a785" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 998.104292] env[61839]: DEBUG oslo_concurrency.lockutils [None req-f9f5c2a8-403c-41f6-a435-d8a3d0ddf2bb tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Acquiring lock "d718d866-dd6c-4332-b63a-be6850a5a785" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 998.104292] env[61839]: DEBUG oslo_concurrency.lockutils [None req-f9f5c2a8-403c-41f6-a435-d8a3d0ddf2bb tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Acquired lock "d718d866-dd6c-4332-b63a-be6850a5a785" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 998.104533] env[61839]: DEBUG oslo_concurrency.lockutils [req-bb56fcf6-b46f-4121-9a71-671bc5aeda1e req-3a567e09-a71d-4ed2-9a81-21efadfa23ff service nova] Acquired lock "refresh_cache-d718d866-dd6c-4332-b63a-be6850a5a785" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 998.104717] env[61839]: DEBUG nova.network.neutron [req-bb56fcf6-b46f-4121-9a71-671bc5aeda1e req-3a567e09-a71d-4ed2-9a81-21efadfa23ff service nova] [instance: d718d866-dd6c-4332-b63a-be6850a5a785] Refreshing network info cache for port a9bbfd1a-7c62-42c2-ba9b-587b125ef832 {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 998.106556] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4d90fac-12af-406d-8ae8-19baa7b9b71d {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.128560] env[61839]: DEBUG nova.virt.hardware [None req-f9f5c2a8-403c-41f6-a435-d8a3d0ddf2bb tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-18T16:51:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=<?>,container_format=<?>,created_at=<?>,direct_url=<?>,disk_format=<?>,id=<?>,min_disk=<?>,min_ram=<?>,name=<?>,owner=<?>,properties=ImageMetaProps,protected=<?>,size=<?>,status=<?>,tags=<?>,updated_at=<?>,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 998.128802] env[61839]: DEBUG nova.virt.hardware [None req-f9f5c2a8-403c-41f6-a435-d8a3d0ddf2bb tempest-AttachInterfacesTestJSON-1805184072
tempest-AttachInterfacesTestJSON-1805184072-project-member] Flavor limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 998.128966] env[61839]: DEBUG nova.virt.hardware [None req-f9f5c2a8-403c-41f6-a435-d8a3d0ddf2bb tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Image limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 998.129172] env[61839]: DEBUG nova.virt.hardware [None req-f9f5c2a8-403c-41f6-a435-d8a3d0ddf2bb tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Flavor pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 998.129330] env[61839]: DEBUG nova.virt.hardware [None req-f9f5c2a8-403c-41f6-a435-d8a3d0ddf2bb tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Image pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 998.129486] env[61839]: DEBUG nova.virt.hardware [None req-f9f5c2a8-403c-41f6-a435-d8a3d0ddf2bb tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 998.129698] env[61839]: DEBUG nova.virt.hardware [None req-f9f5c2a8-403c-41f6-a435-d8a3d0ddf2bb tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 998.129863] env[61839]: DEBUG nova.virt.hardware [None req-f9f5c2a8-403c-41f6-a435-d8a3d0ddf2bb tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 998.130045] env[61839]: DEBUG nova.virt.hardware [None req-f9f5c2a8-403c-41f6-a435-d8a3d0ddf2bb tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Got 1 possible topologies {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 998.130222] env[61839]: DEBUG nova.virt.hardware [None req-f9f5c2a8-403c-41f6-a435-d8a3d0ddf2bb tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 998.130412] env[61839]: DEBUG nova.virt.hardware [None req-f9f5c2a8-403c-41f6-a435-d8a3d0ddf2bb tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 998.136619] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-f9f5c2a8-403c-41f6-a435-d8a3d0ddf2bb tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: d718d866-dd6c-4332-b63a-be6850a5a785] Reconfiguring VM to attach interface 
{{(pid=61839) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 998.137138] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-19699638-21c7-436d-8e7d-04bed946a716 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.154913] env[61839]: DEBUG oslo_vmware.api [None req-f9f5c2a8-403c-41f6-a435-d8a3d0ddf2bb tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Waiting for the task: (returnval){ [ 998.154913] env[61839]: value = "task-1314937" [ 998.154913] env[61839]: _type = "Task" [ 998.154913] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 998.163266] env[61839]: DEBUG oslo_vmware.api [None req-f9f5c2a8-403c-41f6-a435-d8a3d0ddf2bb tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': task-1314937, 'name': ReconfigVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.380347] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a438555f-9d5b-445e-974d-7728accd5417 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Acquiring lock "a87f3a17-0a97-4b47-bc95-eee5975f8203" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 998.380636] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a438555f-9d5b-445e-974d-7728accd5417 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Lock "a87f3a17-0a97-4b47-bc95-eee5975f8203" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 998.380852] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a438555f-9d5b-445e-974d-7728accd5417 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Acquiring lock "a87f3a17-0a97-4b47-bc95-eee5975f8203-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 998.381127] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a438555f-9d5b-445e-974d-7728accd5417 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Lock "a87f3a17-0a97-4b47-bc95-eee5975f8203-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 998.381233] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a438555f-9d5b-445e-974d-7728accd5417 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Lock "a87f3a17-0a97-4b47-bc95-eee5975f8203-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 998.383686] env[61839]: INFO nova.compute.manager [None 
req-a438555f-9d5b-445e-974d-7728accd5417 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: a87f3a17-0a97-4b47-bc95-eee5975f8203] Terminating instance [ 998.385635] env[61839]: DEBUG nova.compute.manager [None req-a438555f-9d5b-445e-974d-7728accd5417 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: a87f3a17-0a97-4b47-bc95-eee5975f8203] Start destroying the instance on the hypervisor. {{(pid=61839) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 998.385831] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-a438555f-9d5b-445e-974d-7728accd5417 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: a87f3a17-0a97-4b47-bc95-eee5975f8203] Destroying instance {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 998.387219] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fa74021-73ac-4967-b13a-843faaca40f9 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.396430] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-a438555f-9d5b-445e-974d-7728accd5417 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: a87f3a17-0a97-4b47-bc95-eee5975f8203] Unregistering the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 998.396663] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6a77138b-0fb6-4baa-90bd-4979e0df8027 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.462206] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-a438555f-9d5b-445e-974d-7728accd5417 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: a87f3a17-0a97-4b47-bc95-eee5975f8203] Unregistered the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 998.462445] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-a438555f-9d5b-445e-974d-7728accd5417 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: a87f3a17-0a97-4b47-bc95-eee5975f8203] Deleting contents of the VM from datastore datastore2 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 998.462637] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-a438555f-9d5b-445e-974d-7728accd5417 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Deleting the datastore file [datastore2] a87f3a17-0a97-4b47-bc95-eee5975f8203 {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 998.463027] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8544fc56-fd13-49bd-821d-f7e7f45d32e5 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.469461] env[61839]: DEBUG oslo_vmware.api [None req-a438555f-9d5b-445e-974d-7728accd5417 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Waiting for the task: (returnval){ [ 998.469461] env[61839]: value = "task-1314939" [ 998.469461] env[61839]: _type = "Task" [ 998.469461] env[61839]: } to 
complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 998.479807] env[61839]: DEBUG oslo_vmware.api [None req-a438555f-9d5b-445e-974d-7728accd5417 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': task-1314939, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.488675] env[61839]: INFO nova.compute.manager [None req-8dd885c2-6f9f-4e53-918a-447bb45e31c9 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] [instance: d4a8c153-7585-4c78-8aa4-56077e0a7af6] Detaching volume 6d100a91-2536-4c04-8112-17210c05edfb [ 998.525432] env[61839]: INFO nova.virt.block_device [None req-8dd885c2-6f9f-4e53-918a-447bb45e31c9 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] [instance: d4a8c153-7585-4c78-8aa4-56077e0a7af6] Attempting to driver detach volume 6d100a91-2536-4c04-8112-17210c05edfb from mountpoint /dev/sdb [ 998.525696] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-8dd885c2-6f9f-4e53-918a-447bb45e31c9 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] [instance: d4a8c153-7585-4c78-8aa4-56077e0a7af6] Volume detach. Driver type: vmdk {{(pid=61839) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 998.525912] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-8dd885c2-6f9f-4e53-918a-447bb45e31c9 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] [instance: d4a8c153-7585-4c78-8aa4-56077e0a7af6] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-281438', 'volume_id': '6d100a91-2536-4c04-8112-17210c05edfb', 'name': 'volume-6d100a91-2536-4c04-8112-17210c05edfb', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'd4a8c153-7585-4c78-8aa4-56077e0a7af6', 'attached_at': '', 'detached_at': '', 'volume_id': '6d100a91-2536-4c04-8112-17210c05edfb', 'serial': '6d100a91-2536-4c04-8112-17210c05edfb'} {{(pid=61839) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 998.526855] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-392681c7-393c-44ff-922b-e97ba1c96ae7 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.548786] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0d0cf358-0b30-4267-bdcc-e309211a3bd4 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.329s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 998.549346] env[61839]: DEBUG nova.compute.manager [None req-0d0cf358-0b30-4267-bdcc-e309211a3bd4 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 8e9bea05-d6d7-40a8-997d-8c952f596f75] Start building networks asynchronously for instance. 
{{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 998.552713] env[61839]: DEBUG oslo_concurrency.lockutils [None req-4adac468-11aa-40c8-bff0-c8fe43509589 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.682s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 998.552979] env[61839]: DEBUG nova.objects.instance [None req-4adac468-11aa-40c8-bff0-c8fe43509589 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Lazy-loading 'resources' on Instance uuid 694a5d4b-3673-406b-a24a-d37fad33e549 {{(pid=61839) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 998.554970] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-187e6f94-7e3a-453b-92c9-f819f0272b32 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.565042] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11701e3e-d5fd-4c77-8aaa-a84cc85706d1 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.591357] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c34b72a9-74b7-4882-bcc8-5b2ba93e00dc {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.608883] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-8dd885c2-6f9f-4e53-918a-447bb45e31c9 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] The volume has not been displaced from its original location: [datastore2] volume-6d100a91-2536-4c04-8112-17210c05edfb/volume-6d100a91-2536-4c04-8112-17210c05edfb.vmdk. No consolidation needed. {{(pid=61839) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 998.615682] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-8dd885c2-6f9f-4e53-918a-447bb45e31c9 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] [instance: d4a8c153-7585-4c78-8aa4-56077e0a7af6] Reconfiguring VM instance instance-0000005a to detach disk 2001 {{(pid=61839) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 998.618228] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-345c534a-e18c-4c17-89f6-e070b0919771 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.641291] env[61839]: DEBUG oslo_vmware.api [None req-8dd885c2-6f9f-4e53-918a-447bb45e31c9 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Waiting for the task: (returnval){ [ 998.641291] env[61839]: value = "task-1314940" [ 998.641291] env[61839]: _type = "Task" [ 998.641291] env[61839]: } to complete. 
{{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 998.651069] env[61839]: DEBUG oslo_vmware.api [None req-8dd885c2-6f9f-4e53-918a-447bb45e31c9 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Task: {'id': task-1314940, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.665607] env[61839]: DEBUG oslo_vmware.api [None req-f9f5c2a8-403c-41f6-a435-d8a3d0ddf2bb tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': task-1314937, 'name': ReconfigVM_Task} progress is 99%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.681615] env[61839]: DEBUG oslo_concurrency.lockutils [None req-4441f351-3933-454b-aa8a-c8307395c223 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Acquiring lock "refresh_cache-86525ea7-af75-4b10-85a1-c0fbab73ea5f" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 998.681615] env[61839]: DEBUG oslo_concurrency.lockutils [None req-4441f351-3933-454b-aa8a-c8307395c223 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Acquired lock "refresh_cache-86525ea7-af75-4b10-85a1-c0fbab73ea5f" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 998.681615] env[61839]: DEBUG nova.network.neutron [None req-4441f351-3933-454b-aa8a-c8307395c223 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] [instance: 86525ea7-af75-4b10-85a1-c0fbab73ea5f] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 998.859277] env[61839]: DEBUG nova.network.neutron [-] [instance: 73b83239-bbc8-41d1-aec3-2b4519c320af] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 998.887546] env[61839]: DEBUG nova.network.neutron [req-bb56fcf6-b46f-4121-9a71-671bc5aeda1e req-3a567e09-a71d-4ed2-9a81-21efadfa23ff service nova] [instance: d718d866-dd6c-4332-b63a-be6850a5a785] Updated VIF entry in instance network info cache for port a9bbfd1a-7c62-42c2-ba9b-587b125ef832. 
{{(pid=61839) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 998.888160] env[61839]: DEBUG nova.network.neutron [req-bb56fcf6-b46f-4121-9a71-671bc5aeda1e req-3a567e09-a71d-4ed2-9a81-21efadfa23ff service nova] [instance: d718d866-dd6c-4332-b63a-be6850a5a785] Updating instance_info_cache with network_info: [{"id": "5d041d96-4a6e-44d5-a31a-e597194524e0", "address": "fa:16:3e:a9:bd:d2", "network": {"id": "41c98894-de91-45eb-a390-6217e0f9dca5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1040806357-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.245", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7e03467b7fba46a9aac1562a1cb8368e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "572b7281-aad3-45fa-9cb2-fc1c70569948", "external-id": "nsx-vlan-transportzone-722", "segmentation_id": 722, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5d041d96-4a", "ovs_interfaceid": "5d041d96-4a6e-44d5-a31a-e597194524e0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "7aee83a4-620a-48c2-a47b-7d47e05a7a07", "address": "fa:16:3e:11:b9:34", "network": {"id": "41c98894-de91-45eb-a390-6217e0f9dca5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1040806357-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7e03467b7fba46a9aac1562a1cb8368e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "572b7281-aad3-45fa-9cb2-fc1c70569948", "external-id": "nsx-vlan-transportzone-722", "segmentation_id": 722, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7aee83a4-62", "ovs_interfaceid": "7aee83a4-620a-48c2-a47b-7d47e05a7a07", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "a9bbfd1a-7c62-42c2-ba9b-587b125ef832", "address": "fa:16:3e:61:7c:34", "network": {"id": "41c98894-de91-45eb-a390-6217e0f9dca5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1040806357-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7e03467b7fba46a9aac1562a1cb8368e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, 
"nsx-logical-switch-id": "572b7281-aad3-45fa-9cb2-fc1c70569948", "external-id": "nsx-vlan-transportzone-722", "segmentation_id": 722, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa9bbfd1a-7c", "ovs_interfaceid": "a9bbfd1a-7c62-42c2-ba9b-587b125ef832", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 998.980239] env[61839]: DEBUG oslo_vmware.api [None req-a438555f-9d5b-445e-974d-7728accd5417 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': task-1314939, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.160769} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 998.980505] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-a438555f-9d5b-445e-974d-7728accd5417 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Deleted the datastore file {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 998.980696] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-a438555f-9d5b-445e-974d-7728accd5417 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: a87f3a17-0a97-4b47-bc95-eee5975f8203] Deleted contents of the VM from datastore datastore2 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 998.980879] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-a438555f-9d5b-445e-974d-7728accd5417 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: a87f3a17-0a97-4b47-bc95-eee5975f8203] Instance destroyed {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 998.981072] env[61839]: INFO nova.compute.manager [None req-a438555f-9d5b-445e-974d-7728accd5417 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: a87f3a17-0a97-4b47-bc95-eee5975f8203] Took 0.60 seconds to destroy the instance on the hypervisor. [ 998.981328] env[61839]: DEBUG oslo.service.loopingcall [None req-a438555f-9d5b-445e-974d-7728accd5417 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 998.981528] env[61839]: DEBUG nova.compute.manager [-] [instance: a87f3a17-0a97-4b47-bc95-eee5975f8203] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 998.981621] env[61839]: DEBUG nova.network.neutron [-] [instance: a87f3a17-0a97-4b47-bc95-eee5975f8203] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 999.054446] env[61839]: DEBUG nova.compute.utils [None req-0d0cf358-0b30-4267-bdcc-e309211a3bd4 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Using /dev/sd instead of None {{(pid=61839) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 999.056712] env[61839]: DEBUG nova.compute.manager [None req-0d0cf358-0b30-4267-bdcc-e309211a3bd4 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 8e9bea05-d6d7-40a8-997d-8c952f596f75] Allocating IP information in the background. {{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 999.056946] env[61839]: DEBUG nova.network.neutron [None req-0d0cf358-0b30-4267-bdcc-e309211a3bd4 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 8e9bea05-d6d7-40a8-997d-8c952f596f75] allocate_for_instance() {{(pid=61839) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 999.110372] env[61839]: DEBUG nova.policy [None req-0d0cf358-0b30-4267-bdcc-e309211a3bd4 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8677a31386a54087b2328734c2eadeb3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8935bcc7ee644cb7a2a33557a708189c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61839) authorize /opt/stack/nova/nova/policy.py:201}} [ 999.154905] env[61839]: DEBUG oslo_vmware.api [None req-8dd885c2-6f9f-4e53-918a-447bb45e31c9 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Task: {'id': task-1314940, 'name': ReconfigVM_Task, 'duration_secs': 0.236134} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 999.154905] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-8dd885c2-6f9f-4e53-918a-447bb45e31c9 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] [instance: d4a8c153-7585-4c78-8aa4-56077e0a7af6] Reconfigured VM instance instance-0000005a to detach disk 2001 {{(pid=61839) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 999.158859] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8bbbb8ea-612d-40cc-9e46-984545ddca61 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.183438] env[61839]: DEBUG oslo_vmware.api [None req-f9f5c2a8-403c-41f6-a435-d8a3d0ddf2bb tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': task-1314937, 'name': ReconfigVM_Task, 'duration_secs': 0.544238} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 999.187503] env[61839]: DEBUG oslo_concurrency.lockutils [None req-f9f5c2a8-403c-41f6-a435-d8a3d0ddf2bb tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Releasing lock "d718d866-dd6c-4332-b63a-be6850a5a785" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 999.187743] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-f9f5c2a8-403c-41f6-a435-d8a3d0ddf2bb tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: d718d866-dd6c-4332-b63a-be6850a5a785] Reconfigured VM to attach interface {{(pid=61839) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 999.192953] env[61839]: DEBUG oslo_vmware.api [None req-8dd885c2-6f9f-4e53-918a-447bb45e31c9 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Waiting for the task: (returnval){ [ 999.192953] env[61839]: value = "task-1314941" [ 999.192953] env[61839]: _type = "Task" [ 999.192953] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 999.207519] env[61839]: DEBUG oslo_vmware.api [None req-8dd885c2-6f9f-4e53-918a-447bb45e31c9 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Task: {'id': task-1314941, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.305083] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-957c0b85-e07e-4fd2-88f9-e0cb6ee335a5 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.313719] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50d80944-bbde-4660-852a-09665aa989ca {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.352174] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df89af00-4f8c-4d35-8227-299bbd155c7a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.362720] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0018b010-628f-44fe-9409-2a55cfd6364d {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.368973] env[61839]: INFO nova.compute.manager [-] [instance: 73b83239-bbc8-41d1-aec3-2b4519c320af] Took 1.33 seconds to deallocate network for instance. [ 999.369655] env[61839]: DEBUG nova.network.neutron [None req-0d0cf358-0b30-4267-bdcc-e309211a3bd4 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 8e9bea05-d6d7-40a8-997d-8c952f596f75] Successfully created port: c59a02ca-52d1-49bd-a536-7eadb7307eb4 {{(pid=61839) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 999.386679] env[61839]: DEBUG nova.compute.provider_tree [None req-4adac468-11aa-40c8-bff0-c8fe43509589 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 999.394688] env[61839]: DEBUG oslo_concurrency.lockutils [req-bb56fcf6-b46f-4121-9a71-671bc5aeda1e req-3a567e09-a71d-4ed2-9a81-21efadfa23ff service nova] Releasing lock "refresh_cache-d718d866-dd6c-4332-b63a-be6850a5a785" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 999.418776] env[61839]: DEBUG nova.compute.manager [req-8e65974d-f330-49aa-bf9b-555d75b207a5 req-fb471bcb-6637-4b4d-a4b6-6c6a8bb5065b service nova] [instance: 73b83239-bbc8-41d1-aec3-2b4519c320af] Received event network-vif-deleted-3dbee357-54cc-4fa4-826b-24aa98397b45 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 999.418978] env[61839]: DEBUG nova.compute.manager [req-8e65974d-f330-49aa-bf9b-555d75b207a5 req-fb471bcb-6637-4b4d-a4b6-6c6a8bb5065b service nova] [instance: a87f3a17-0a97-4b47-bc95-eee5975f8203] Received event network-vif-deleted-08ce136a-85a7-43c8-924c-0bef574f8bfc {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 999.419184] env[61839]: INFO nova.compute.manager [req-8e65974d-f330-49aa-bf9b-555d75b207a5 req-fb471bcb-6637-4b4d-a4b6-6c6a8bb5065b service nova] [instance: a87f3a17-0a97-4b47-bc95-eee5975f8203] Neutron deleted interface 08ce136a-85a7-43c8-924c-0bef574f8bfc; detaching it from the instance and deleting it from the info cache [ 999.419366] env[61839]: DEBUG 
nova.network.neutron [req-8e65974d-f330-49aa-bf9b-555d75b207a5 req-fb471bcb-6637-4b4d-a4b6-6c6a8bb5065b service nova] [instance: a87f3a17-0a97-4b47-bc95-eee5975f8203] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 999.546231] env[61839]: DEBUG nova.network.neutron [None req-4441f351-3933-454b-aa8a-c8307395c223 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] [instance: 86525ea7-af75-4b10-85a1-c0fbab73ea5f] Updating instance_info_cache with network_info: [{"id": "ef8176cf-7494-44f4-a600-7dedff162419", "address": "fa:16:3e:c1:30:b3", "network": {"id": "8bda8ac4-b34c-4577-ae5e-07845e9e7428", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-74282814-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c7a300fe2748456bb4a522a4d7c0d0f4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d1e1e320-ec56-4fcc-b6e9-30aa210d3b36", "external-id": "nsx-vlan-transportzone-447", "segmentation_id": 447, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapef8176cf-74", "ovs_interfaceid": "ef8176cf-7494-44f4-a600-7dedff162419", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 999.563375] env[61839]: DEBUG nova.compute.manager [None req-0d0cf358-0b30-4267-bdcc-e309211a3bd4 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 8e9bea05-d6d7-40a8-997d-8c952f596f75] Start building block device mappings for instance. {{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 999.698475] env[61839]: DEBUG oslo_concurrency.lockutils [None req-f9f5c2a8-403c-41f6-a435-d8a3d0ddf2bb tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Lock "interface-d718d866-dd6c-4332-b63a-be6850a5a785-a9bbfd1a-7c62-42c2-ba9b-587b125ef832" "released" by "nova.compute.manager.ComputeManager.attach_interface.<locals>.do_attach_interface" :: held 6.404s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 999.709263] env[61839]: DEBUG oslo_vmware.api [None req-8dd885c2-6f9f-4e53-918a-447bb45e31c9 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Task: {'id': task-1314941, 'name': ReconfigVM_Task, 'duration_secs': 0.144171} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 999.709517] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-8dd885c2-6f9f-4e53-918a-447bb45e31c9 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] [instance: d4a8c153-7585-4c78-8aa4-56077e0a7af6] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-281438', 'volume_id': '6d100a91-2536-4c04-8112-17210c05edfb', 'name': 'volume-6d100a91-2536-4c04-8112-17210c05edfb', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'd4a8c153-7585-4c78-8aa4-56077e0a7af6', 'attached_at': '', 'detached_at': '', 'volume_id': '6d100a91-2536-4c04-8112-17210c05edfb', 'serial': '6d100a91-2536-4c04-8112-17210c05edfb'} {{(pid=61839) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 999.856110] env[61839]: DEBUG nova.network.neutron [-] [instance: a87f3a17-0a97-4b47-bc95-eee5975f8203] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 999.890453] env[61839]: DEBUG oslo_concurrency.lockutils [None req-92b02466-d43a-4dfe-87c4-d668863cf3fe tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 999.891056] env[61839]: DEBUG nova.scheduler.client.report [None req-4adac468-11aa-40c8-bff0-c8fe43509589 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 999.923267] env[61839]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2af46003-a5dc-4cc9-b78c-d6378e4aacad {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.933565] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81cd9913-0f1a-4913-a073-32c58a1a6b69 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.962024] env[61839]: DEBUG nova.compute.manager [req-8e65974d-f330-49aa-bf9b-555d75b207a5 req-fb471bcb-6637-4b4d-a4b6-6c6a8bb5065b service nova] [instance: a87f3a17-0a97-4b47-bc95-eee5975f8203] Detach interface failed, port_id=08ce136a-85a7-43c8-924c-0bef574f8bfc, reason: Instance a87f3a17-0a97-4b47-bc95-eee5975f8203 could not be found. 
{{(pid=61839) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1000.049237] env[61839]: DEBUG oslo_concurrency.lockutils [None req-4441f351-3933-454b-aa8a-c8307395c223 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Releasing lock "refresh_cache-86525ea7-af75-4b10-85a1-c0fbab73ea5f" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1000.050310] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fbf42b7-6b1e-41f3-8705-b68b24f45a45 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.057397] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-4441f351-3933-454b-aa8a-c8307395c223 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] [instance: 86525ea7-af75-4b10-85a1-c0fbab73ea5f] Resuming the VM {{(pid=61839) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1183}} [ 1000.057665] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-49a1ef68-7e34-40c4-993b-74196565c442 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.064200] env[61839]: DEBUG oslo_vmware.api [None req-4441f351-3933-454b-aa8a-c8307395c223 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Waiting for the task: (returnval){ [ 1000.064200] env[61839]: value = "task-1314942" [ 1000.064200] env[61839]: _type = "Task" [ 1000.064200] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1000.075502] env[61839]: DEBUG oslo_vmware.api [None req-4441f351-3933-454b-aa8a-c8307395c223 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Task: {'id': task-1314942, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.251462] env[61839]: DEBUG nova.objects.instance [None req-8dd885c2-6f9f-4e53-918a-447bb45e31c9 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Lazy-loading 'flavor' on Instance uuid d4a8c153-7585-4c78-8aa4-56077e0a7af6 {{(pid=61839) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1000.360631] env[61839]: INFO nova.compute.manager [-] [instance: a87f3a17-0a97-4b47-bc95-eee5975f8203] Took 1.38 seconds to deallocate network for instance. 
[ 1000.395877] env[61839]: DEBUG oslo_concurrency.lockutils [None req-4adac468-11aa-40c8-bff0-c8fe43509589 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.843s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1000.398262] env[61839]: DEBUG oslo_concurrency.lockutils [None req-dff80fa7-071d-4022-95f6-38022e3c7b75 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.055s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1000.398528] env[61839]: DEBUG nova.objects.instance [None req-dff80fa7-071d-4022-95f6-38022e3c7b75 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Lazy-loading 'resources' on Instance uuid 23ee24d5-bccd-497d-a53f-b9723fd9c707 {{(pid=61839) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1000.420747] env[61839]: INFO nova.scheduler.client.report [None req-4adac468-11aa-40c8-bff0-c8fe43509589 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Deleted allocations for instance 694a5d4b-3673-406b-a24a-d37fad33e549 [ 1000.572843] env[61839]: DEBUG nova.compute.manager [None req-0d0cf358-0b30-4267-bdcc-e309211a3bd4 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 8e9bea05-d6d7-40a8-997d-8c952f596f75] Start spawning the instance on the hypervisor. {{(pid=61839) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1000.585417] env[61839]: DEBUG oslo_vmware.api [None req-4441f351-3933-454b-aa8a-c8307395c223 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Task: {'id': task-1314942, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.626852] env[61839]: DEBUG nova.virt.hardware [None req-0d0cf358-0b30-4267-bdcc-e309211a3bd4 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-18T16:51:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-18T16:50:57Z,direct_url=,disk_format='vmdk',id=e497cc62-282a-4a70-9770-22d80d8a1013,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a98e034276e44534a9feace637762da7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-18T16:50:58Z,virtual_size=,visibility=), allow threads: False {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1000.627175] env[61839]: DEBUG nova.virt.hardware [None req-0d0cf358-0b30-4267-bdcc-e309211a3bd4 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Flavor limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1000.627380] env[61839]: DEBUG nova.virt.hardware [None req-0d0cf358-0b30-4267-bdcc-e309211a3bd4 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Image limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1000.627591] env[61839]: DEBUG nova.virt.hardware [None req-0d0cf358-0b30-4267-bdcc-e309211a3bd4 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Flavor pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1000.627743] env[61839]: DEBUG nova.virt.hardware [None req-0d0cf358-0b30-4267-bdcc-e309211a3bd4 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Image pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1000.627900] env[61839]: DEBUG nova.virt.hardware [None req-0d0cf358-0b30-4267-bdcc-e309211a3bd4 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1000.628731] env[61839]: DEBUG nova.virt.hardware [None req-0d0cf358-0b30-4267-bdcc-e309211a3bd4 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1000.628979] env[61839]: DEBUG nova.virt.hardware [None req-0d0cf358-0b30-4267-bdcc-e309211a3bd4 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1000.629220] 
env[61839]: DEBUG nova.virt.hardware [None req-0d0cf358-0b30-4267-bdcc-e309211a3bd4 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Got 1 possible topologies {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1000.629465] env[61839]: DEBUG nova.virt.hardware [None req-0d0cf358-0b30-4267-bdcc-e309211a3bd4 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1000.629686] env[61839]: DEBUG nova.virt.hardware [None req-0d0cf358-0b30-4267-bdcc-e309211a3bd4 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1000.631761] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f34f3f8e-861f-49d7-9edf-d63078048af4 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.653124] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04c5e64e-5eda-45c9-ad6d-125ac4cad8ea {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.868450] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a438555f-9d5b-445e-974d-7728accd5417 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1000.929674] env[61839]: DEBUG oslo_concurrency.lockutils [None req-4adac468-11aa-40c8-bff0-c8fe43509589 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Lock "694a5d4b-3673-406b-a24a-d37fad33e549" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 10.237s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1000.990487] env[61839]: DEBUG nova.compute.manager [req-a778de5e-3f21-4329-9bf4-cfb1beb6d3b2 req-21697e6a-9b35-40dc-bd9a-76898a8ec50d service nova] [instance: 8e9bea05-d6d7-40a8-997d-8c952f596f75] Received event network-vif-plugged-c59a02ca-52d1-49bd-a536-7eadb7307eb4 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1000.990727] env[61839]: DEBUG oslo_concurrency.lockutils [req-a778de5e-3f21-4329-9bf4-cfb1beb6d3b2 req-21697e6a-9b35-40dc-bd9a-76898a8ec50d service nova] Acquiring lock "8e9bea05-d6d7-40a8-997d-8c952f596f75-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1000.991844] env[61839]: DEBUG oslo_concurrency.lockutils [req-a778de5e-3f21-4329-9bf4-cfb1beb6d3b2 req-21697e6a-9b35-40dc-bd9a-76898a8ec50d service nova] Lock "8e9bea05-d6d7-40a8-997d-8c952f596f75-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.001s {{(pid=61839) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1000.992036] env[61839]: DEBUG oslo_concurrency.lockutils [req-a778de5e-3f21-4329-9bf4-cfb1beb6d3b2 req-21697e6a-9b35-40dc-bd9a-76898a8ec50d service nova] Lock "8e9bea05-d6d7-40a8-997d-8c952f596f75-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.001s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1000.992503] env[61839]: DEBUG nova.compute.manager [req-a778de5e-3f21-4329-9bf4-cfb1beb6d3b2 req-21697e6a-9b35-40dc-bd9a-76898a8ec50d service nova] [instance: 8e9bea05-d6d7-40a8-997d-8c952f596f75] No waiting events found dispatching network-vif-plugged-c59a02ca-52d1-49bd-a536-7eadb7307eb4 {{(pid=61839) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1000.992503] env[61839]: WARNING nova.compute.manager [req-a778de5e-3f21-4329-9bf4-cfb1beb6d3b2 req-21697e6a-9b35-40dc-bd9a-76898a8ec50d service nova] [instance: 8e9bea05-d6d7-40a8-997d-8c952f596f75] Received unexpected event network-vif-plugged-c59a02ca-52d1-49bd-a536-7eadb7307eb4 for instance with vm_state building and task_state spawning. [ 1001.080973] env[61839]: DEBUG oslo_vmware.api [None req-4441f351-3933-454b-aa8a-c8307395c223 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Task: {'id': task-1314942, 'name': PowerOnVM_Task, 'duration_secs': 0.596082} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1001.083971] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-4441f351-3933-454b-aa8a-c8307395c223 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] [instance: 86525ea7-af75-4b10-85a1-c0fbab73ea5f] Resumed the VM {{(pid=61839) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1188}} [ 1001.084206] env[61839]: DEBUG nova.compute.manager [None req-4441f351-3933-454b-aa8a-c8307395c223 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] [instance: 86525ea7-af75-4b10-85a1-c0fbab73ea5f] Checking state {{(pid=61839) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1001.085791] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-184f83e9-1fc9-493f-9d17-a9afc427f584 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.138427] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2d2b3c8-a79d-49ef-940a-db320556976f {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.147506] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f687035c-3e80-4099-82e7-a89f795ff22d {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.196850] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11878b46-0c76-44bb-b31f-014908a5278f {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.206290] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-52ff47ae-93d0-4799-afbc-54d1556f742d {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.221977] env[61839]: DEBUG nova.compute.provider_tree [None req-dff80fa7-071d-4022-95f6-38022e3c7b75 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1001.260277] env[61839]: DEBUG oslo_concurrency.lockutils [None req-8dd885c2-6f9f-4e53-918a-447bb45e31c9 tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Lock "d4a8c153-7585-4c78-8aa4-56077e0a7af6" "released" by "nova.compute.manager.ComputeManager.detach_volume.<locals>.do_detach_volume" :: held 3.275s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1001.536193] env[61839]: DEBUG oslo_concurrency.lockutils [None req-090b33ea-ca3b-446b-9b43-95a19f225291 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Acquiring lock "interface-d718d866-dd6c-4332-b63a-be6850a5a785-7aee83a4-620a-48c2-a47b-7d47e05a7a07" by "nova.compute.manager.ComputeManager.detach_interface.<locals>.do_detach_interface" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1001.536621] env[61839]: DEBUG oslo_concurrency.lockutils [None req-090b33ea-ca3b-446b-9b43-95a19f225291 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Lock "interface-d718d866-dd6c-4332-b63a-be6850a5a785-7aee83a4-620a-48c2-a47b-7d47e05a7a07" acquired by "nova.compute.manager.ComputeManager.detach_interface.<locals>.do_detach_interface" :: waited 0.001s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1001.566960] env[61839]: DEBUG nova.network.neutron [None req-0d0cf358-0b30-4267-bdcc-e309211a3bd4 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 8e9bea05-d6d7-40a8-997d-8c952f596f75] Successfully updated port: c59a02ca-52d1-49bd-a536-7eadb7307eb4 {{(pid=61839) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1001.576788] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a0f8ac31-c920-4938-9206-5ad9182b53cb tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Acquiring lock "d4a8c153-7585-4c78-8aa4-56077e0a7af6" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1001.576788] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a0f8ac31-c920-4938-9206-5ad9182b53cb tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Lock "d4a8c153-7585-4c78-8aa4-56077e0a7af6" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1001.576788] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a0f8ac31-c920-4938-9206-5ad9182b53cb tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Acquiring lock 
"d4a8c153-7585-4c78-8aa4-56077e0a7af6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1001.576788] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a0f8ac31-c920-4938-9206-5ad9182b53cb tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Lock "d4a8c153-7585-4c78-8aa4-56077e0a7af6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1001.576788] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a0f8ac31-c920-4938-9206-5ad9182b53cb tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Lock "d4a8c153-7585-4c78-8aa4-56077e0a7af6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1001.578377] env[61839]: INFO nova.compute.manager [None req-a0f8ac31-c920-4938-9206-5ad9182b53cb tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] [instance: d4a8c153-7585-4c78-8aa4-56077e0a7af6] Terminating instance [ 1001.581178] env[61839]: DEBUG nova.compute.manager [None req-a0f8ac31-c920-4938-9206-5ad9182b53cb tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] [instance: d4a8c153-7585-4c78-8aa4-56077e0a7af6] Start destroying the instance on the hypervisor. {{(pid=61839) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1001.581380] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-a0f8ac31-c920-4938-9206-5ad9182b53cb tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] [instance: d4a8c153-7585-4c78-8aa4-56077e0a7af6] Destroying instance {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1001.582353] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-877298cc-de85-4658-b7ae-b5d74744c4e0 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.587955] env[61839]: DEBUG nova.compute.manager [req-27ad9fe9-2283-4734-987e-03658b1155bd req-dbd9030f-d487-45c5-9326-7fb2f31e75c1 service nova] [instance: 8e9bea05-d6d7-40a8-997d-8c952f596f75] Received event network-changed-c59a02ca-52d1-49bd-a536-7eadb7307eb4 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1001.588168] env[61839]: DEBUG nova.compute.manager [req-27ad9fe9-2283-4734-987e-03658b1155bd req-dbd9030f-d487-45c5-9326-7fb2f31e75c1 service nova] [instance: 8e9bea05-d6d7-40a8-997d-8c952f596f75] Refreshing instance network info cache due to event network-changed-c59a02ca-52d1-49bd-a536-7eadb7307eb4. 
[ 1001.588420] env[61839]: DEBUG oslo_concurrency.lockutils [req-27ad9fe9-2283-4734-987e-03658b1155bd req-dbd9030f-d487-45c5-9326-7fb2f31e75c1 service nova] Acquiring lock "refresh_cache-8e9bea05-d6d7-40a8-997d-8c952f596f75" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1001.588569] env[61839]: DEBUG oslo_concurrency.lockutils [req-27ad9fe9-2283-4734-987e-03658b1155bd req-dbd9030f-d487-45c5-9326-7fb2f31e75c1 service nova] Acquired lock "refresh_cache-8e9bea05-d6d7-40a8-997d-8c952f596f75" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1001.588742] env[61839]: DEBUG nova.network.neutron [req-27ad9fe9-2283-4734-987e-03658b1155bd req-dbd9030f-d487-45c5-9326-7fb2f31e75c1 service nova] [instance: 8e9bea05-d6d7-40a8-997d-8c952f596f75] Refreshing network info cache for port c59a02ca-52d1-49bd-a536-7eadb7307eb4 {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}}
[ 1001.592490] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-a0f8ac31-c920-4938-9206-5ad9182b53cb tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] [instance: d4a8c153-7585-4c78-8aa4-56077e0a7af6] Powering off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}}
[ 1001.592906] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-da78e792-19b3-4eb3-a458-212f133e9e84 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1001.601613] env[61839]: DEBUG oslo_vmware.api [None req-a0f8ac31-c920-4938-9206-5ad9182b53cb tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Waiting for the task: (returnval){
[ 1001.601613] env[61839]: value = "task-1314943"
[ 1001.601613] env[61839]: _type = "Task"
[ 1001.601613] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1001.611682] env[61839]: DEBUG oslo_vmware.api [None req-a0f8ac31-c920-4938-9206-5ad9182b53cb tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Task: {'id': task-1314943, 'name': PowerOffVM_Task} progress is 0%.
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.725656] env[61839]: DEBUG nova.scheduler.client.report [None req-dff80fa7-071d-4022-95f6-38022e3c7b75 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1002.040039] env[61839]: DEBUG oslo_concurrency.lockutils [None req-090b33ea-ca3b-446b-9b43-95a19f225291 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Acquiring lock "d718d866-dd6c-4332-b63a-be6850a5a785" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1002.040280] env[61839]: DEBUG oslo_concurrency.lockutils [None req-090b33ea-ca3b-446b-9b43-95a19f225291 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Acquired lock "d718d866-dd6c-4332-b63a-be6850a5a785" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1002.041250] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fc2f0a6-532d-4c47-a215-32cf70541a26 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.061310] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62621ead-46be-4008-a0df-e63b36d6db51 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.084762] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0d0cf358-0b30-4267-bdcc-e309211a3bd4 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Acquiring lock "refresh_cache-8e9bea05-d6d7-40a8-997d-8c952f596f75" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1002.090390] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-090b33ea-ca3b-446b-9b43-95a19f225291 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: d718d866-dd6c-4332-b63a-be6850a5a785] Reconfiguring VM to detach interface {{(pid=61839) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1002.090690] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7596f3cd-45ab-455b-b486-9636cb16400a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.113836] env[61839]: DEBUG oslo_vmware.api [None req-a0f8ac31-c920-4938-9206-5ad9182b53cb tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Task: {'id': task-1314943, 'name': PowerOffVM_Task, 'duration_secs': 0.454746} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1002.115135] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-a0f8ac31-c920-4938-9206-5ad9182b53cb tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] [instance: d4a8c153-7585-4c78-8aa4-56077e0a7af6] Powered off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1002.115330] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-a0f8ac31-c920-4938-9206-5ad9182b53cb tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] [instance: d4a8c153-7585-4c78-8aa4-56077e0a7af6] Unregistering the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1002.116026] env[61839]: DEBUG oslo_vmware.api [None req-090b33ea-ca3b-446b-9b43-95a19f225291 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Waiting for the task: (returnval){ [ 1002.116026] env[61839]: value = "task-1314944" [ 1002.116026] env[61839]: _type = "Task" [ 1002.116026] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1002.116193] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4297bf9c-447e-4db3-a797-cfdec691b9b3 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.126680] env[61839]: DEBUG oslo_vmware.api [None req-090b33ea-ca3b-446b-9b43-95a19f225291 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': task-1314944, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.137839] env[61839]: DEBUG nova.network.neutron [req-27ad9fe9-2283-4734-987e-03658b1155bd req-dbd9030f-d487-45c5-9326-7fb2f31e75c1 service nova] [instance: 8e9bea05-d6d7-40a8-997d-8c952f596f75] Instance cache missing network info. 
{{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1002.223475] env[61839]: DEBUG nova.network.neutron [req-27ad9fe9-2283-4734-987e-03658b1155bd req-dbd9030f-d487-45c5-9326-7fb2f31e75c1 service nova] [instance: 8e9bea05-d6d7-40a8-997d-8c952f596f75] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1002.230714] env[61839]: DEBUG oslo_concurrency.lockutils [None req-dff80fa7-071d-4022-95f6-38022e3c7b75 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.832s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1002.233431] env[61839]: DEBUG oslo_concurrency.lockutils [None req-03fce1b6-c3ef-40c6-9a3d-6a8cc4836ddc tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 7.069s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1002.233664] env[61839]: DEBUG nova.objects.instance [None req-03fce1b6-c3ef-40c6-9a3d-6a8cc4836ddc tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Lazy-loading 'resources' on Instance uuid 75344275-bdf2-4526-a101-e62ec270dd72 {{(pid=61839) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1002.259235] env[61839]: INFO nova.scheduler.client.report [None req-dff80fa7-071d-4022-95f6-38022e3c7b75 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Deleted allocations for instance 23ee24d5-bccd-497d-a53f-b9723fd9c707 [ 1002.320445] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-a0f8ac31-c920-4938-9206-5ad9182b53cb tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] [instance: d4a8c153-7585-4c78-8aa4-56077e0a7af6] Unregistered the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1002.320772] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-a0f8ac31-c920-4938-9206-5ad9182b53cb tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] [instance: d4a8c153-7585-4c78-8aa4-56077e0a7af6] Deleting contents of the VM from datastore datastore1 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1002.321088] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-a0f8ac31-c920-4938-9206-5ad9182b53cb tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Deleting the datastore file [datastore1] d4a8c153-7585-4c78-8aa4-56077e0a7af6 {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1002.321438] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-673c9096-b72e-40cc-aba2-7857e538f29e {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.333635] env[61839]: DEBUG oslo_vmware.api [None req-a0f8ac31-c920-4938-9206-5ad9182b53cb tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Waiting for the task: (returnval){ [ 1002.333635] 
env[61839]: value = "task-1314946" [ 1002.333635] env[61839]: _type = "Task" [ 1002.333635] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1002.343135] env[61839]: DEBUG oslo_vmware.api [None req-a0f8ac31-c920-4938-9206-5ad9182b53cb tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Task: {'id': task-1314946, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.628616] env[61839]: DEBUG oslo_vmware.api [None req-090b33ea-ca3b-446b-9b43-95a19f225291 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': task-1314944, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.726424] env[61839]: DEBUG oslo_concurrency.lockutils [req-27ad9fe9-2283-4734-987e-03658b1155bd req-dbd9030f-d487-45c5-9326-7fb2f31e75c1 service nova] Releasing lock "refresh_cache-8e9bea05-d6d7-40a8-997d-8c952f596f75" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1002.726900] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0d0cf358-0b30-4267-bdcc-e309211a3bd4 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Acquired lock "refresh_cache-8e9bea05-d6d7-40a8-997d-8c952f596f75" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1002.727125] env[61839]: DEBUG nova.network.neutron [None req-0d0cf358-0b30-4267-bdcc-e309211a3bd4 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 8e9bea05-d6d7-40a8-997d-8c952f596f75] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1002.774700] env[61839]: DEBUG oslo_concurrency.lockutils [None req-dff80fa7-071d-4022-95f6-38022e3c7b75 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Lock "23ee24d5-bccd-497d-a53f-b9723fd9c707" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 11.487s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1002.845674] env[61839]: DEBUG oslo_vmware.api [None req-a0f8ac31-c920-4938-9206-5ad9182b53cb tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Task: {'id': task-1314946, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.143294} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1002.845946] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-a0f8ac31-c920-4938-9206-5ad9182b53cb tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Deleted the datastore file {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}}
[ 1002.846175] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-a0f8ac31-c920-4938-9206-5ad9182b53cb tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] [instance: d4a8c153-7585-4c78-8aa4-56077e0a7af6] Deleted contents of the VM from datastore datastore1 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}}
[ 1002.846370] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-a0f8ac31-c920-4938-9206-5ad9182b53cb tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] [instance: d4a8c153-7585-4c78-8aa4-56077e0a7af6] Instance destroyed {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}}
[ 1002.846549] env[61839]: INFO nova.compute.manager [None req-a0f8ac31-c920-4938-9206-5ad9182b53cb tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] [instance: d4a8c153-7585-4c78-8aa4-56077e0a7af6] Took 1.27 seconds to destroy the instance on the hypervisor.
[ 1002.846798] env[61839]: DEBUG oslo.service.loopingcall [None req-a0f8ac31-c920-4938-9206-5ad9182b53cb tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
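The oslo.service entry just above ("Waiting for function ..._deallocate_network_with_retries to return", loopingcall.py:435) is the FixedIntervalLoopingCall idiom: run a function on a fixed interval until it raises LoopingCallDone. A minimal sketch of a retried teardown, assuming illustrative names (deallocate_network and the retry budget are stand-ins, not Nova's actual deallocation logic):

from oslo_service import loopingcall

def deallocate_network():
    """Stand-in for the real Neutron teardown; may fail transiently."""

def make_deallocator(max_attempts=3):
    attempts = {'n': 0}
    def _deallocate_with_retries():
        attempts['n'] += 1
        try:
            deallocate_network()
        except Exception:
            if attempts['n'] >= max_attempts:
                raise              # final failure propagates to the waiter
            return                 # loop schedules another attempt
        raise loopingcall.LoopingCallDone()  # success stops the loop
    return _deallocate_with_retries

timer = loopingcall.FixedIntervalLoopingCall(make_deallocator())
timer.start(interval=2).wait()     # block until done, as the DEBUG line records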
[ 1002.846991] env[61839]: DEBUG nova.compute.manager [-] [instance: d4a8c153-7585-4c78-8aa4-56077e0a7af6] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}}
[ 1002.847100] env[61839]: DEBUG nova.network.neutron [-] [instance: d4a8c153-7585-4c78-8aa4-56077e0a7af6] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}}
[ 1002.903826] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc3f4cb9-6a00-408e-9667-1b27da3d16e1 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1002.914399] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0252e984-ec61-49ba-af16-9fcdb9d17d91 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1002.951578] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef3bf636-07c2-40e9-b28b-a44bfd845b7a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1002.960179] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fc21bcf-50db-4361-a7e5-58379117c50a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1002.974370] env[61839]: DEBUG nova.compute.provider_tree [None req-03fce1b6-c3ef-40c6-9a3d-6a8cc4836ddc tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1003.130194] env[61839]: DEBUG oslo_vmware.api [None req-090b33ea-ca3b-446b-9b43-95a19f225291 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': task-1314944, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1003.261352] env[61839]: DEBUG nova.network.neutron [None req-0d0cf358-0b30-4267-bdcc-e309211a3bd4 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 8e9bea05-d6d7-40a8-997d-8c952f596f75] Instance cache missing network info.
{{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1003.477372] env[61839]: DEBUG nova.scheduler.client.report [None req-03fce1b6-c3ef-40c6-9a3d-6a8cc4836ddc tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1003.515978] env[61839]: DEBUG nova.network.neutron [None req-0d0cf358-0b30-4267-bdcc-e309211a3bd4 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 8e9bea05-d6d7-40a8-997d-8c952f596f75] Updating instance_info_cache with network_info: [{"id": "c59a02ca-52d1-49bd-a536-7eadb7307eb4", "address": "fa:16:3e:58:fe:cb", "network": {"id": "2334f355-8fd7-4df2-ba1c-a28b5fde97f2", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-202913707-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8935bcc7ee644cb7a2a33557a708189c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "37434b93-dfdc-4a3f-bf5a-9f2cbe25a754", "external-id": "nsx-vlan-transportzone-676", "segmentation_id": 676, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc59a02ca-52", "ovs_interfaceid": "c59a02ca-52d1-49bd-a536-7eadb7307eb4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1003.629909] env[61839]: DEBUG oslo_vmware.api [None req-090b33ea-ca3b-446b-9b43-95a19f225291 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': task-1314944, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.652673] env[61839]: DEBUG oslo_concurrency.lockutils [None req-17010d58-34f2-48b6-9ea5-ab1084631cde tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Acquiring lock "a1defab7-8433-411d-b7e2-c31f6a34b8e0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1003.653764] env[61839]: DEBUG oslo_concurrency.lockutils [None req-17010d58-34f2-48b6-9ea5-ab1084631cde tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Lock "a1defab7-8433-411d-b7e2-c31f6a34b8e0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1003.711727] env[61839]: DEBUG nova.compute.manager [req-27ded58d-b2d6-430d-acb4-0c87f3449bfa req-ddf304e8-0121-46de-8a64-6e8d16a9f9ce service nova] [instance: d4a8c153-7585-4c78-8aa4-56077e0a7af6] Received event network-vif-deleted-648944d5-7ed5-40cb-8a22-8ea3244538d7 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1003.711772] env[61839]: INFO nova.compute.manager [req-27ded58d-b2d6-430d-acb4-0c87f3449bfa req-ddf304e8-0121-46de-8a64-6e8d16a9f9ce service nova] [instance: d4a8c153-7585-4c78-8aa4-56077e0a7af6] Neutron deleted interface 648944d5-7ed5-40cb-8a22-8ea3244538d7; detaching it from the instance and deleting it from the info cache [ 1003.711909] env[61839]: DEBUG nova.network.neutron [req-27ded58d-b2d6-430d-acb4-0c87f3449bfa req-ddf304e8-0121-46de-8a64-6e8d16a9f9ce service nova] [instance: d4a8c153-7585-4c78-8aa4-56077e0a7af6] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1003.819057] env[61839]: DEBUG nova.network.neutron [-] [instance: d4a8c153-7585-4c78-8aa4-56077e0a7af6] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1003.986154] env[61839]: DEBUG oslo_concurrency.lockutils [None req-03fce1b6-c3ef-40c6-9a3d-6a8cc4836ddc tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.752s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1003.988907] env[61839]: DEBUG oslo_concurrency.lockutils [None req-09fb80b3-5be1-4023-9fa0-7b2ce3a7a3f4 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 8.637s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1003.989228] env[61839]: DEBUG nova.objects.instance [None req-09fb80b3-5be1-4023-9fa0-7b2ce3a7a3f4 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Lazy-loading 'resources' on Instance uuid 5c29c188-a34b-4751-9f8b-166af7b15088 {{(pid=61839) obj_load_attr 
/opt/stack/nova/nova/objects/instance.py:1141}} [ 1004.012618] env[61839]: INFO nova.scheduler.client.report [None req-03fce1b6-c3ef-40c6-9a3d-6a8cc4836ddc tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Deleted allocations for instance 75344275-bdf2-4526-a101-e62ec270dd72 [ 1004.021015] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0d0cf358-0b30-4267-bdcc-e309211a3bd4 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Releasing lock "refresh_cache-8e9bea05-d6d7-40a8-997d-8c952f596f75" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1004.021015] env[61839]: DEBUG nova.compute.manager [None req-0d0cf358-0b30-4267-bdcc-e309211a3bd4 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 8e9bea05-d6d7-40a8-997d-8c952f596f75] Instance network_info: |[{"id": "c59a02ca-52d1-49bd-a536-7eadb7307eb4", "address": "fa:16:3e:58:fe:cb", "network": {"id": "2334f355-8fd7-4df2-ba1c-a28b5fde97f2", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-202913707-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8935bcc7ee644cb7a2a33557a708189c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "37434b93-dfdc-4a3f-bf5a-9f2cbe25a754", "external-id": "nsx-vlan-transportzone-676", "segmentation_id": 676, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc59a02ca-52", "ovs_interfaceid": "c59a02ca-52d1-49bd-a536-7eadb7307eb4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1004.021015] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-0d0cf358-0b30-4267-bdcc-e309211a3bd4 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 8e9bea05-d6d7-40a8-997d-8c952f596f75] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:58:fe:cb', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '37434b93-dfdc-4a3f-bf5a-9f2cbe25a754', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c59a02ca-52d1-49bd-a536-7eadb7307eb4', 'vif_model': 'vmxnet3'}] {{(pid=61839) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1004.027336] env[61839]: DEBUG oslo.service.loopingcall [None req-0d0cf358-0b30-4267-bdcc-e309211a3bd4 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1004.027692] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8e9bea05-d6d7-40a8-997d-8c952f596f75] Creating VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1004.028028] env[61839]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-430e81a9-0f3f-470e-8013-bf8cd23af52b {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.051036] env[61839]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1004.051036] env[61839]: value = "task-1314947" [ 1004.051036] env[61839]: _type = "Task" [ 1004.051036] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1004.061633] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314947, 'name': CreateVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.128927] env[61839]: DEBUG oslo_vmware.api [None req-090b33ea-ca3b-446b-9b43-95a19f225291 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': task-1314944, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.155279] env[61839]: DEBUG nova.compute.manager [None req-17010d58-34f2-48b6-9ea5-ab1084631cde tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: a1defab7-8433-411d-b7e2-c31f6a34b8e0] Starting instance... {{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1004.214734] env[61839]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f14319d8-fd74-4bf4-bbd4-f97926e16695 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.225115] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43f8a00a-d3c6-4361-b646-05a5ecfd95a2 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.254334] env[61839]: DEBUG nova.compute.manager [req-27ded58d-b2d6-430d-acb4-0c87f3449bfa req-ddf304e8-0121-46de-8a64-6e8d16a9f9ce service nova] [instance: d4a8c153-7585-4c78-8aa4-56077e0a7af6] Detach interface failed, port_id=648944d5-7ed5-40cb-8a22-8ea3244538d7, reason: Instance d4a8c153-7585-4c78-8aa4-56077e0a7af6 could not be found. {{(pid=61839) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1004.321631] env[61839]: INFO nova.compute.manager [-] [instance: d4a8c153-7585-4c78-8aa4-56077e0a7af6] Took 1.47 seconds to deallocate network for instance. 
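Every vSphere task in this log (Folder.CreateVM_Task above, PowerOffVM_Task, ReconfigVM_Task, and so on) goes through the same oslo.vmware pair: invoke_api() issues the SOAP request and returns a Task reference, and wait_for_task() polls it, producing the "progress is N%" and "completed successfully" lines. A minimal sketch with placeholder endpoint and credentials and a hypothetical vm_ref (a live vCenter is required to actually run this):

from oslo_vmware import api

# Placeholder host/user/password; retry count and poll interval are example values.
session = api.VMwareAPISession('vc.example.test', 'user', 'secret',
                               api_retry_count=10, task_poll_interval=0.5)

def power_off(vm_ref):
    # vm_ref: a VirtualMachine managed-object reference obtained elsewhere.
    # invoke_api() logs "Invoking VirtualMachine.PowerOffVM_Task"; wait_for_task()
    # polls until the task reaches SUCCESS and raises if vSphere reports an error.
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    return session.wait_for_task(task)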
[ 1004.521378] env[61839]: DEBUG oslo_concurrency.lockutils [None req-03fce1b6-c3ef-40c6-9a3d-6a8cc4836ddc tempest-ServerShowV247Test-153956385 tempest-ServerShowV247Test-153956385-project-member] Lock "75344275-bdf2-4526-a101-e62ec270dd72" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 13.146s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1004.564294] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314947, 'name': CreateVM_Task, 'duration_secs': 0.336952} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1004.564294] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8e9bea05-d6d7-40a8-997d-8c952f596f75] Created VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1004.564944] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0d0cf358-0b30-4267-bdcc-e309211a3bd4 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1004.565187] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0d0cf358-0b30-4267-bdcc-e309211a3bd4 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1004.565532] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0d0cf358-0b30-4267-bdcc-e309211a3bd4 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1004.565817] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-95ed68f1-ca4f-4002-8115-e51367cac382 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.573983] env[61839]: DEBUG oslo_vmware.api [None req-0d0cf358-0b30-4267-bdcc-e309211a3bd4 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Waiting for the task: (returnval){ [ 1004.573983] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]5203bad6-f232-a3ea-32d3-862518b262f0" [ 1004.573983] env[61839]: _type = "Task" [ 1004.573983] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1004.584181] env[61839]: DEBUG oslo_vmware.api [None req-0d0cf358-0b30-4267-bdcc-e309211a3bd4 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]5203bad6-f232-a3ea-32d3-862518b262f0, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.631680] env[61839]: DEBUG oslo_vmware.api [None req-090b33ea-ca3b-446b-9b43-95a19f225291 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': task-1314944, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.658648] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbb6e9a5-be82-45f7-9e9f-7cd8a56dce39 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.668763] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7e1975e-c217-4a71-bf47-7a4994331b25 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.674235] env[61839]: DEBUG oslo_concurrency.lockutils [None req-17010d58-34f2-48b6-9ea5-ab1084631cde tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1004.699393] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-544d1f73-3dff-4521-a0fd-b8326ccc5689 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.707485] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2dd497ce-9a79-48ef-9a1a-66ea9c94b6e2 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.721896] env[61839]: DEBUG nova.compute.provider_tree [None req-09fb80b3-5be1-4023-9fa0-7b2ce3a7a3f4 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1004.827935] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a0f8ac31-c920-4938-9206-5ad9182b53cb tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1005.086857] env[61839]: DEBUG oslo_vmware.api [None req-0d0cf358-0b30-4267-bdcc-e309211a3bd4 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]5203bad6-f232-a3ea-32d3-862518b262f0, 'name': SearchDatastore_Task, 'duration_secs': 0.010291} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1005.087205] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0d0cf358-0b30-4267-bdcc-e309211a3bd4 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1005.087452] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-0d0cf358-0b30-4267-bdcc-e309211a3bd4 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 8e9bea05-d6d7-40a8-997d-8c952f596f75] Processing image e497cc62-282a-4a70-9770-22d80d8a1013 {{(pid=61839) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1005.087696] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0d0cf358-0b30-4267-bdcc-e309211a3bd4 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1005.087847] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0d0cf358-0b30-4267-bdcc-e309211a3bd4 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1005.088050] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-0d0cf358-0b30-4267-bdcc-e309211a3bd4 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1005.088331] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4cbd9f19-b5eb-4d21-a9e4-5fa9950cc86c {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.098488] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-0d0cf358-0b30-4267-bdcc-e309211a3bd4 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1005.098941] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-0d0cf358-0b30-4267-bdcc-e309211a3bd4 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61839) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1005.099458] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8bf997be-06d6-4d9c-8abd-e9fbc7c1f4bf {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.105343] env[61839]: DEBUG oslo_vmware.api [None req-0d0cf358-0b30-4267-bdcc-e309211a3bd4 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Waiting for the task: (returnval){ [ 1005.105343] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52804f2c-c5b4-a574-12c0-82e91eb83e5e" [ 1005.105343] env[61839]: _type = "Task" [ 1005.105343] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1005.114672] env[61839]: DEBUG oslo_vmware.api [None req-0d0cf358-0b30-4267-bdcc-e309211a3bd4 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52804f2c-c5b4-a574-12c0-82e91eb83e5e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.132011] env[61839]: DEBUG oslo_vmware.api [None req-090b33ea-ca3b-446b-9b43-95a19f225291 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': task-1314944, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.225442] env[61839]: DEBUG nova.scheduler.client.report [None req-09fb80b3-5be1-4023-9fa0-7b2ce3a7a3f4 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1005.615878] env[61839]: DEBUG oslo_vmware.api [None req-0d0cf358-0b30-4267-bdcc-e309211a3bd4 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52804f2c-c5b4-a574-12c0-82e91eb83e5e, 'name': SearchDatastore_Task, 'duration_secs': 0.010085} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1005.616756] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-28ca1429-84b3-4fed-9f5b-3057165cecff {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.622532] env[61839]: DEBUG oslo_vmware.api [None req-0d0cf358-0b30-4267-bdcc-e309211a3bd4 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Waiting for the task: (returnval){ [ 1005.622532] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]5282ccf0-6211-1f7d-8414-1aa3395a7296" [ 1005.622532] env[61839]: _type = "Task" [ 1005.622532] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1005.633471] env[61839]: DEBUG oslo_vmware.api [None req-090b33ea-ca3b-446b-9b43-95a19f225291 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': task-1314944, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.636374] env[61839]: DEBUG oslo_vmware.api [None req-0d0cf358-0b30-4267-bdcc-e309211a3bd4 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]5282ccf0-6211-1f7d-8414-1aa3395a7296, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.730054] env[61839]: DEBUG oslo_concurrency.lockutils [None req-09fb80b3-5be1-4023-9fa0-7b2ce3a7a3f4 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.741s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1005.733472] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a7050108-7622-4d4a-b278-16e5711e81d2 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 9.259s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1005.733472] env[61839]: DEBUG nova.objects.instance [None req-a7050108-7622-4d4a-b278-16e5711e81d2 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 73b83239-bbc8-41d1-aec3-2b4519c320af] Trying to apply a migration context that does not seem to be set for this instance {{(pid=61839) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1005.760278] env[61839]: INFO nova.scheduler.client.report [None req-09fb80b3-5be1-4023-9fa0-7b2ce3a7a3f4 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Deleted allocations for instance 5c29c188-a34b-4751-9f8b-166af7b15088 [ 1006.138356] env[61839]: DEBUG oslo_vmware.api [None req-090b33ea-ca3b-446b-9b43-95a19f225291 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': task-1314944, 'name': ReconfigVM_Task} 
progress is 14%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.139064] env[61839]: DEBUG oslo_vmware.api [None req-0d0cf358-0b30-4267-bdcc-e309211a3bd4 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]5282ccf0-6211-1f7d-8414-1aa3395a7296, 'name': SearchDatastore_Task, 'duration_secs': 0.009864} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1006.139174] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0d0cf358-0b30-4267-bdcc-e309211a3bd4 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1006.139673] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-0d0cf358-0b30-4267-bdcc-e309211a3bd4 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk to [datastore2] 8e9bea05-d6d7-40a8-997d-8c952f596f75/8e9bea05-d6d7-40a8-997d-8c952f596f75.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1006.139673] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4ef1a9a8-bb64-442e-8aa5-0d999d175a6e {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.146216] env[61839]: DEBUG oslo_vmware.api [None req-0d0cf358-0b30-4267-bdcc-e309211a3bd4 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Waiting for the task: (returnval){ [ 1006.146216] env[61839]: value = "task-1314948" [ 1006.146216] env[61839]: _type = "Task" [ 1006.146216] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1006.154074] env[61839]: DEBUG oslo_vmware.api [None req-0d0cf358-0b30-4267-bdcc-e309211a3bd4 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Task: {'id': task-1314948, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.268403] env[61839]: DEBUG oslo_concurrency.lockutils [None req-09fb80b3-5be1-4023-9fa0-7b2ce3a7a3f4 tempest-ServerRescueNegativeTestJSON-712447946 tempest-ServerRescueNegativeTestJSON-712447946-project-member] Lock "5c29c188-a34b-4751-9f8b-166af7b15088" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 13.928s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1006.636395] env[61839]: DEBUG oslo_vmware.api [None req-090b33ea-ca3b-446b-9b43-95a19f225291 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': task-1314944, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.656787] env[61839]: DEBUG oslo_vmware.api [None req-0d0cf358-0b30-4267-bdcc-e309211a3bd4 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Task: {'id': task-1314948, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.491994} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1006.657082] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-0d0cf358-0b30-4267-bdcc-e309211a3bd4 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk to [datastore2] 8e9bea05-d6d7-40a8-997d-8c952f596f75/8e9bea05-d6d7-40a8-997d-8c952f596f75.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1006.657323] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-0d0cf358-0b30-4267-bdcc-e309211a3bd4 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 8e9bea05-d6d7-40a8-997d-8c952f596f75] Extending root virtual disk to 1048576 {{(pid=61839) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1006.657593] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ad028a05-97b8-4ae9-801f-44a790168534 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.664018] env[61839]: DEBUG oslo_vmware.api [None req-0d0cf358-0b30-4267-bdcc-e309211a3bd4 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Waiting for the task: (returnval){ [ 1006.664018] env[61839]: value = "task-1314949" [ 1006.664018] env[61839]: _type = "Task" [ 1006.664018] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1006.672653] env[61839]: DEBUG oslo_vmware.api [None req-0d0cf358-0b30-4267-bdcc-e309211a3bd4 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Task: {'id': task-1314949, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.743234] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a7050108-7622-4d4a-b278-16e5711e81d2 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.011s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1006.744528] env[61839]: DEBUG oslo_concurrency.lockutils [None req-7a32e35e-a270-437e-befd-37ff47a7524e tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.086s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1006.746256] env[61839]: INFO nova.compute.claims [None req-7a32e35e-a270-437e-befd-37ff47a7524e tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 625a8fc1-23fc-4035-855f-3d3a963cdcea] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1007.058862] env[61839]: DEBUG oslo_concurrency.lockutils [None req-18b08b02-fe01-4ad2-8f1b-ff5471ae3022 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Acquiring lock "86525ea7-af75-4b10-85a1-c0fbab73ea5f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1007.059173] env[61839]: DEBUG oslo_concurrency.lockutils [None req-18b08b02-fe01-4ad2-8f1b-ff5471ae3022 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Lock "86525ea7-af75-4b10-85a1-c0fbab73ea5f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1007.059397] env[61839]: DEBUG oslo_concurrency.lockutils [None req-18b08b02-fe01-4ad2-8f1b-ff5471ae3022 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Acquiring lock "86525ea7-af75-4b10-85a1-c0fbab73ea5f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1007.059580] env[61839]: DEBUG oslo_concurrency.lockutils [None req-18b08b02-fe01-4ad2-8f1b-ff5471ae3022 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Lock "86525ea7-af75-4b10-85a1-c0fbab73ea5f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1007.059755] env[61839]: DEBUG oslo_concurrency.lockutils [None req-18b08b02-fe01-4ad2-8f1b-ff5471ae3022 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Lock "86525ea7-af75-4b10-85a1-c0fbab73ea5f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61839) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1007.062116] env[61839]: INFO nova.compute.manager [None req-18b08b02-fe01-4ad2-8f1b-ff5471ae3022 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] [instance: 86525ea7-af75-4b10-85a1-c0fbab73ea5f] Terminating instance [ 1007.063983] env[61839]: DEBUG nova.compute.manager [None req-18b08b02-fe01-4ad2-8f1b-ff5471ae3022 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] [instance: 86525ea7-af75-4b10-85a1-c0fbab73ea5f] Start destroying the instance on the hypervisor. {{(pid=61839) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1007.064197] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-18b08b02-fe01-4ad2-8f1b-ff5471ae3022 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] [instance: 86525ea7-af75-4b10-85a1-c0fbab73ea5f] Destroying instance {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1007.065052] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cf289cc-7c51-4512-810a-5ce512608f9f {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.073996] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-18b08b02-fe01-4ad2-8f1b-ff5471ae3022 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] [instance: 86525ea7-af75-4b10-85a1-c0fbab73ea5f] Powering off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1007.074510] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-93602614-f887-41e5-b0f9-5daf96ad3b5c {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.080853] env[61839]: DEBUG oslo_vmware.api [None req-18b08b02-fe01-4ad2-8f1b-ff5471ae3022 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Waiting for the task: (returnval){ [ 1007.080853] env[61839]: value = "task-1314950" [ 1007.080853] env[61839]: _type = "Task" [ 1007.080853] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1007.088735] env[61839]: DEBUG oslo_vmware.api [None req-18b08b02-fe01-4ad2-8f1b-ff5471ae3022 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Task: {'id': task-1314950, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.133399] env[61839]: DEBUG oslo_vmware.api [None req-090b33ea-ca3b-446b-9b43-95a19f225291 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': task-1314944, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.172873] env[61839]: DEBUG oslo_vmware.api [None req-0d0cf358-0b30-4267-bdcc-e309211a3bd4 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Task: {'id': task-1314949, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.592562] env[61839]: DEBUG oslo_vmware.api [None req-18b08b02-fe01-4ad2-8f1b-ff5471ae3022 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Task: {'id': task-1314950, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.634454] env[61839]: DEBUG oslo_vmware.api [None req-090b33ea-ca3b-446b-9b43-95a19f225291 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': task-1314944, 'name': ReconfigVM_Task} progress is 18%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.675011] env[61839]: DEBUG oslo_vmware.api [None req-0d0cf358-0b30-4267-bdcc-e309211a3bd4 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Task: {'id': task-1314949, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.851506} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1007.679192] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-0d0cf358-0b30-4267-bdcc-e309211a3bd4 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 8e9bea05-d6d7-40a8-997d-8c952f596f75] Extended root virtual disk {{(pid=61839) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1007.680149] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d7c355a-8c59-4ebf-8d15-25060dc93b7f {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.706604] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-0d0cf358-0b30-4267-bdcc-e309211a3bd4 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 8e9bea05-d6d7-40a8-997d-8c952f596f75] Reconfiguring VM instance instance-00000063 to attach disk [datastore2] 8e9bea05-d6d7-40a8-997d-8c952f596f75/8e9bea05-d6d7-40a8-997d-8c952f596f75.vmdk or device None with type sparse {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1007.706776] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-341385f4-9d49-4b0d-a4c9-c696d5475a8f {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.728222] env[61839]: DEBUG oslo_vmware.api [None req-0d0cf358-0b30-4267-bdcc-e309211a3bd4 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Waiting for the task: (returnval){ [ 1007.728222] env[61839]: value = "task-1314951" [ 1007.728222] env[61839]: _type = "Task" [ 1007.728222] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1007.736949] env[61839]: DEBUG oslo_vmware.api [None req-0d0cf358-0b30-4267-bdcc-e309211a3bd4 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Task: {'id': task-1314951, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.925594] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62a7b508-7a0c-475b-a467-3a077165dd94 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.938802] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41e954ee-2232-4b9b-be3f-09dde38b480e {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.970359] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bca8ca9b-14b4-4cd7-ac38-eaf347fecf1b {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.979263] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd521fca-db65-4a2b-acde-e9c8f6cc6439 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.996020] env[61839]: DEBUG nova.compute.provider_tree [None req-7a32e35e-a270-437e-befd-37ff47a7524e tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1008.094586] env[61839]: DEBUG oslo_vmware.api [None req-18b08b02-fe01-4ad2-8f1b-ff5471ae3022 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Task: {'id': task-1314950, 'name': PowerOffVM_Task, 'duration_secs': 0.588581} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1008.094982] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-18b08b02-fe01-4ad2-8f1b-ff5471ae3022 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] [instance: 86525ea7-af75-4b10-85a1-c0fbab73ea5f] Powered off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1008.095220] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-18b08b02-fe01-4ad2-8f1b-ff5471ae3022 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] [instance: 86525ea7-af75-4b10-85a1-c0fbab73ea5f] Unregistering the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1008.095621] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-db8c4197-3f30-4efa-aef8-2238f92d1da1 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.134804] env[61839]: DEBUG oslo_vmware.api [None req-090b33ea-ca3b-446b-9b43-95a19f225291 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': task-1314944, 'name': ReconfigVM_Task, 'duration_secs': 5.757443} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1008.135108] env[61839]: DEBUG oslo_concurrency.lockutils [None req-090b33ea-ca3b-446b-9b43-95a19f225291 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Releasing lock "d718d866-dd6c-4332-b63a-be6850a5a785" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1008.135339] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-090b33ea-ca3b-446b-9b43-95a19f225291 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: d718d866-dd6c-4332-b63a-be6850a5a785] Reconfigured VM to detach interface {{(pid=61839) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1008.200141] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-18b08b02-fe01-4ad2-8f1b-ff5471ae3022 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] [instance: 86525ea7-af75-4b10-85a1-c0fbab73ea5f] Unregistered the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1008.200141] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-18b08b02-fe01-4ad2-8f1b-ff5471ae3022 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] [instance: 86525ea7-af75-4b10-85a1-c0fbab73ea5f] Deleting contents of the VM from datastore datastore1 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1008.200141] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-18b08b02-fe01-4ad2-8f1b-ff5471ae3022 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Deleting the datastore file [datastore1] 86525ea7-af75-4b10-85a1-c0fbab73ea5f {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1008.200141] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b9f88ab5-20ac-4e48-add6-b41722be2727 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.208522] env[61839]: DEBUG oslo_vmware.api [None req-18b08b02-fe01-4ad2-8f1b-ff5471ae3022 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Waiting for the task: (returnval){ [ 1008.208522] env[61839]: value = "task-1314953" [ 1008.208522] env[61839]: _type = "Task" [ 1008.208522] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1008.217613] env[61839]: DEBUG oslo_vmware.api [None req-18b08b02-fe01-4ad2-8f1b-ff5471ae3022 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Task: {'id': task-1314953, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.238604] env[61839]: DEBUG oslo_vmware.api [None req-0d0cf358-0b30-4267-bdcc-e309211a3bd4 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Task: {'id': task-1314951, 'name': ReconfigVM_Task, 'duration_secs': 0.286997} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1008.239053] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-0d0cf358-0b30-4267-bdcc-e309211a3bd4 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 8e9bea05-d6d7-40a8-997d-8c952f596f75] Reconfigured VM instance instance-00000063 to attach disk [datastore2] 8e9bea05-d6d7-40a8-997d-8c952f596f75/8e9bea05-d6d7-40a8-997d-8c952f596f75.vmdk or device None with type sparse {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1008.239759] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-82d14550-7b8c-4ad7-aae3-aa3b3986daf0 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.246506] env[61839]: DEBUG oslo_vmware.api [None req-0d0cf358-0b30-4267-bdcc-e309211a3bd4 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Waiting for the task: (returnval){ [ 1008.246506] env[61839]: value = "task-1314954" [ 1008.246506] env[61839]: _type = "Task" [ 1008.246506] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1008.255177] env[61839]: DEBUG oslo_vmware.api [None req-0d0cf358-0b30-4267-bdcc-e309211a3bd4 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Task: {'id': task-1314954, 'name': Rename_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.371071] env[61839]: DEBUG nova.compute.manager [req-f303e656-3875-4340-94e1-46d31444aea6 req-1e60ad4a-8165-49a0-91b0-82f0b0fb711b service nova] [instance: d718d866-dd6c-4332-b63a-be6850a5a785] Received event network-vif-deleted-7aee83a4-620a-48c2-a47b-7d47e05a7a07 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1008.371071] env[61839]: INFO nova.compute.manager [req-f303e656-3875-4340-94e1-46d31444aea6 req-1e60ad4a-8165-49a0-91b0-82f0b0fb711b service nova] [instance: d718d866-dd6c-4332-b63a-be6850a5a785] Neutron deleted interface 7aee83a4-620a-48c2-a47b-7d47e05a7a07; detaching it from the instance and deleting it from the info cache [ 1008.371485] env[61839]: DEBUG nova.network.neutron [req-f303e656-3875-4340-94e1-46d31444aea6 req-1e60ad4a-8165-49a0-91b0-82f0b0fb711b service nova] [instance: d718d866-dd6c-4332-b63a-be6850a5a785] Updating instance_info_cache with network_info: [{"id": "5d041d96-4a6e-44d5-a31a-e597194524e0", "address": "fa:16:3e:a9:bd:d2", "network": {"id": "41c98894-de91-45eb-a390-6217e0f9dca5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1040806357-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.245", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7e03467b7fba46a9aac1562a1cb8368e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": 
"572b7281-aad3-45fa-9cb2-fc1c70569948", "external-id": "nsx-vlan-transportzone-722", "segmentation_id": 722, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5d041d96-4a", "ovs_interfaceid": "5d041d96-4a6e-44d5-a31a-e597194524e0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "a9bbfd1a-7c62-42c2-ba9b-587b125ef832", "address": "fa:16:3e:61:7c:34", "network": {"id": "41c98894-de91-45eb-a390-6217e0f9dca5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1040806357-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7e03467b7fba46a9aac1562a1cb8368e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "572b7281-aad3-45fa-9cb2-fc1c70569948", "external-id": "nsx-vlan-transportzone-722", "segmentation_id": 722, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa9bbfd1a-7c", "ovs_interfaceid": "a9bbfd1a-7c62-42c2-ba9b-587b125ef832", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1008.499142] env[61839]: DEBUG nova.scheduler.client.report [None req-7a32e35e-a270-437e-befd-37ff47a7524e tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1008.718832] env[61839]: DEBUG oslo_vmware.api [None req-18b08b02-fe01-4ad2-8f1b-ff5471ae3022 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Task: {'id': task-1314953, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.282964} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1008.719155] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-18b08b02-fe01-4ad2-8f1b-ff5471ae3022 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Deleted the datastore file {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1008.719363] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-18b08b02-fe01-4ad2-8f1b-ff5471ae3022 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] [instance: 86525ea7-af75-4b10-85a1-c0fbab73ea5f] Deleted contents of the VM from datastore datastore1 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1008.719552] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-18b08b02-fe01-4ad2-8f1b-ff5471ae3022 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] [instance: 86525ea7-af75-4b10-85a1-c0fbab73ea5f] Instance destroyed {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1008.719734] env[61839]: INFO nova.compute.manager [None req-18b08b02-fe01-4ad2-8f1b-ff5471ae3022 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] [instance: 86525ea7-af75-4b10-85a1-c0fbab73ea5f] Took 1.66 seconds to destroy the instance on the hypervisor. [ 1008.719985] env[61839]: DEBUG oslo.service.loopingcall [None req-18b08b02-fe01-4ad2-8f1b-ff5471ae3022 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1008.720205] env[61839]: DEBUG nova.compute.manager [-] [instance: 86525ea7-af75-4b10-85a1-c0fbab73ea5f] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1008.720301] env[61839]: DEBUG nova.network.neutron [-] [instance: 86525ea7-af75-4b10-85a1-c0fbab73ea5f] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1008.756797] env[61839]: DEBUG oslo_vmware.api [None req-0d0cf358-0b30-4267-bdcc-e309211a3bd4 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Task: {'id': task-1314954, 'name': Rename_Task, 'duration_secs': 0.179971} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1008.757142] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-0d0cf358-0b30-4267-bdcc-e309211a3bd4 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 8e9bea05-d6d7-40a8-997d-8c952f596f75] Powering on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1008.757369] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5e809dea-c2d8-4313-a702-dd20851427a5 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.765259] env[61839]: DEBUG oslo_vmware.api [None req-0d0cf358-0b30-4267-bdcc-e309211a3bd4 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Waiting for the task: (returnval){ [ 1008.765259] env[61839]: value = "task-1314955" [ 1008.765259] env[61839]: _type = "Task" [ 1008.765259] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1008.774876] env[61839]: DEBUG oslo_vmware.api [None req-0d0cf358-0b30-4267-bdcc-e309211a3bd4 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Task: {'id': task-1314955, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.878571] env[61839]: DEBUG oslo_concurrency.lockutils [req-f303e656-3875-4340-94e1-46d31444aea6 req-1e60ad4a-8165-49a0-91b0-82f0b0fb711b service nova] Acquiring lock "d718d866-dd6c-4332-b63a-be6850a5a785" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1008.878789] env[61839]: DEBUG oslo_concurrency.lockutils [req-f303e656-3875-4340-94e1-46d31444aea6 req-1e60ad4a-8165-49a0-91b0-82f0b0fb711b service nova] Acquired lock "d718d866-dd6c-4332-b63a-be6850a5a785" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1008.879668] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-406399b5-7589-422c-a416-d2c0175055d0 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.900038] env[61839]: DEBUG oslo_concurrency.lockutils [req-f303e656-3875-4340-94e1-46d31444aea6 req-1e60ad4a-8165-49a0-91b0-82f0b0fb711b service nova] Releasing lock "d718d866-dd6c-4332-b63a-be6850a5a785" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1008.900648] env[61839]: WARNING nova.compute.manager [req-f303e656-3875-4340-94e1-46d31444aea6 req-1e60ad4a-8165-49a0-91b0-82f0b0fb711b service nova] [instance: d718d866-dd6c-4332-b63a-be6850a5a785] Detach interface failed, port_id=7aee83a4-620a-48c2-a47b-7d47e05a7a07, reason: No device with interface-id 7aee83a4-620a-48c2-a47b-7d47e05a7a07 exists on VM: nova.exception.NotFound: No device with interface-id 7aee83a4-620a-48c2-a47b-7d47e05a7a07 exists on VM [ 1009.005040] env[61839]: DEBUG oslo_concurrency.lockutils [None req-7a32e35e-a270-437e-befd-37ff47a7524e tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" 
:: held 2.260s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1009.005672] env[61839]: DEBUG nova.compute.manager [None req-7a32e35e-a270-437e-befd-37ff47a7524e tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 625a8fc1-23fc-4035-855f-3d3a963cdcea] Start building networks asynchronously for instance. {{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1009.008423] env[61839]: DEBUG oslo_concurrency.lockutils [None req-e789b1d9-8865-4e5c-a56e-08cef8f5eef8 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 11.468s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1009.008631] env[61839]: DEBUG nova.objects.instance [None req-e789b1d9-8865-4e5c-a56e-08cef8f5eef8 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Lazy-loading 'resources' on Instance uuid 3e27bc4a-a4f3-4929-931a-0c3ecaf10e65 {{(pid=61839) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1009.280468] env[61839]: DEBUG oslo_vmware.api [None req-0d0cf358-0b30-4267-bdcc-e309211a3bd4 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Task: {'id': task-1314955, 'name': PowerOnVM_Task} progress is 88%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.347128] env[61839]: DEBUG oslo_concurrency.lockutils [None req-090b33ea-ca3b-446b-9b43-95a19f225291 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Acquiring lock "refresh_cache-d718d866-dd6c-4332-b63a-be6850a5a785" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1009.347495] env[61839]: DEBUG oslo_concurrency.lockutils [None req-090b33ea-ca3b-446b-9b43-95a19f225291 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Acquired lock "refresh_cache-d718d866-dd6c-4332-b63a-be6850a5a785" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1009.347816] env[61839]: DEBUG nova.network.neutron [None req-090b33ea-ca3b-446b-9b43-95a19f225291 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: d718d866-dd6c-4332-b63a-be6850a5a785] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1009.381096] env[61839]: DEBUG nova.compute.manager [req-b6288b66-19ee-4c36-ba98-4a94659cc7c5 req-232d726f-5b37-4ea1-891d-679692c50137 service nova] [instance: d718d866-dd6c-4332-b63a-be6850a5a785] Received event network-vif-deleted-a9bbfd1a-7c62-42c2-ba9b-587b125ef832 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1009.381321] env[61839]: INFO nova.compute.manager [req-b6288b66-19ee-4c36-ba98-4a94659cc7c5 req-232d726f-5b37-4ea1-891d-679692c50137 service nova] [instance: d718d866-dd6c-4332-b63a-be6850a5a785] Neutron deleted interface a9bbfd1a-7c62-42c2-ba9b-587b125ef832; detaching it from the instance and deleting it from the info cache [ 1009.381625] env[61839]: DEBUG nova.network.neutron [req-b6288b66-19ee-4c36-ba98-4a94659cc7c5 
req-232d726f-5b37-4ea1-891d-679692c50137 service nova] [instance: d718d866-dd6c-4332-b63a-be6850a5a785] Updating instance_info_cache with network_info: [{"id": "5d041d96-4a6e-44d5-a31a-e597194524e0", "address": "fa:16:3e:a9:bd:d2", "network": {"id": "41c98894-de91-45eb-a390-6217e0f9dca5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1040806357-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.245", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7e03467b7fba46a9aac1562a1cb8368e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "572b7281-aad3-45fa-9cb2-fc1c70569948", "external-id": "nsx-vlan-transportzone-722", "segmentation_id": 722, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5d041d96-4a", "ovs_interfaceid": "5d041d96-4a6e-44d5-a31a-e597194524e0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1009.467925] env[61839]: DEBUG nova.network.neutron [-] [instance: 86525ea7-af75-4b10-85a1-c0fbab73ea5f] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1009.512082] env[61839]: DEBUG nova.compute.utils [None req-7a32e35e-a270-437e-befd-37ff47a7524e tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Using /dev/sd instead of None {{(pid=61839) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1009.513587] env[61839]: DEBUG nova.compute.manager [None req-7a32e35e-a270-437e-befd-37ff47a7524e tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 625a8fc1-23fc-4035-855f-3d3a963cdcea] Allocating IP information in the background. 
{{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1009.513767] env[61839]: DEBUG nova.network.neutron [None req-7a32e35e-a270-437e-befd-37ff47a7524e tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 625a8fc1-23fc-4035-855f-3d3a963cdcea] allocate_for_instance() {{(pid=61839) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1009.558014] env[61839]: DEBUG nova.policy [None req-7a32e35e-a270-437e-befd-37ff47a7524e tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8abcff2ffe534da3983ec78c3671110d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5f789f3900a347b59c491e9d141fb9e7', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61839) authorize /opt/stack/nova/nova/policy.py:201}} [ 1009.657407] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d936fb2f-b1e4-45bc-8b6a-b66a45f6fb5d {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.665759] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3566c08-b11b-427e-ab16-4181510d9de4 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.696631] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7ca4e53-2a35-4ba2-8938-37313f62258a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.704741] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e778571-381c-45a7-b720-082d305ae6c1 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.718412] env[61839]: DEBUG nova.compute.provider_tree [None req-e789b1d9-8865-4e5c-a56e-08cef8f5eef8 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1009.778463] env[61839]: DEBUG oslo_vmware.api [None req-0d0cf358-0b30-4267-bdcc-e309211a3bd4 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Task: {'id': task-1314955, 'name': PowerOnVM_Task, 'duration_secs': 0.661238} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1009.778780] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-0d0cf358-0b30-4267-bdcc-e309211a3bd4 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 8e9bea05-d6d7-40a8-997d-8c952f596f75] Powered on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1009.778997] env[61839]: INFO nova.compute.manager [None req-0d0cf358-0b30-4267-bdcc-e309211a3bd4 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 8e9bea05-d6d7-40a8-997d-8c952f596f75] Took 9.21 seconds to spawn the instance on the hypervisor. [ 1009.779204] env[61839]: DEBUG nova.compute.manager [None req-0d0cf358-0b30-4267-bdcc-e309211a3bd4 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 8e9bea05-d6d7-40a8-997d-8c952f596f75] Checking state {{(pid=61839) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1009.780299] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ff123a2-da90-48df-9928-f0221ead3e80 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.822237] env[61839]: DEBUG nova.network.neutron [None req-7a32e35e-a270-437e-befd-37ff47a7524e tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 625a8fc1-23fc-4035-855f-3d3a963cdcea] Successfully created port: da11baa5-354e-440b-a384-10cd83ff2715 {{(pid=61839) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1009.857634] env[61839]: DEBUG oslo_concurrency.lockutils [None req-63522841-9a40-4375-9d2e-01b5cde9d722 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Acquiring lock "d718d866-dd6c-4332-b63a-be6850a5a785" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1009.857901] env[61839]: DEBUG oslo_concurrency.lockutils [None req-63522841-9a40-4375-9d2e-01b5cde9d722 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Lock "d718d866-dd6c-4332-b63a-be6850a5a785" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1009.858123] env[61839]: DEBUG oslo_concurrency.lockutils [None req-63522841-9a40-4375-9d2e-01b5cde9d722 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Acquiring lock "d718d866-dd6c-4332-b63a-be6850a5a785-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1009.858314] env[61839]: DEBUG oslo_concurrency.lockutils [None req-63522841-9a40-4375-9d2e-01b5cde9d722 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Lock "d718d866-dd6c-4332-b63a-be6850a5a785-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61839) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1009.858486] env[61839]: DEBUG oslo_concurrency.lockutils [None req-63522841-9a40-4375-9d2e-01b5cde9d722 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Lock "d718d866-dd6c-4332-b63a-be6850a5a785-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1009.860455] env[61839]: INFO nova.compute.manager [None req-63522841-9a40-4375-9d2e-01b5cde9d722 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: d718d866-dd6c-4332-b63a-be6850a5a785] Terminating instance [ 1009.862386] env[61839]: DEBUG nova.compute.manager [None req-63522841-9a40-4375-9d2e-01b5cde9d722 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: d718d866-dd6c-4332-b63a-be6850a5a785] Start destroying the instance on the hypervisor. {{(pid=61839) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1009.862585] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-63522841-9a40-4375-9d2e-01b5cde9d722 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: d718d866-dd6c-4332-b63a-be6850a5a785] Destroying instance {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1009.863404] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d36395e0-0a8a-4dc4-859c-636075b8fbda {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.871940] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-63522841-9a40-4375-9d2e-01b5cde9d722 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: d718d866-dd6c-4332-b63a-be6850a5a785] Powering off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1009.872205] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2cbd93a1-5e7c-4131-ae5b-c3ebf26aa22e {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.879950] env[61839]: DEBUG oslo_vmware.api [None req-63522841-9a40-4375-9d2e-01b5cde9d722 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Waiting for the task: (returnval){ [ 1009.879950] env[61839]: value = "task-1314956" [ 1009.879950] env[61839]: _type = "Task" [ 1009.879950] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1009.883798] env[61839]: DEBUG oslo_concurrency.lockutils [req-b6288b66-19ee-4c36-ba98-4a94659cc7c5 req-232d726f-5b37-4ea1-891d-679692c50137 service nova] Acquiring lock "d718d866-dd6c-4332-b63a-be6850a5a785" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1009.890794] env[61839]: DEBUG oslo_vmware.api [None req-63522841-9a40-4375-9d2e-01b5cde9d722 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': task-1314956, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.971331] env[61839]: INFO nova.compute.manager [-] [instance: 86525ea7-af75-4b10-85a1-c0fbab73ea5f] Took 1.25 seconds to deallocate network for instance. [ 1010.019178] env[61839]: DEBUG nova.compute.manager [None req-7a32e35e-a270-437e-befd-37ff47a7524e tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 625a8fc1-23fc-4035-855f-3d3a963cdcea] Start building block device mappings for instance. {{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1010.164237] env[61839]: INFO nova.network.neutron [None req-090b33ea-ca3b-446b-9b43-95a19f225291 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: d718d866-dd6c-4332-b63a-be6850a5a785] Port a9bbfd1a-7c62-42c2-ba9b-587b125ef832 from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. [ 1010.164635] env[61839]: DEBUG nova.network.neutron [None req-090b33ea-ca3b-446b-9b43-95a19f225291 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: d718d866-dd6c-4332-b63a-be6850a5a785] Updating instance_info_cache with network_info: [{"id": "5d041d96-4a6e-44d5-a31a-e597194524e0", "address": "fa:16:3e:a9:bd:d2", "network": {"id": "41c98894-de91-45eb-a390-6217e0f9dca5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1040806357-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.245", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7e03467b7fba46a9aac1562a1cb8368e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "572b7281-aad3-45fa-9cb2-fc1c70569948", "external-id": "nsx-vlan-transportzone-722", "segmentation_id": 722, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5d041d96-4a", "ovs_interfaceid": "5d041d96-4a6e-44d5-a31a-e597194524e0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1010.221639] env[61839]: DEBUG nova.scheduler.client.report [None req-e789b1d9-8865-4e5c-a56e-08cef8f5eef8 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1010.303419] env[61839]: INFO nova.compute.manager [None req-0d0cf358-0b30-4267-bdcc-e309211a3bd4 
tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 8e9bea05-d6d7-40a8-997d-8c952f596f75] Took 17.39 seconds to build instance. [ 1010.391161] env[61839]: DEBUG oslo_vmware.api [None req-63522841-9a40-4375-9d2e-01b5cde9d722 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': task-1314956, 'name': PowerOffVM_Task, 'duration_secs': 0.223016} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1010.391536] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-63522841-9a40-4375-9d2e-01b5cde9d722 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: d718d866-dd6c-4332-b63a-be6850a5a785] Powered off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1010.391778] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-63522841-9a40-4375-9d2e-01b5cde9d722 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: d718d866-dd6c-4332-b63a-be6850a5a785] Unregistering the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1010.392418] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-679c37cc-c326-44a1-b9f1-3abfb583e4f6 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.396099] env[61839]: DEBUG nova.compute.manager [req-df89e3c1-777a-4f40-8638-2649a1cc061c req-aed695c8-bbbd-4797-bfae-d45854440de5 service nova] [instance: 86525ea7-af75-4b10-85a1-c0fbab73ea5f] Received event network-vif-deleted-ef8176cf-7494-44f4-a600-7dedff162419 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1010.478169] env[61839]: DEBUG oslo_concurrency.lockutils [None req-18b08b02-fe01-4ad2-8f1b-ff5471ae3022 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1010.529930] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-63522841-9a40-4375-9d2e-01b5cde9d722 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: d718d866-dd6c-4332-b63a-be6850a5a785] Unregistered the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1010.530181] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-63522841-9a40-4375-9d2e-01b5cde9d722 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: d718d866-dd6c-4332-b63a-be6850a5a785] Deleting contents of the VM from datastore datastore1 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1010.530392] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-63522841-9a40-4375-9d2e-01b5cde9d722 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Deleting the datastore file [datastore1] d718d866-dd6c-4332-b63a-be6850a5a785 {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1010.530689] env[61839]: DEBUG oslo_vmware.service [-] Invoking 
FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-16b07eb6-24f6-452e-84a3-06c70b545d14 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.540929] env[61839]: DEBUG oslo_vmware.api [None req-63522841-9a40-4375-9d2e-01b5cde9d722 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Waiting for the task: (returnval){ [ 1010.540929] env[61839]: value = "task-1314958" [ 1010.540929] env[61839]: _type = "Task" [ 1010.540929] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1010.550744] env[61839]: DEBUG oslo_vmware.api [None req-63522841-9a40-4375-9d2e-01b5cde9d722 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': task-1314958, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.667660] env[61839]: DEBUG oslo_concurrency.lockutils [None req-090b33ea-ca3b-446b-9b43-95a19f225291 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Releasing lock "refresh_cache-d718d866-dd6c-4332-b63a-be6850a5a785" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1010.727265] env[61839]: DEBUG oslo_concurrency.lockutils [None req-e789b1d9-8865-4e5c-a56e-08cef8f5eef8 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.719s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1010.730022] env[61839]: DEBUG oslo_concurrency.lockutils [None req-92b02466-d43a-4dfe-87c4-d668863cf3fe tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 10.840s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1010.730328] env[61839]: DEBUG nova.objects.instance [None req-92b02466-d43a-4dfe-87c4-d668863cf3fe tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Lazy-loading 'resources' on Instance uuid 73b83239-bbc8-41d1-aec3-2b4519c320af {{(pid=61839) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1010.751304] env[61839]: INFO nova.scheduler.client.report [None req-e789b1d9-8865-4e5c-a56e-08cef8f5eef8 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Deleted allocations for instance 3e27bc4a-a4f3-4929-931a-0c3ecaf10e65 [ 1010.805575] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0d0cf358-0b30-4267-bdcc-e309211a3bd4 tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Lock "8e9bea05-d6d7-40a8-997d-8c952f596f75" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 18.900s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1011.029409] env[61839]: DEBUG nova.compute.manager [None req-7a32e35e-a270-437e-befd-37ff47a7524e tempest-ServerActionsTestOtherB-75710309 
tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 625a8fc1-23fc-4035-855f-3d3a963cdcea] Start spawning the instance on the hypervisor. {{(pid=61839) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1011.052152] env[61839]: DEBUG oslo_vmware.api [None req-63522841-9a40-4375-9d2e-01b5cde9d722 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': task-1314958, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.221669} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1011.053841] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-63522841-9a40-4375-9d2e-01b5cde9d722 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Deleted the datastore file {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1011.054120] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-63522841-9a40-4375-9d2e-01b5cde9d722 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: d718d866-dd6c-4332-b63a-be6850a5a785] Deleted contents of the VM from datastore datastore1 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1011.054317] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-63522841-9a40-4375-9d2e-01b5cde9d722 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: d718d866-dd6c-4332-b63a-be6850a5a785] Instance destroyed {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1011.054498] env[61839]: INFO nova.compute.manager [None req-63522841-9a40-4375-9d2e-01b5cde9d722 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: d718d866-dd6c-4332-b63a-be6850a5a785] Took 1.19 seconds to destroy the instance on the hypervisor. [ 1011.054745] env[61839]: DEBUG oslo.service.loopingcall [None req-63522841-9a40-4375-9d2e-01b5cde9d722 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1011.055138] env[61839]: DEBUG nova.compute.manager [-] [instance: d718d866-dd6c-4332-b63a-be6850a5a785] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1011.055249] env[61839]: DEBUG nova.network.neutron [-] [instance: d718d866-dd6c-4332-b63a-be6850a5a785] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1011.062156] env[61839]: DEBUG nova.virt.hardware [None req-7a32e35e-a270-437e-befd-37ff47a7524e tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-18T16:51:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-18T16:50:57Z,direct_url=<?>,disk_format='vmdk',id=e497cc62-282a-4a70-9770-22d80d8a1013,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a98e034276e44534a9feace637762da7',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-10-18T16:50:58Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1011.062393] env[61839]: DEBUG nova.virt.hardware [None req-7a32e35e-a270-437e-befd-37ff47a7524e tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Flavor limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1011.062557] env[61839]: DEBUG nova.virt.hardware [None req-7a32e35e-a270-437e-befd-37ff47a7524e tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Image limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1011.062748] env[61839]: DEBUG nova.virt.hardware [None req-7a32e35e-a270-437e-befd-37ff47a7524e tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Flavor pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1011.062902] env[61839]: DEBUG nova.virt.hardware [None req-7a32e35e-a270-437e-befd-37ff47a7524e tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Image pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1011.063068] env[61839]: DEBUG nova.virt.hardware [None req-7a32e35e-a270-437e-befd-37ff47a7524e tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1011.063311] env[61839]: DEBUG nova.virt.hardware [None req-7a32e35e-a270-437e-befd-37ff47a7524e tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61839) 
_get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1011.063481] env[61839]: DEBUG nova.virt.hardware [None req-7a32e35e-a270-437e-befd-37ff47a7524e tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1011.063654] env[61839]: DEBUG nova.virt.hardware [None req-7a32e35e-a270-437e-befd-37ff47a7524e tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Got 1 possible topologies {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1011.063818] env[61839]: DEBUG nova.virt.hardware [None req-7a32e35e-a270-437e-befd-37ff47a7524e tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1011.063994] env[61839]: DEBUG nova.virt.hardware [None req-7a32e35e-a270-437e-befd-37ff47a7524e tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1011.064843] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92be5d4b-d243-4823-8ed4-21a5f1a21f5f {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.078710] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccd4a080-b9f1-4353-a9ba-9ec39aa57bca {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.104512] env[61839]: DEBUG neutronclient.v2_0.client [-] Error message: {"NeutronError": {"type": "PortNotFound", "message": "Port a9bbfd1a-7c62-42c2-ba9b-587b125ef832 could not be found.", "detail": ""}} {{(pid=61839) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1011.104667] env[61839]: DEBUG nova.network.neutron [-] Unable to show port a9bbfd1a-7c62-42c2-ba9b-587b125ef832 as it no longer exists. 
{{(pid=61839) _unbind_ports /opt/stack/nova/nova/network/neutron.py:666}} [ 1011.172648] env[61839]: DEBUG oslo_concurrency.lockutils [None req-090b33ea-ca3b-446b-9b43-95a19f225291 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Lock "interface-d718d866-dd6c-4332-b63a-be6850a5a785-7aee83a4-620a-48c2-a47b-7d47e05a7a07" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 9.635s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1011.261215] env[61839]: DEBUG oslo_concurrency.lockutils [None req-e789b1d9-8865-4e5c-a56e-08cef8f5eef8 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Lock "3e27bc4a-a4f3-4929-931a-0c3ecaf10e65" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 16.666s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1011.401079] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a43f86be-bf1b-4bc2-8a8b-23d64bc97f05 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.415023] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba4b11ca-c833-409e-ac68-ab41824533d2 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.448944] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-472c944b-86b1-4916-8590-267c66bedb9d {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.458910] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bf4152d-65b2-4e58-927a-6fc21342109f {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.476291] env[61839]: DEBUG nova.compute.provider_tree [None req-92b02466-d43a-4dfe-87c4-d668863cf3fe tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1011.572758] env[61839]: DEBUG oslo_concurrency.lockutils [None req-2414a939-66a4-4864-a0bf-c00db9fbd87c tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Acquiring lock "8e9bea05-d6d7-40a8-997d-8c952f596f75" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1011.572758] env[61839]: DEBUG oslo_concurrency.lockutils [None req-2414a939-66a4-4864-a0bf-c00db9fbd87c tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Lock "8e9bea05-d6d7-40a8-997d-8c952f596f75" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1011.574561] env[61839]: DEBUG oslo_concurrency.lockutils [None req-2414a939-66a4-4864-a0bf-c00db9fbd87c 
tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Acquiring lock "8e9bea05-d6d7-40a8-997d-8c952f596f75-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1011.574561] env[61839]: DEBUG oslo_concurrency.lockutils [None req-2414a939-66a4-4864-a0bf-c00db9fbd87c tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Lock "8e9bea05-d6d7-40a8-997d-8c952f596f75-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1011.574561] env[61839]: DEBUG oslo_concurrency.lockutils [None req-2414a939-66a4-4864-a0bf-c00db9fbd87c tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Lock "8e9bea05-d6d7-40a8-997d-8c952f596f75-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1011.576696] env[61839]: INFO nova.compute.manager [None req-2414a939-66a4-4864-a0bf-c00db9fbd87c tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 8e9bea05-d6d7-40a8-997d-8c952f596f75] Terminating instance [ 1011.580833] env[61839]: DEBUG nova.compute.manager [None req-2414a939-66a4-4864-a0bf-c00db9fbd87c tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 8e9bea05-d6d7-40a8-997d-8c952f596f75] Start destroying the instance on the hypervisor. 
{{(pid=61839) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1011.581034] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-2414a939-66a4-4864-a0bf-c00db9fbd87c tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 8e9bea05-d6d7-40a8-997d-8c952f596f75] Destroying instance {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1011.582472] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-917c12d0-9258-44a4-9ac0-825c1928825f {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.590381] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-2414a939-66a4-4864-a0bf-c00db9fbd87c tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 8e9bea05-d6d7-40a8-997d-8c952f596f75] Powering off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1011.590617] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-92779461-d8e1-45cc-82a8-22f64aa5b8d5 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.597714] env[61839]: DEBUG oslo_vmware.api [None req-2414a939-66a4-4864-a0bf-c00db9fbd87c tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Waiting for the task: (returnval){ [ 1011.597714] env[61839]: value = "task-1314959" [ 1011.597714] env[61839]: _type = "Task" [ 1011.597714] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1011.608257] env[61839]: DEBUG oslo_vmware.api [None req-2414a939-66a4-4864-a0bf-c00db9fbd87c tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Task: {'id': task-1314959, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.674919] env[61839]: DEBUG nova.network.neutron [None req-7a32e35e-a270-437e-befd-37ff47a7524e tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 625a8fc1-23fc-4035-855f-3d3a963cdcea] Successfully updated port: da11baa5-354e-440b-a384-10cd83ff2715 {{(pid=61839) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1011.746113] env[61839]: DEBUG nova.compute.manager [req-6f471a10-0823-42ff-899e-5fab407240a9 req-149ccc0c-8b86-4d79-adf3-22fff1994378 service nova] [instance: d718d866-dd6c-4332-b63a-be6850a5a785] Received event network-vif-deleted-5d041d96-4a6e-44d5-a31a-e597194524e0 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1011.746113] env[61839]: INFO nova.compute.manager [req-6f471a10-0823-42ff-899e-5fab407240a9 req-149ccc0c-8b86-4d79-adf3-22fff1994378 service nova] [instance: d718d866-dd6c-4332-b63a-be6850a5a785] Neutron deleted interface 5d041d96-4a6e-44d5-a31a-e597194524e0; detaching it from the instance and deleting it from the info cache [ 1011.746113] env[61839]: DEBUG nova.network.neutron [req-6f471a10-0823-42ff-899e-5fab407240a9 req-149ccc0c-8b86-4d79-adf3-22fff1994378 service nova] [instance: d718d866-dd6c-4332-b63a-be6850a5a785] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1011.799872] env[61839]: DEBUG nova.compute.manager [req-8ad817e3-323e-4c86-b83d-ff9ae53a6f3d req-772c8408-ec85-42fb-ab23-7fd74eb0ee7e service nova] [instance: 625a8fc1-23fc-4035-855f-3d3a963cdcea] Received event network-vif-plugged-da11baa5-354e-440b-a384-10cd83ff2715 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1011.800479] env[61839]: DEBUG oslo_concurrency.lockutils [req-8ad817e3-323e-4c86-b83d-ff9ae53a6f3d req-772c8408-ec85-42fb-ab23-7fd74eb0ee7e service nova] Acquiring lock "625a8fc1-23fc-4035-855f-3d3a963cdcea-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1011.800805] env[61839]: DEBUG oslo_concurrency.lockutils [req-8ad817e3-323e-4c86-b83d-ff9ae53a6f3d req-772c8408-ec85-42fb-ab23-7fd74eb0ee7e service nova] Lock "625a8fc1-23fc-4035-855f-3d3a963cdcea-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1011.803800] env[61839]: DEBUG oslo_concurrency.lockutils [req-8ad817e3-323e-4c86-b83d-ff9ae53a6f3d req-772c8408-ec85-42fb-ab23-7fd74eb0ee7e service nova] Lock "625a8fc1-23fc-4035-855f-3d3a963cdcea-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1011.803800] env[61839]: DEBUG nova.compute.manager [req-8ad817e3-323e-4c86-b83d-ff9ae53a6f3d req-772c8408-ec85-42fb-ab23-7fd74eb0ee7e service nova] [instance: 625a8fc1-23fc-4035-855f-3d3a963cdcea] No waiting events found dispatching network-vif-plugged-da11baa5-354e-440b-a384-10cd83ff2715 {{(pid=61839) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1011.803800] env[61839]: WARNING nova.compute.manager 
[req-8ad817e3-323e-4c86-b83d-ff9ae53a6f3d req-772c8408-ec85-42fb-ab23-7fd74eb0ee7e service nova] [instance: 625a8fc1-23fc-4035-855f-3d3a963cdcea] Received unexpected event network-vif-plugged-da11baa5-354e-440b-a384-10cd83ff2715 for instance with vm_state building and task_state spawning. [ 1011.980071] env[61839]: DEBUG nova.scheduler.client.report [None req-92b02466-d43a-4dfe-87c4-d668863cf3fe tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1012.018190] env[61839]: DEBUG nova.network.neutron [-] [instance: d718d866-dd6c-4332-b63a-be6850a5a785] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1012.108401] env[61839]: DEBUG oslo_vmware.api [None req-2414a939-66a4-4864-a0bf-c00db9fbd87c tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Task: {'id': task-1314959, 'name': PowerOffVM_Task, 'duration_secs': 0.196841} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1012.109118] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-2414a939-66a4-4864-a0bf-c00db9fbd87c tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 8e9bea05-d6d7-40a8-997d-8c952f596f75] Powered off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1012.109408] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-2414a939-66a4-4864-a0bf-c00db9fbd87c tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 8e9bea05-d6d7-40a8-997d-8c952f596f75] Unregistering the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1012.109920] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8505899d-7297-406f-a1a3-0d95af3b9350 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.171617] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-2414a939-66a4-4864-a0bf-c00db9fbd87c tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 8e9bea05-d6d7-40a8-997d-8c952f596f75] Unregistered the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1012.171845] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-2414a939-66a4-4864-a0bf-c00db9fbd87c tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 8e9bea05-d6d7-40a8-997d-8c952f596f75] Deleting contents of the VM from datastore datastore2 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1012.172110] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-2414a939-66a4-4864-a0bf-c00db9fbd87c 
tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Deleting the datastore file [datastore2] 8e9bea05-d6d7-40a8-997d-8c952f596f75 {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1012.172480] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-81d4c2a3-331c-4c99-9587-8d80e9fdd01e {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.177481] env[61839]: DEBUG oslo_concurrency.lockutils [None req-7a32e35e-a270-437e-befd-37ff47a7524e tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Acquiring lock "refresh_cache-625a8fc1-23fc-4035-855f-3d3a963cdcea" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1012.177686] env[61839]: DEBUG oslo_concurrency.lockutils [None req-7a32e35e-a270-437e-befd-37ff47a7524e tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Acquired lock "refresh_cache-625a8fc1-23fc-4035-855f-3d3a963cdcea" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1012.177775] env[61839]: DEBUG nova.network.neutron [None req-7a32e35e-a270-437e-befd-37ff47a7524e tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 625a8fc1-23fc-4035-855f-3d3a963cdcea] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1012.180196] env[61839]: DEBUG oslo_vmware.api [None req-2414a939-66a4-4864-a0bf-c00db9fbd87c tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Waiting for the task: (returnval){ [ 1012.180196] env[61839]: value = "task-1314961" [ 1012.180196] env[61839]: _type = "Task" [ 1012.180196] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1012.189165] env[61839]: DEBUG oslo_vmware.api [None req-2414a939-66a4-4864-a0bf-c00db9fbd87c tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Task: {'id': task-1314961, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.249301] env[61839]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9ae70471-cbff-4344-9c41-525c149951b8 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.259369] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2eb2fb7e-5d40-42c6-90bc-88b89510785d {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.286099] env[61839]: DEBUG nova.compute.manager [req-6f471a10-0823-42ff-899e-5fab407240a9 req-149ccc0c-8b86-4d79-adf3-22fff1994378 service nova] [instance: d718d866-dd6c-4332-b63a-be6850a5a785] Detach interface failed, port_id=5d041d96-4a6e-44d5-a31a-e597194524e0, reason: Instance d718d866-dd6c-4332-b63a-be6850a5a785 could not be found. 
{{(pid=61839) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1012.332468] env[61839]: DEBUG oslo_concurrency.lockutils [None req-21aff03a-aab6-4082-8d60-92ea79709a14 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Acquiring lock "a3cb1dd7-00ef-41f3-8db5-68ef9eb8d156" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1012.332728] env[61839]: DEBUG oslo_concurrency.lockutils [None req-21aff03a-aab6-4082-8d60-92ea79709a14 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Lock "a3cb1dd7-00ef-41f3-8db5-68ef9eb8d156" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1012.420936] env[61839]: DEBUG nova.compute.manager [req-0f382563-b1e5-4c9d-8f6f-4365353fd16a req-4ee9f8de-8403-4209-9895-e852ee58f247 service nova] [instance: 625a8fc1-23fc-4035-855f-3d3a963cdcea] Received event network-changed-da11baa5-354e-440b-a384-10cd83ff2715 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1012.421138] env[61839]: DEBUG nova.compute.manager [req-0f382563-b1e5-4c9d-8f6f-4365353fd16a req-4ee9f8de-8403-4209-9895-e852ee58f247 service nova] [instance: 625a8fc1-23fc-4035-855f-3d3a963cdcea] Refreshing instance network info cache due to event network-changed-da11baa5-354e-440b-a384-10cd83ff2715. {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1012.421342] env[61839]: DEBUG oslo_concurrency.lockutils [req-0f382563-b1e5-4c9d-8f6f-4365353fd16a req-4ee9f8de-8403-4209-9895-e852ee58f247 service nova] Acquiring lock "refresh_cache-625a8fc1-23fc-4035-855f-3d3a963cdcea" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1012.485416] env[61839]: DEBUG oslo_concurrency.lockutils [None req-92b02466-d43a-4dfe-87c4-d668863cf3fe tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.755s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1012.488661] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a438555f-9d5b-445e-974d-7728accd5417 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 11.620s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1012.488661] env[61839]: DEBUG nova.objects.instance [None req-a438555f-9d5b-445e-974d-7728accd5417 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Lazy-loading 'resources' on Instance uuid a87f3a17-0a97-4b47-bc95-eee5975f8203 {{(pid=61839) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1012.511164] env[61839]: INFO nova.scheduler.client.report [None req-92b02466-d43a-4dfe-87c4-d668863cf3fe tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Deleted allocations for instance 
73b83239-bbc8-41d1-aec3-2b4519c320af [ 1012.521402] env[61839]: INFO nova.compute.manager [-] [instance: d718d866-dd6c-4332-b63a-be6850a5a785] Took 1.47 seconds to deallocate network for instance. [ 1012.695032] env[61839]: DEBUG oslo_vmware.api [None req-2414a939-66a4-4864-a0bf-c00db9fbd87c tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Task: {'id': task-1314961, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.134919} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1012.695032] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-2414a939-66a4-4864-a0bf-c00db9fbd87c tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Deleted the datastore file {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1012.695032] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-2414a939-66a4-4864-a0bf-c00db9fbd87c tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 8e9bea05-d6d7-40a8-997d-8c952f596f75] Deleted contents of the VM from datastore datastore2 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1012.695032] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-2414a939-66a4-4864-a0bf-c00db9fbd87c tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 8e9bea05-d6d7-40a8-997d-8c952f596f75] Instance destroyed {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1012.695397] env[61839]: INFO nova.compute.manager [None req-2414a939-66a4-4864-a0bf-c00db9fbd87c tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] [instance: 8e9bea05-d6d7-40a8-997d-8c952f596f75] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1012.695692] env[61839]: DEBUG oslo.service.loopingcall [None req-2414a939-66a4-4864-a0bf-c00db9fbd87c tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1012.695990] env[61839]: DEBUG nova.compute.manager [-] [instance: 8e9bea05-d6d7-40a8-997d-8c952f596f75] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1012.696164] env[61839]: DEBUG nova.network.neutron [-] [instance: 8e9bea05-d6d7-40a8-997d-8c952f596f75] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1012.734497] env[61839]: DEBUG nova.network.neutron [None req-7a32e35e-a270-437e-befd-37ff47a7524e tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 625a8fc1-23fc-4035-855f-3d3a963cdcea] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1012.838246] env[61839]: DEBUG nova.compute.manager [None req-21aff03a-aab6-4082-8d60-92ea79709a14 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: a3cb1dd7-00ef-41f3-8db5-68ef9eb8d156] Starting instance... 
{{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1012.981553] env[61839]: DEBUG nova.network.neutron [None req-7a32e35e-a270-437e-befd-37ff47a7524e tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 625a8fc1-23fc-4035-855f-3d3a963cdcea] Updating instance_info_cache with network_info: [{"id": "da11baa5-354e-440b-a384-10cd83ff2715", "address": "fa:16:3e:1c:dc:5a", "network": {"id": "54f10e74-ee7b-499c-a6b6-a27d337719cd", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-79581761-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5f789f3900a347b59c491e9d141fb9e7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe20ef0e-0991-44d7-887d-08dddac0b56b", "external-id": "nsx-vlan-transportzone-991", "segmentation_id": 991, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapda11baa5-35", "ovs_interfaceid": "da11baa5-354e-440b-a384-10cd83ff2715", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1013.020910] env[61839]: DEBUG oslo_concurrency.lockutils [None req-92b02466-d43a-4dfe-87c4-d668863cf3fe tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Lock "73b83239-bbc8-41d1-aec3-2b4519c320af" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 15.736s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1013.029037] env[61839]: DEBUG oslo_concurrency.lockutils [None req-63522841-9a40-4375-9d2e-01b5cde9d722 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1013.124477] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01a848ed-a2ea-4e71-a41b-a297445f492b {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.132574] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0808964-9644-44ca-8c1c-5f3df355fb97 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.163722] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c01fd4de-aaaa-47b3-9a0b-bd85b4918238 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.171046] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dafec66b-24e2-4b34-8b50-c70c9223a2a9 {{(pid=61839) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.184706] env[61839]: DEBUG nova.compute.provider_tree [None req-a438555f-9d5b-445e-974d-7728accd5417 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1013.361329] env[61839]: DEBUG oslo_concurrency.lockutils [None req-21aff03a-aab6-4082-8d60-92ea79709a14 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1013.485227] env[61839]: DEBUG oslo_concurrency.lockutils [None req-7a32e35e-a270-437e-befd-37ff47a7524e tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Releasing lock "refresh_cache-625a8fc1-23fc-4035-855f-3d3a963cdcea" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1013.485588] env[61839]: DEBUG nova.compute.manager [None req-7a32e35e-a270-437e-befd-37ff47a7524e tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 625a8fc1-23fc-4035-855f-3d3a963cdcea] Instance network_info: |[{"id": "da11baa5-354e-440b-a384-10cd83ff2715", "address": "fa:16:3e:1c:dc:5a", "network": {"id": "54f10e74-ee7b-499c-a6b6-a27d337719cd", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-79581761-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5f789f3900a347b59c491e9d141fb9e7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe20ef0e-0991-44d7-887d-08dddac0b56b", "external-id": "nsx-vlan-transportzone-991", "segmentation_id": 991, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapda11baa5-35", "ovs_interfaceid": "da11baa5-354e-440b-a384-10cd83ff2715", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1013.485908] env[61839]: DEBUG oslo_concurrency.lockutils [req-0f382563-b1e5-4c9d-8f6f-4365353fd16a req-4ee9f8de-8403-4209-9895-e852ee58f247 service nova] Acquired lock "refresh_cache-625a8fc1-23fc-4035-855f-3d3a963cdcea" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1013.486116] env[61839]: DEBUG nova.network.neutron [req-0f382563-b1e5-4c9d-8f6f-4365353fd16a req-4ee9f8de-8403-4209-9895-e852ee58f247 service nova] [instance: 625a8fc1-23fc-4035-855f-3d3a963cdcea] Refreshing network info cache for port da11baa5-354e-440b-a384-10cd83ff2715 {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1013.487744] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None 
req-7a32e35e-a270-437e-befd-37ff47a7524e tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 625a8fc1-23fc-4035-855f-3d3a963cdcea] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1c:dc:5a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'fe20ef0e-0991-44d7-887d-08dddac0b56b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'da11baa5-354e-440b-a384-10cd83ff2715', 'vif_model': 'vmxnet3'}] {{(pid=61839) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1013.495056] env[61839]: DEBUG oslo.service.loopingcall [None req-7a32e35e-a270-437e-befd-37ff47a7524e tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1013.497961] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 625a8fc1-23fc-4035-855f-3d3a963cdcea] Creating VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1013.498438] env[61839]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6e4d919c-ff72-4ff3-9733-5512ca2d442b {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.512735] env[61839]: DEBUG nova.network.neutron [-] [instance: 8e9bea05-d6d7-40a8-997d-8c952f596f75] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1013.519522] env[61839]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1013.519522] env[61839]: value = "task-1314962" [ 1013.519522] env[61839]: _type = "Task" [ 1013.519522] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1013.528095] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314962, 'name': CreateVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1013.687560] env[61839]: DEBUG nova.scheduler.client.report [None req-a438555f-9d5b-445e-974d-7728accd5417 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1013.760706] env[61839]: DEBUG nova.network.neutron [req-0f382563-b1e5-4c9d-8f6f-4365353fd16a req-4ee9f8de-8403-4209-9895-e852ee58f247 service nova] [instance: 625a8fc1-23fc-4035-855f-3d3a963cdcea] Updated VIF entry in instance network info cache for port da11baa5-354e-440b-a384-10cd83ff2715. 
{{(pid=61839) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1013.761193] env[61839]: DEBUG nova.network.neutron [req-0f382563-b1e5-4c9d-8f6f-4365353fd16a req-4ee9f8de-8403-4209-9895-e852ee58f247 service nova] [instance: 625a8fc1-23fc-4035-855f-3d3a963cdcea] Updating instance_info_cache with network_info: [{"id": "da11baa5-354e-440b-a384-10cd83ff2715", "address": "fa:16:3e:1c:dc:5a", "network": {"id": "54f10e74-ee7b-499c-a6b6-a27d337719cd", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-79581761-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5f789f3900a347b59c491e9d141fb9e7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe20ef0e-0991-44d7-887d-08dddac0b56b", "external-id": "nsx-vlan-transportzone-991", "segmentation_id": 991, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapda11baa5-35", "ovs_interfaceid": "da11baa5-354e-440b-a384-10cd83ff2715", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1013.776265] env[61839]: DEBUG nova.compute.manager [req-2ab59eea-f716-4ab8-b28f-9f36ab34b11f req-7e0f1e3d-1b4f-4ec3-b0e2-e5931b7d2db3 service nova] [instance: 8e9bea05-d6d7-40a8-997d-8c952f596f75] Received event network-vif-deleted-c59a02ca-52d1-49bd-a536-7eadb7307eb4 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1014.014935] env[61839]: INFO nova.compute.manager [-] [instance: 8e9bea05-d6d7-40a8-997d-8c952f596f75] Took 1.32 seconds to deallocate network for instance. [ 1014.029723] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314962, 'name': CreateVM_Task, 'duration_secs': 0.312926} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1014.029910] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 625a8fc1-23fc-4035-855f-3d3a963cdcea] Created VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1014.030569] env[61839]: DEBUG oslo_concurrency.lockutils [None req-7a32e35e-a270-437e-befd-37ff47a7524e tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1014.030758] env[61839]: DEBUG oslo_concurrency.lockutils [None req-7a32e35e-a270-437e-befd-37ff47a7524e tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1014.033290] env[61839]: DEBUG oslo_concurrency.lockutils [None req-7a32e35e-a270-437e-befd-37ff47a7524e tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1014.033290] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-934bd5fc-3ed5-4a8a-847b-0a0d77b6bfa6 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.036137] env[61839]: DEBUG oslo_vmware.api [None req-7a32e35e-a270-437e-befd-37ff47a7524e tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Waiting for the task: (returnval){ [ 1014.036137] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]522ad55c-db79-c0b3-94f8-9f4efa92f112" [ 1014.036137] env[61839]: _type = "Task" [ 1014.036137] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1014.046450] env[61839]: DEBUG oslo_vmware.api [None req-7a32e35e-a270-437e-befd-37ff47a7524e tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]522ad55c-db79-c0b3-94f8-9f4efa92f112, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1014.192049] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a438555f-9d5b-445e-974d-7728accd5417 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.704s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1014.194808] env[61839]: DEBUG oslo_concurrency.lockutils [None req-17010d58-34f2-48b6-9ea5-ab1084631cde tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.521s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1014.198151] env[61839]: INFO nova.compute.claims [None req-17010d58-34f2-48b6-9ea5-ab1084631cde tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: a1defab7-8433-411d-b7e2-c31f6a34b8e0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1014.219692] env[61839]: INFO nova.scheduler.client.report [None req-a438555f-9d5b-445e-974d-7728accd5417 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Deleted allocations for instance a87f3a17-0a97-4b47-bc95-eee5975f8203 [ 1014.264513] env[61839]: DEBUG oslo_concurrency.lockutils [req-0f382563-b1e5-4c9d-8f6f-4365353fd16a req-4ee9f8de-8403-4209-9895-e852ee58f247 service nova] Releasing lock "refresh_cache-625a8fc1-23fc-4035-855f-3d3a963cdcea" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1014.521495] env[61839]: DEBUG oslo_concurrency.lockutils [None req-2414a939-66a4-4864-a0bf-c00db9fbd87c tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1014.548922] env[61839]: DEBUG oslo_vmware.api [None req-7a32e35e-a270-437e-befd-37ff47a7524e tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]522ad55c-db79-c0b3-94f8-9f4efa92f112, 'name': SearchDatastore_Task, 'duration_secs': 0.00977} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1014.549264] env[61839]: DEBUG oslo_concurrency.lockutils [None req-7a32e35e-a270-437e-befd-37ff47a7524e tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1014.549504] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-7a32e35e-a270-437e-befd-37ff47a7524e tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 625a8fc1-23fc-4035-855f-3d3a963cdcea] Processing image e497cc62-282a-4a70-9770-22d80d8a1013 {{(pid=61839) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1014.549840] env[61839]: DEBUG oslo_concurrency.lockutils [None req-7a32e35e-a270-437e-befd-37ff47a7524e tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1014.549928] env[61839]: DEBUG oslo_concurrency.lockutils [None req-7a32e35e-a270-437e-befd-37ff47a7524e tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1014.550128] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-7a32e35e-a270-437e-befd-37ff47a7524e tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1014.550397] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2f574287-0669-45c0-ac45-a8fe24c1ee21 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.559021] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-7a32e35e-a270-437e-befd-37ff47a7524e tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1014.559250] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-7a32e35e-a270-437e-befd-37ff47a7524e tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61839) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1014.559979] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fdaa83bc-65a5-4603-830e-68a776722b0b {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.564882] env[61839]: DEBUG oslo_vmware.api [None req-7a32e35e-a270-437e-befd-37ff47a7524e tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Waiting for the task: (returnval){ [ 1014.564882] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52664b61-3966-f23c-3e26-02c9d5351fa9" [ 1014.564882] env[61839]: _type = "Task" [ 1014.564882] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1014.572042] env[61839]: DEBUG oslo_vmware.api [None req-7a32e35e-a270-437e-befd-37ff47a7524e tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52664b61-3966-f23c-3e26-02c9d5351fa9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1014.727275] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a438555f-9d5b-445e-974d-7728accd5417 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Lock "a87f3a17-0a97-4b47-bc95-eee5975f8203" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 16.346s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1015.054025] env[61839]: DEBUG oslo_concurrency.lockutils [None req-b4c7d2ce-8f72-4614-90ab-317067fd69ed tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Acquiring lock "25c574c4-e39b-4009-a562-a4a5bf74a40c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1015.054327] env[61839]: DEBUG oslo_concurrency.lockutils [None req-b4c7d2ce-8f72-4614-90ab-317067fd69ed tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Lock "25c574c4-e39b-4009-a562-a4a5bf74a40c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1015.075663] env[61839]: DEBUG oslo_vmware.api [None req-7a32e35e-a270-437e-befd-37ff47a7524e tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52664b61-3966-f23c-3e26-02c9d5351fa9, 'name': SearchDatastore_Task, 'duration_secs': 0.008764} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1015.076459] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bdeaf8a8-4b6f-4cf4-8fe4-29a635d1b6de {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.081852] env[61839]: DEBUG oslo_vmware.api [None req-7a32e35e-a270-437e-befd-37ff47a7524e tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Waiting for the task: (returnval){ [ 1015.081852] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]527dabed-9567-34e0-5aca-562f7aa332e9" [ 1015.081852] env[61839]: _type = "Task" [ 1015.081852] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1015.089805] env[61839]: DEBUG oslo_vmware.api [None req-7a32e35e-a270-437e-befd-37ff47a7524e tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]527dabed-9567-34e0-5aca-562f7aa332e9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.329421] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1947fd97-249e-4d58-8a3a-747d2a91d473 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.336873] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8899184d-d490-41dd-a8f8-e60e6d78e00c {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.367176] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0e2820d-df7d-4558-b882-08890df5b9e7 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.374448] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-141f9a1c-1386-4aa4-a16f-a3914b7a02b2 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.387296] env[61839]: DEBUG nova.compute.provider_tree [None req-17010d58-34f2-48b6-9ea5-ab1084631cde tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1015.556622] env[61839]: DEBUG nova.compute.manager [None req-b4c7d2ce-8f72-4614-90ab-317067fd69ed tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 25c574c4-e39b-4009-a562-a4a5bf74a40c] Starting instance... 
{{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1015.577659] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a8d767a2-2dd7-4819-9575-f19f76b11134 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Acquiring lock "15794971-10d8-4234-8a72-90c940dae90c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1015.577659] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a8d767a2-2dd7-4819-9575-f19f76b11134 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Lock "15794971-10d8-4234-8a72-90c940dae90c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1015.593064] env[61839]: DEBUG oslo_vmware.api [None req-7a32e35e-a270-437e-befd-37ff47a7524e tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]527dabed-9567-34e0-5aca-562f7aa332e9, 'name': SearchDatastore_Task, 'duration_secs': 0.010213} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1015.593402] env[61839]: DEBUG oslo_concurrency.lockutils [None req-7a32e35e-a270-437e-befd-37ff47a7524e tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1015.593578] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-7a32e35e-a270-437e-befd-37ff47a7524e tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk to [datastore1] 625a8fc1-23fc-4035-855f-3d3a963cdcea/625a8fc1-23fc-4035-855f-3d3a963cdcea.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1015.593832] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-45f4be92-7c95-4084-92f3-0758ae0456e5 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.601247] env[61839]: DEBUG oslo_vmware.api [None req-7a32e35e-a270-437e-befd-37ff47a7524e tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Waiting for the task: (returnval){ [ 1015.601247] env[61839]: value = "task-1314963" [ 1015.601247] env[61839]: _type = "Task" [ 1015.601247] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1015.609400] env[61839]: DEBUG oslo_vmware.api [None req-7a32e35e-a270-437e-befd-37ff47a7524e tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1314963, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.890821] env[61839]: DEBUG nova.scheduler.client.report [None req-17010d58-34f2-48b6-9ea5-ab1084631cde tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1016.079058] env[61839]: DEBUG oslo_concurrency.lockutils [None req-b4c7d2ce-8f72-4614-90ab-317067fd69ed tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1016.082626] env[61839]: DEBUG nova.compute.manager [None req-a8d767a2-2dd7-4819-9575-f19f76b11134 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: 15794971-10d8-4234-8a72-90c940dae90c] Starting instance... {{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1016.110971] env[61839]: DEBUG oslo_vmware.api [None req-7a32e35e-a270-437e-befd-37ff47a7524e tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1314963, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.487533} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1016.111298] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-7a32e35e-a270-437e-befd-37ff47a7524e tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk to [datastore1] 625a8fc1-23fc-4035-855f-3d3a963cdcea/625a8fc1-23fc-4035-855f-3d3a963cdcea.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1016.111545] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-7a32e35e-a270-437e-befd-37ff47a7524e tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 625a8fc1-23fc-4035-855f-3d3a963cdcea] Extending root virtual disk to 1048576 {{(pid=61839) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1016.111812] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4af5cf86-6937-4bf4-8728-7cfe52243634 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.117988] env[61839]: DEBUG oslo_vmware.api [None req-7a32e35e-a270-437e-befd-37ff47a7524e tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Waiting for the task: (returnval){ [ 1016.117988] env[61839]: value = "task-1314964" [ 1016.117988] env[61839]: _type = "Task" [ 1016.117988] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1016.125943] env[61839]: DEBUG oslo_vmware.api [None req-7a32e35e-a270-437e-befd-37ff47a7524e tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1314964, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1016.395997] env[61839]: DEBUG oslo_concurrency.lockutils [None req-17010d58-34f2-48b6-9ea5-ab1084631cde tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.201s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1016.396536] env[61839]: DEBUG nova.compute.manager [None req-17010d58-34f2-48b6-9ea5-ab1084631cde tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: a1defab7-8433-411d-b7e2-c31f6a34b8e0] Start building networks asynchronously for instance. 
{{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1016.399209] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a0f8ac31-c920-4938-9206-5ad9182b53cb tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 11.571s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1016.399439] env[61839]: DEBUG nova.objects.instance [None req-a0f8ac31-c920-4938-9206-5ad9182b53cb tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Lazy-loading 'resources' on Instance uuid d4a8c153-7585-4c78-8aa4-56077e0a7af6 {{(pid=61839) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1016.601284] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a8d767a2-2dd7-4819-9575-f19f76b11134 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1016.630028] env[61839]: DEBUG oslo_vmware.api [None req-7a32e35e-a270-437e-befd-37ff47a7524e tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1314964, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.062669} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1016.630028] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-7a32e35e-a270-437e-befd-37ff47a7524e tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 625a8fc1-23fc-4035-855f-3d3a963cdcea] Extended root virtual disk {{(pid=61839) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1016.630734] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aff5cd43-cd25-4464-a6ba-2d12c86b715b {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.652287] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-7a32e35e-a270-437e-befd-37ff47a7524e tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 625a8fc1-23fc-4035-855f-3d3a963cdcea] Reconfiguring VM instance instance-00000064 to attach disk [datastore1] 625a8fc1-23fc-4035-855f-3d3a963cdcea/625a8fc1-23fc-4035-855f-3d3a963cdcea.vmdk or device None with type sparse {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1016.652602] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-26481b70-341f-46b6-ba82-2438b98a98b7 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.672108] env[61839]: DEBUG oslo_vmware.api [None req-7a32e35e-a270-437e-befd-37ff47a7524e tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Waiting for the task: (returnval){ [ 1016.672108] env[61839]: value = "task-1314965" [ 1016.672108] env[61839]: _type = "Task" [ 1016.672108] env[61839]: } to complete. 
{{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1016.679780] env[61839]: DEBUG oslo_vmware.api [None req-7a32e35e-a270-437e-befd-37ff47a7524e tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1314965, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1016.905668] env[61839]: DEBUG nova.compute.utils [None req-17010d58-34f2-48b6-9ea5-ab1084631cde tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Using /dev/sd instead of None {{(pid=61839) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1016.908053] env[61839]: DEBUG nova.compute.manager [None req-17010d58-34f2-48b6-9ea5-ab1084631cde tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: a1defab7-8433-411d-b7e2-c31f6a34b8e0] Allocating IP information in the background. {{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1016.908053] env[61839]: DEBUG nova.network.neutron [None req-17010d58-34f2-48b6-9ea5-ab1084631cde tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: a1defab7-8433-411d-b7e2-c31f6a34b8e0] allocate_for_instance() {{(pid=61839) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1016.949715] env[61839]: DEBUG nova.policy [None req-17010d58-34f2-48b6-9ea5-ab1084631cde tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '600416123b9a4d4ab84f866d0a278a9e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b73ee7e490914f54925597f38c8cc05b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61839) authorize /opt/stack/nova/nova/policy.py:201}} [ 1017.066292] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe4fc2af-d0a7-4d25-8092-6f122429703d {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.073869] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe797b77-e156-4314-b58a-7557ab4ff8f3 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.103394] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1538c4bb-87f5-441d-bd20-3aabc06abfa9 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.110861] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0e580d5-5516-4bad-8b82-f49d2982f585 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.123697] env[61839]: DEBUG nova.compute.provider_tree [None req-a0f8ac31-c920-4938-9206-5ad9182b53cb tempest-AttachVolumeNegativeTest-781189834 
tempest-AttachVolumeNegativeTest-781189834-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1017.184065] env[61839]: DEBUG oslo_vmware.api [None req-7a32e35e-a270-437e-befd-37ff47a7524e tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1314965, 'name': ReconfigVM_Task} progress is 99%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1017.227210] env[61839]: DEBUG nova.network.neutron [None req-17010d58-34f2-48b6-9ea5-ab1084631cde tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: a1defab7-8433-411d-b7e2-c31f6a34b8e0] Successfully created port: 8b7a9c37-1009-4d15-b75d-68cbeb5d2fc7 {{(pid=61839) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1017.410514] env[61839]: DEBUG nova.compute.manager [None req-17010d58-34f2-48b6-9ea5-ab1084631cde tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: a1defab7-8433-411d-b7e2-c31f6a34b8e0] Start building block device mappings for instance. {{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1017.627632] env[61839]: DEBUG nova.scheduler.client.report [None req-a0f8ac31-c920-4938-9206-5ad9182b53cb tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1017.685065] env[61839]: DEBUG oslo_vmware.api [None req-7a32e35e-a270-437e-befd-37ff47a7524e tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1314965, 'name': ReconfigVM_Task, 'duration_secs': 0.519547} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1017.685422] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-7a32e35e-a270-437e-befd-37ff47a7524e tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 625a8fc1-23fc-4035-855f-3d3a963cdcea] Reconfigured VM instance instance-00000064 to attach disk [datastore1] 625a8fc1-23fc-4035-855f-3d3a963cdcea/625a8fc1-23fc-4035-855f-3d3a963cdcea.vmdk or device None with type sparse {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1017.686269] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-900b720d-4553-45fe-9dd8-ca075f444cc9 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.694132] env[61839]: DEBUG oslo_vmware.api [None req-7a32e35e-a270-437e-befd-37ff47a7524e tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Waiting for the task: (returnval){ [ 1017.694132] env[61839]: value = "task-1314966" [ 1017.694132] env[61839]: _type = "Task" [ 1017.694132] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1017.709415] env[61839]: DEBUG oslo_vmware.api [None req-7a32e35e-a270-437e-befd-37ff47a7524e tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1314966, 'name': Rename_Task} progress is 6%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.132925] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a0f8ac31-c920-4938-9206-5ad9182b53cb tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.734s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1018.135423] env[61839]: DEBUG oslo_concurrency.lockutils [None req-18b08b02-fe01-4ad2-8f1b-ff5471ae3022 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 7.657s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1018.135686] env[61839]: DEBUG nova.objects.instance [None req-18b08b02-fe01-4ad2-8f1b-ff5471ae3022 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Lazy-loading 'resources' on Instance uuid 86525ea7-af75-4b10-85a1-c0fbab73ea5f {{(pid=61839) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1018.153803] env[61839]: INFO nova.scheduler.client.report [None req-a0f8ac31-c920-4938-9206-5ad9182b53cb tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Deleted allocations for instance d4a8c153-7585-4c78-8aa4-56077e0a7af6 [ 1018.205465] env[61839]: DEBUG oslo_vmware.api [None req-7a32e35e-a270-437e-befd-37ff47a7524e tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1314966, 'name': Rename_Task, 'duration_secs': 0.162092} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1018.205753] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-7a32e35e-a270-437e-befd-37ff47a7524e tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 625a8fc1-23fc-4035-855f-3d3a963cdcea] Powering on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1018.206018] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7c358a35-af8b-4adc-a40d-651c055b26aa {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.212804] env[61839]: DEBUG oslo_vmware.api [None req-7a32e35e-a270-437e-befd-37ff47a7524e tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Waiting for the task: (returnval){ [ 1018.212804] env[61839]: value = "task-1314967" [ 1018.212804] env[61839]: _type = "Task" [ 1018.212804] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1018.222643] env[61839]: DEBUG oslo_vmware.api [None req-7a32e35e-a270-437e-befd-37ff47a7524e tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1314967, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.420077] env[61839]: DEBUG nova.compute.manager [None req-17010d58-34f2-48b6-9ea5-ab1084631cde tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: a1defab7-8433-411d-b7e2-c31f6a34b8e0] Start spawning the instance on the hypervisor. 
{{(pid=61839) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1018.445811] env[61839]: DEBUG nova.virt.hardware [None req-17010d58-34f2-48b6-9ea5-ab1084631cde tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-18T16:51:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-18T16:50:57Z,direct_url=,disk_format='vmdk',id=e497cc62-282a-4a70-9770-22d80d8a1013,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a98e034276e44534a9feace637762da7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-18T16:50:58Z,virtual_size=,visibility=), allow threads: False {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1018.446135] env[61839]: DEBUG nova.virt.hardware [None req-17010d58-34f2-48b6-9ea5-ab1084631cde tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Flavor limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1018.446336] env[61839]: DEBUG nova.virt.hardware [None req-17010d58-34f2-48b6-9ea5-ab1084631cde tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Image limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1018.446559] env[61839]: DEBUG nova.virt.hardware [None req-17010d58-34f2-48b6-9ea5-ab1084631cde tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Flavor pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1018.446740] env[61839]: DEBUG nova.virt.hardware [None req-17010d58-34f2-48b6-9ea5-ab1084631cde tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Image pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1018.446922] env[61839]: DEBUG nova.virt.hardware [None req-17010d58-34f2-48b6-9ea5-ab1084631cde tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1018.447189] env[61839]: DEBUG nova.virt.hardware [None req-17010d58-34f2-48b6-9ea5-ab1084631cde tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1018.447397] env[61839]: DEBUG nova.virt.hardware [None req-17010d58-34f2-48b6-9ea5-ab1084631cde tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61839) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 1018.447595] env[61839]: DEBUG nova.virt.hardware [None req-17010d58-34f2-48b6-9ea5-ab1084631cde tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Got 1 possible topologies {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1018.447803] env[61839]: DEBUG nova.virt.hardware [None req-17010d58-34f2-48b6-9ea5-ab1084631cde tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1018.448033] env[61839]: DEBUG nova.virt.hardware [None req-17010d58-34f2-48b6-9ea5-ab1084631cde tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1018.448947] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3726a198-4f52-4bea-a887-7b4593dfd34a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.458484] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8e6e9ba-bb13-44dc-990b-2b4f7db116d8 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.609667] env[61839]: DEBUG nova.compute.manager [req-0421f51e-5a4f-4a3d-b996-3ad67aa60086 req-2b3e2241-4192-47c0-abc3-781cd0d4e707 service nova] [instance: a1defab7-8433-411d-b7e2-c31f6a34b8e0] Received event network-vif-plugged-8b7a9c37-1009-4d15-b75d-68cbeb5d2fc7 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1018.609904] env[61839]: DEBUG oslo_concurrency.lockutils [req-0421f51e-5a4f-4a3d-b996-3ad67aa60086 req-2b3e2241-4192-47c0-abc3-781cd0d4e707 service nova] Acquiring lock "a1defab7-8433-411d-b7e2-c31f6a34b8e0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1018.610141] env[61839]: DEBUG oslo_concurrency.lockutils [req-0421f51e-5a4f-4a3d-b996-3ad67aa60086 req-2b3e2241-4192-47c0-abc3-781cd0d4e707 service nova] Lock "a1defab7-8433-411d-b7e2-c31f6a34b8e0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1018.610325] env[61839]: DEBUG oslo_concurrency.lockutils [req-0421f51e-5a4f-4a3d-b996-3ad67aa60086 req-2b3e2241-4192-47c0-abc3-781cd0d4e707 service nova] Lock "a1defab7-8433-411d-b7e2-c31f6a34b8e0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1018.610502] env[61839]: DEBUG nova.compute.manager [req-0421f51e-5a4f-4a3d-b996-3ad67aa60086 req-2b3e2241-4192-47c0-abc3-781cd0d4e707 service nova] [instance: a1defab7-8433-411d-b7e2-c31f6a34b8e0] No waiting events found dispatching network-vif-plugged-8b7a9c37-1009-4d15-b75d-68cbeb5d2fc7 {{(pid=61839) pop_instance_event 
/opt/stack/nova/nova/compute/manager.py:320}} [ 1018.610671] env[61839]: WARNING nova.compute.manager [req-0421f51e-5a4f-4a3d-b996-3ad67aa60086 req-2b3e2241-4192-47c0-abc3-781cd0d4e707 service nova] [instance: a1defab7-8433-411d-b7e2-c31f6a34b8e0] Received unexpected event network-vif-plugged-8b7a9c37-1009-4d15-b75d-68cbeb5d2fc7 for instance with vm_state building and task_state spawning. [ 1018.662270] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a0f8ac31-c920-4938-9206-5ad9182b53cb tempest-AttachVolumeNegativeTest-781189834 tempest-AttachVolumeNegativeTest-781189834-project-member] Lock "d4a8c153-7585-4c78-8aa4-56077e0a7af6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 17.087s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1018.717804] env[61839]: DEBUG nova.network.neutron [None req-17010d58-34f2-48b6-9ea5-ab1084631cde tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: a1defab7-8433-411d-b7e2-c31f6a34b8e0] Successfully updated port: 8b7a9c37-1009-4d15-b75d-68cbeb5d2fc7 {{(pid=61839) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1018.727415] env[61839]: DEBUG oslo_vmware.api [None req-7a32e35e-a270-437e-befd-37ff47a7524e tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1314967, 'name': PowerOnVM_Task, 'duration_secs': 0.435353} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1018.728228] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-7a32e35e-a270-437e-befd-37ff47a7524e tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 625a8fc1-23fc-4035-855f-3d3a963cdcea] Powered on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1018.728447] env[61839]: INFO nova.compute.manager [None req-7a32e35e-a270-437e-befd-37ff47a7524e tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 625a8fc1-23fc-4035-855f-3d3a963cdcea] Took 7.70 seconds to spawn the instance on the hypervisor. 
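The CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task and PowerOnVM_Task records above all follow oslo.vmware's invoke-then-poll pattern: the driver issues the SOAP method, gets back a Task moref, and wait_for_task() polls it, producing the "Waiting for the task" and "progress is N%" lines, until _poll_task reports completion. A minimal sketch of that pattern follows; the vCenter host, credentials and VM moref are hypothetical placeholders, not values from this log.

    from oslo_vmware import api, vim_util

    # Hypothetical connection details for illustration only.
    session = api.VMwareAPISession(
        'vc1.example.test',   # vCenter host (placeholder)
        'user', 'password',   # credentials (placeholders)
        10,                   # api_retry_count
        0.5,                  # task_poll_interval: seconds between the
                              # "progress is N%" polls seen in this log
    )

    # invoke_api() issues the SOAP call and returns a Task moref;
    # wait_for_task() polls it until success or raises on error.
    vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')  # placeholder moref
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    task_info = session.wait_for_task(task)

The duration_secs values logged on completion (e.g. 0.435353 for task-1314967) are measured by this same polling loop.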
[ 1018.728632] env[61839]: DEBUG nova.compute.manager [None req-7a32e35e-a270-437e-befd-37ff47a7524e tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 625a8fc1-23fc-4035-855f-3d3a963cdcea] Checking state {{(pid=61839) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1018.729430] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26985d8d-00d3-4885-b1d3-e53e067db499 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.788227] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df84c3d0-9db4-49d4-abc6-2dd899972f8e {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.796351] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b5bdf30-64ee-45e3-9d16-9ca210e216a1 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.827034] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39227b68-e563-423f-896f-2c0b126bb152 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.834275] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd0862ec-efef-4693-ac48-4473159c5a67 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.847105] env[61839]: DEBUG nova.compute.provider_tree [None req-18b08b02-fe01-4ad2-8f1b-ff5471ae3022 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1019.222047] env[61839]: DEBUG oslo_concurrency.lockutils [None req-17010d58-34f2-48b6-9ea5-ab1084631cde tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Acquiring lock "refresh_cache-a1defab7-8433-411d-b7e2-c31f6a34b8e0" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1019.222047] env[61839]: DEBUG oslo_concurrency.lockutils [None req-17010d58-34f2-48b6-9ea5-ab1084631cde tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Acquired lock "refresh_cache-a1defab7-8433-411d-b7e2-c31f6a34b8e0" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1019.222047] env[61839]: DEBUG nova.network.neutron [None req-17010d58-34f2-48b6-9ea5-ab1084631cde tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: a1defab7-8433-411d-b7e2-c31f6a34b8e0] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1019.252844] env[61839]: INFO nova.compute.manager [None req-7a32e35e-a270-437e-befd-37ff47a7524e tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 625a8fc1-23fc-4035-855f-3d3a963cdcea] Took 22.61 seconds to build instance. 
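The recurring 'Acquiring lock ...' / 'Lock ... acquired by ... waited Ns' / 'Lock ... "released" by ... held Ns' triplets throughout this log are emitted by oslo.concurrency's lockutils wrapper (the `inner` frames in lockutils.py) around named internal locks such as "compute_resources" and the per-instance "refresh_cache-*" locks. A minimal sketch of the two usages that trigger that logging; the function body and the instance-uuid placeholder are illustrative only.

    from oslo_concurrency import lockutils

    # Decorator form: the body runs with the named semaphore held, and
    # lockutils logs the acquire (with wait time) and release (with hold time).
    @lockutils.synchronized('compute_resources')
    def instance_claim():
        pass  # illustrative body, not Nova's actual claim logic

    # Equivalent explicit context-manager form, as used for the
    # refresh_cache-* locks in the entries below ('<instance-uuid>' is a
    # placeholder, not a value from this log):
    with lockutils.lock('refresh_cache-<instance-uuid>'):
        pass

Because these are in-process semaphores rather than file locks, the waited/held times logged here (e.g. "waited 11.571s", "held 2.201s") measure contention between greenthreads inside this single nova-compute process.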
[ 1019.350406] env[61839]: DEBUG nova.scheduler.client.report [None req-18b08b02-fe01-4ad2-8f1b-ff5471ae3022 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1019.538919] env[61839]: DEBUG nova.compute.manager [req-6abc2008-5db0-4daf-a647-cf59082274ea req-82cbd304-6a68-4009-b32d-4327a119eeb6 service nova] [instance: 625a8fc1-23fc-4035-855f-3d3a963cdcea] Received event network-changed-da11baa5-354e-440b-a384-10cd83ff2715 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1019.539073] env[61839]: DEBUG nova.compute.manager [req-6abc2008-5db0-4daf-a647-cf59082274ea req-82cbd304-6a68-4009-b32d-4327a119eeb6 service nova] [instance: 625a8fc1-23fc-4035-855f-3d3a963cdcea] Refreshing instance network info cache due to event network-changed-da11baa5-354e-440b-a384-10cd83ff2715. {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1019.539312] env[61839]: DEBUG oslo_concurrency.lockutils [req-6abc2008-5db0-4daf-a647-cf59082274ea req-82cbd304-6a68-4009-b32d-4327a119eeb6 service nova] Acquiring lock "refresh_cache-625a8fc1-23fc-4035-855f-3d3a963cdcea" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1019.539468] env[61839]: DEBUG oslo_concurrency.lockutils [req-6abc2008-5db0-4daf-a647-cf59082274ea req-82cbd304-6a68-4009-b32d-4327a119eeb6 service nova] Acquired lock "refresh_cache-625a8fc1-23fc-4035-855f-3d3a963cdcea" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1019.539635] env[61839]: DEBUG nova.network.neutron [req-6abc2008-5db0-4daf-a647-cf59082274ea req-82cbd304-6a68-4009-b32d-4327a119eeb6 service nova] [instance: 625a8fc1-23fc-4035-855f-3d3a963cdcea] Refreshing network info cache for port da11baa5-354e-440b-a384-10cd83ff2715 {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1019.577020] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1019.577020] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1019.577020] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Starting heal instance info cache {{(pid=61839) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 1019.754715] env[61839]: DEBUG oslo_concurrency.lockutils [None req-7a32e35e-a270-437e-befd-37ff47a7524e tempest-ServerActionsTestOtherB-75710309 
tempest-ServerActionsTestOtherB-75710309-project-member] Lock "625a8fc1-23fc-4035-855f-3d3a963cdcea" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 24.124s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1019.755709] env[61839]: DEBUG nova.network.neutron [None req-17010d58-34f2-48b6-9ea5-ab1084631cde tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: a1defab7-8433-411d-b7e2-c31f6a34b8e0] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1019.855728] env[61839]: DEBUG oslo_concurrency.lockutils [None req-18b08b02-fe01-4ad2-8f1b-ff5471ae3022 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.720s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1019.857824] env[61839]: DEBUG oslo_concurrency.lockutils [None req-63522841-9a40-4375-9d2e-01b5cde9d722 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.831s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1019.857984] env[61839]: DEBUG nova.objects.instance [None req-63522841-9a40-4375-9d2e-01b5cde9d722 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Lazy-loading 'resources' on Instance uuid d718d866-dd6c-4332-b63a-be6850a5a785 {{(pid=61839) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1019.879506] env[61839]: INFO nova.scheduler.client.report [None req-18b08b02-fe01-4ad2-8f1b-ff5471ae3022 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Deleted allocations for instance 86525ea7-af75-4b10-85a1-c0fbab73ea5f [ 1019.881377] env[61839]: DEBUG nova.network.neutron [None req-17010d58-34f2-48b6-9ea5-ab1084631cde tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: a1defab7-8433-411d-b7e2-c31f6a34b8e0] Updating instance_info_cache with network_info: [{"id": "8b7a9c37-1009-4d15-b75d-68cbeb5d2fc7", "address": "fa:16:3e:a5:ad:80", "network": {"id": "e012ca9f-236a-42fd-a444-759508fbba7b", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-567894073-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b73ee7e490914f54925597f38c8cc05b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9f3a2eb5-353f-45c5-a73b-869626f4bb13", "external-id": "nsx-vlan-transportzone-411", "segmentation_id": 411, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8b7a9c37-10", "ovs_interfaceid": "8b7a9c37-1009-4d15-b75d-68cbeb5d2fc7", 
"qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1020.277475] env[61839]: DEBUG nova.network.neutron [req-6abc2008-5db0-4daf-a647-cf59082274ea req-82cbd304-6a68-4009-b32d-4327a119eeb6 service nova] [instance: 625a8fc1-23fc-4035-855f-3d3a963cdcea] Updated VIF entry in instance network info cache for port da11baa5-354e-440b-a384-10cd83ff2715. {{(pid=61839) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1020.277887] env[61839]: DEBUG nova.network.neutron [req-6abc2008-5db0-4daf-a647-cf59082274ea req-82cbd304-6a68-4009-b32d-4327a119eeb6 service nova] [instance: 625a8fc1-23fc-4035-855f-3d3a963cdcea] Updating instance_info_cache with network_info: [{"id": "da11baa5-354e-440b-a384-10cd83ff2715", "address": "fa:16:3e:1c:dc:5a", "network": {"id": "54f10e74-ee7b-499c-a6b6-a27d337719cd", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-79581761-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.143", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5f789f3900a347b59c491e9d141fb9e7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe20ef0e-0991-44d7-887d-08dddac0b56b", "external-id": "nsx-vlan-transportzone-991", "segmentation_id": 991, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapda11baa5-35", "ovs_interfaceid": "da11baa5-354e-440b-a384-10cd83ff2715", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1020.387322] env[61839]: DEBUG oslo_concurrency.lockutils [None req-17010d58-34f2-48b6-9ea5-ab1084631cde tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Releasing lock "refresh_cache-a1defab7-8433-411d-b7e2-c31f6a34b8e0" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1020.387647] env[61839]: DEBUG nova.compute.manager [None req-17010d58-34f2-48b6-9ea5-ab1084631cde tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: a1defab7-8433-411d-b7e2-c31f6a34b8e0] Instance network_info: |[{"id": "8b7a9c37-1009-4d15-b75d-68cbeb5d2fc7", "address": "fa:16:3e:a5:ad:80", "network": {"id": "e012ca9f-236a-42fd-a444-759508fbba7b", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-567894073-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b73ee7e490914f54925597f38c8cc05b", "mtu": 
8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9f3a2eb5-353f-45c5-a73b-869626f4bb13", "external-id": "nsx-vlan-transportzone-411", "segmentation_id": 411, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8b7a9c37-10", "ovs_interfaceid": "8b7a9c37-1009-4d15-b75d-68cbeb5d2fc7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1020.390042] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-17010d58-34f2-48b6-9ea5-ab1084631cde tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: a1defab7-8433-411d-b7e2-c31f6a34b8e0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a5:ad:80', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9f3a2eb5-353f-45c5-a73b-869626f4bb13', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8b7a9c37-1009-4d15-b75d-68cbeb5d2fc7', 'vif_model': 'vmxnet3'}] {{(pid=61839) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1020.397810] env[61839]: DEBUG oslo.service.loopingcall [None req-17010d58-34f2-48b6-9ea5-ab1084631cde tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1020.398049] env[61839]: DEBUG oslo_concurrency.lockutils [None req-18b08b02-fe01-4ad2-8f1b-ff5471ae3022 tempest-ServersNegativeTestJSON-610681102 tempest-ServersNegativeTestJSON-610681102-project-member] Lock "86525ea7-af75-4b10-85a1-c0fbab73ea5f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 13.339s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1020.401779] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a1defab7-8433-411d-b7e2-c31f6a34b8e0] Creating VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1020.403040] env[61839]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-000985ee-8fb6-4aab-94d9-cee4b92162a1 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.426970] env[61839]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1020.426970] env[61839]: value = "task-1314969" [ 1020.426970] env[61839]: _type = "Task" [ 1020.426970] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1020.437073] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314969, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.519891] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc407605-f699-4e13-b6f1-15e0abc328c3 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.527592] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa7a6ad6-e946-4f41-bb26-67dd03819eb5 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.559442] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8931f708-2a68-43b5-9a6f-6b339570d487 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.566885] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf878c5b-6b31-4fc2-814e-562f8764e699 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.583846] env[61839]: DEBUG nova.compute.provider_tree [None req-63522841-9a40-4375-9d2e-01b5cde9d722 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1020.621397] env[61839]: DEBUG oslo_concurrency.lockutils [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Acquiring lock "refresh_cache-6f43abec-51e2-40e4-8a0f-5a8617a9a9f8" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1020.621602] env[61839]: DEBUG oslo_concurrency.lockutils [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Acquired lock "refresh_cache-6f43abec-51e2-40e4-8a0f-5a8617a9a9f8" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1020.621766] env[61839]: DEBUG nova.network.neutron [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] [instance: 6f43abec-51e2-40e4-8a0f-5a8617a9a9f8] Forcefully refreshing network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 1020.638332] env[61839]: DEBUG nova.compute.manager [req-273d56be-0ee3-4380-b1ea-2126cac286cd req-64a85d7f-775e-4cc3-80a1-cc51726295fe service nova] [instance: a1defab7-8433-411d-b7e2-c31f6a34b8e0] Received event network-changed-8b7a9c37-1009-4d15-b75d-68cbeb5d2fc7 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1020.638544] env[61839]: DEBUG nova.compute.manager [req-273d56be-0ee3-4380-b1ea-2126cac286cd req-64a85d7f-775e-4cc3-80a1-cc51726295fe service nova] [instance: a1defab7-8433-411d-b7e2-c31f6a34b8e0] Refreshing instance network info cache due to event network-changed-8b7a9c37-1009-4d15-b75d-68cbeb5d2fc7. 
{{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1020.638832] env[61839]: DEBUG oslo_concurrency.lockutils [req-273d56be-0ee3-4380-b1ea-2126cac286cd req-64a85d7f-775e-4cc3-80a1-cc51726295fe service nova] Acquiring lock "refresh_cache-a1defab7-8433-411d-b7e2-c31f6a34b8e0" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1020.640095] env[61839]: DEBUG oslo_concurrency.lockutils [req-273d56be-0ee3-4380-b1ea-2126cac286cd req-64a85d7f-775e-4cc3-80a1-cc51726295fe service nova] Acquired lock "refresh_cache-a1defab7-8433-411d-b7e2-c31f6a34b8e0" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1020.640095] env[61839]: DEBUG nova.network.neutron [req-273d56be-0ee3-4380-b1ea-2126cac286cd req-64a85d7f-775e-4cc3-80a1-cc51726295fe service nova] [instance: a1defab7-8433-411d-b7e2-c31f6a34b8e0] Refreshing network info cache for port 8b7a9c37-1009-4d15-b75d-68cbeb5d2fc7 {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1020.780993] env[61839]: DEBUG oslo_concurrency.lockutils [req-6abc2008-5db0-4daf-a647-cf59082274ea req-82cbd304-6a68-4009-b32d-4327a119eeb6 service nova] Releasing lock "refresh_cache-625a8fc1-23fc-4035-855f-3d3a963cdcea" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1020.937077] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314969, 'name': CreateVM_Task, 'duration_secs': 0.311259} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1020.937271] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a1defab7-8433-411d-b7e2-c31f6a34b8e0] Created VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1020.938011] env[61839]: DEBUG oslo_concurrency.lockutils [None req-17010d58-34f2-48b6-9ea5-ab1084631cde tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1020.938203] env[61839]: DEBUG oslo_concurrency.lockutils [None req-17010d58-34f2-48b6-9ea5-ab1084631cde tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1020.938536] env[61839]: DEBUG oslo_concurrency.lockutils [None req-17010d58-34f2-48b6-9ea5-ab1084631cde tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1020.938791] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-89717e50-3747-45ff-b2df-df908cb18b86 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.943326] env[61839]: DEBUG oslo_vmware.api [None req-17010d58-34f2-48b6-9ea5-ab1084631cde 
tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Waiting for the task: (returnval){ [ 1020.943326] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]523ec1a9-aadb-b71c-abfd-8bac96351c9a" [ 1020.943326] env[61839]: _type = "Task" [ 1020.943326] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1020.951410] env[61839]: DEBUG oslo_vmware.api [None req-17010d58-34f2-48b6-9ea5-ab1084631cde tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]523ec1a9-aadb-b71c-abfd-8bac96351c9a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1021.087146] env[61839]: DEBUG nova.scheduler.client.report [None req-63522841-9a40-4375-9d2e-01b5cde9d722 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1021.355486] env[61839]: DEBUG nova.network.neutron [req-273d56be-0ee3-4380-b1ea-2126cac286cd req-64a85d7f-775e-4cc3-80a1-cc51726295fe service nova] [instance: a1defab7-8433-411d-b7e2-c31f6a34b8e0] Updated VIF entry in instance network info cache for port 8b7a9c37-1009-4d15-b75d-68cbeb5d2fc7. 
{{(pid=61839) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1021.355960] env[61839]: DEBUG nova.network.neutron [req-273d56be-0ee3-4380-b1ea-2126cac286cd req-64a85d7f-775e-4cc3-80a1-cc51726295fe service nova] [instance: a1defab7-8433-411d-b7e2-c31f6a34b8e0] Updating instance_info_cache with network_info: [{"id": "8b7a9c37-1009-4d15-b75d-68cbeb5d2fc7", "address": "fa:16:3e:a5:ad:80", "network": {"id": "e012ca9f-236a-42fd-a444-759508fbba7b", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-567894073-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b73ee7e490914f54925597f38c8cc05b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9f3a2eb5-353f-45c5-a73b-869626f4bb13", "external-id": "nsx-vlan-transportzone-411", "segmentation_id": 411, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8b7a9c37-10", "ovs_interfaceid": "8b7a9c37-1009-4d15-b75d-68cbeb5d2fc7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1021.454675] env[61839]: DEBUG oslo_vmware.api [None req-17010d58-34f2-48b6-9ea5-ab1084631cde tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]523ec1a9-aadb-b71c-abfd-8bac96351c9a, 'name': SearchDatastore_Task, 'duration_secs': 0.009418} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1021.455082] env[61839]: DEBUG oslo_concurrency.lockutils [None req-17010d58-34f2-48b6-9ea5-ab1084631cde tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1021.455392] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-17010d58-34f2-48b6-9ea5-ab1084631cde tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: a1defab7-8433-411d-b7e2-c31f6a34b8e0] Processing image e497cc62-282a-4a70-9770-22d80d8a1013 {{(pid=61839) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1021.455647] env[61839]: DEBUG oslo_concurrency.lockutils [None req-17010d58-34f2-48b6-9ea5-ab1084631cde tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1021.455800] env[61839]: DEBUG oslo_concurrency.lockutils [None req-17010d58-34f2-48b6-9ea5-ab1084631cde tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1021.455987] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-17010d58-34f2-48b6-9ea5-ab1084631cde tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1021.456325] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4af15a3f-fa05-44a5-9ee9-22d8019f7fce {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.477354] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-17010d58-34f2-48b6-9ea5-ab1084631cde tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1021.477354] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-17010d58-34f2-48b6-9ea5-ab1084631cde tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61839) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1021.477820] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0abacc1f-d527-409f-bd87-281c480f9447 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.484494] env[61839]: DEBUG oslo_vmware.api [None req-17010d58-34f2-48b6-9ea5-ab1084631cde tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Waiting for the task: (returnval){ [ 1021.484494] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]5293e3c9-9f2c-f916-5214-131e7ce0d525" [ 1021.484494] env[61839]: _type = "Task" [ 1021.484494] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1021.492973] env[61839]: DEBUG oslo_vmware.api [None req-17010d58-34f2-48b6-9ea5-ab1084631cde tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]5293e3c9-9f2c-f916-5214-131e7ce0d525, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1021.592785] env[61839]: DEBUG oslo_concurrency.lockutils [None req-63522841-9a40-4375-9d2e-01b5cde9d722 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.735s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1021.595161] env[61839]: DEBUG oslo_concurrency.lockutils [None req-21aff03a-aab6-4082-8d60-92ea79709a14 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.234s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1021.596887] env[61839]: INFO nova.compute.claims [None req-21aff03a-aab6-4082-8d60-92ea79709a14 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: a3cb1dd7-00ef-41f3-8db5-68ef9eb8d156] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1021.619877] env[61839]: INFO nova.scheduler.client.report [None req-63522841-9a40-4375-9d2e-01b5cde9d722 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Deleted allocations for instance d718d866-dd6c-4332-b63a-be6850a5a785 [ 1021.858664] env[61839]: DEBUG oslo_concurrency.lockutils [req-273d56be-0ee3-4380-b1ea-2126cac286cd req-64a85d7f-775e-4cc3-80a1-cc51726295fe service nova] Releasing lock "refresh_cache-a1defab7-8433-411d-b7e2-c31f6a34b8e0" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1022.001646] env[61839]: DEBUG oslo_vmware.api [None req-17010d58-34f2-48b6-9ea5-ab1084631cde tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]5293e3c9-9f2c-f916-5214-131e7ce0d525, 'name': SearchDatastore_Task, 'duration_secs': 0.009395} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1022.002523] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1ecc41d6-5be1-497e-9777-e91435836187 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.007792] env[61839]: DEBUG oslo_vmware.api [None req-17010d58-34f2-48b6-9ea5-ab1084631cde tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Waiting for the task: (returnval){ [ 1022.007792] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52701cb9-ca4a-2750-803c-451a932603d3" [ 1022.007792] env[61839]: _type = "Task" [ 1022.007792] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1022.015317] env[61839]: DEBUG oslo_vmware.api [None req-17010d58-34f2-48b6-9ea5-ab1084631cde tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52701cb9-ca4a-2750-803c-451a932603d3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.083844] env[61839]: DEBUG nova.network.neutron [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] [instance: 6f43abec-51e2-40e4-8a0f-5a8617a9a9f8] Updating instance_info_cache with network_info: [{"id": "a65466f1-75e9-41dd-8045-71a68e9b31d2", "address": "fa:16:3e:9a:42:3c", "network": {"id": "100bb090-83a3-4de8-bd2b-f65900cd5a3e", "bridge": "br-int", "label": "tempest-ServersTestJSON-901620943-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5aba1e066cb4400dbbacc92f393962e6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3b67e519-46cf-44ce-b670-4ba4c0c5b658", "external-id": "nsx-vlan-transportzone-110", "segmentation_id": 110, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa65466f1-75", "ovs_interfaceid": "a65466f1-75e9-41dd-8045-71a68e9b31d2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1022.127820] env[61839]: DEBUG oslo_concurrency.lockutils [None req-63522841-9a40-4375-9d2e-01b5cde9d722 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Lock "d718d866-dd6c-4332-b63a-be6850a5a785" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 12.270s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1022.129023] env[61839]: DEBUG oslo_concurrency.lockutils [req-b6288b66-19ee-4c36-ba98-4a94659cc7c5 req-232d726f-5b37-4ea1-891d-679692c50137 service nova] Acquired lock "d718d866-dd6c-4332-b63a-be6850a5a785" {{(pid=61839) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1022.130186] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef9796f5-21c4-4832-bc16-a366d508545f {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.138685] env[61839]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. [ 1022.138843] env[61839]: DEBUG oslo_vmware.api [-] Fault list: [ManagedObjectNotFound] {{(pid=61839) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 1022.139212] env[61839]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-574efae6-d928-4a8d-b0d5-656c7284aef9 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.149925] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28937814-5d9f-4044-b575-86cd239e1ff1 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.175350] env[61839]: ERROR root [req-b6288b66-19ee-4c36-ba98-4a94659cc7c5 req-232d726f-5b37-4ea1-891d-679692c50137 service nova] Original exception being dropped: ['Traceback (most recent call last):\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py", line 377, in request_handler\n response = request(managed_object, **kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 586, in __call__\n return client.invoke(args, kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 728, in invoke\n result = self.send(soapenv, timeout=timeout)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 777, in send\n return self.process_reply(reply.message, None, None)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 840, in process_reply\n raise WebFault(fault, replyroot)\n', "suds.WebFault: Server raised fault: 'The object 'vim.VirtualMachine:vm-281419' has already been deleted or has not been completely created'\n", '\nDuring handling of the above exception, another exception occurred:\n\n', 'Traceback (most recent call last):\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 301, in _invoke_api\n return api_method(*args, **kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/vim_util.py", line 480, in get_object_property\n props = get_object_properties(vim, moref, [property_name],\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/vim_util.py", line 360, in get_object_properties\n retrieve_result = vim.RetrievePropertiesEx(\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py", line 413, in request_handler\n raise exceptions.VimFaultException(fault_list, fault_string,\n', "oslo_vmware.exceptions.VimFaultException: The object 'vim.VirtualMachine:vm-281419' has already been deleted or has not been completely created\nCause: Server raised fault: 'The object 'vim.VirtualMachine:vm-281419' has already been deleted or has not been completely created'\nFaults: [ManagedObjectNotFound]\nDetails: {'obj': 'vm-281419'}\n", '\nDuring handling of the above exception, another exception 
occurred:\n\n', 'Traceback (most recent call last):\n', ' File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 123, in _call_method\n return self.invoke_api(module, method, self.vim, *args,\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 358, in invoke_api\n return _invoke_api(module, method, *args, **kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 122, in func\n return evt.wait()\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait\n result = hub.switch()\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch\n return self.greenlet.switch()\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 122, in _inner\n idle = self.f(*self.args, **self.kw)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 96, in _func\n result = f(*args, **kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 341, in _invoke_api\n raise clazz(str(excep),\n', "oslo_vmware.exceptions.ManagedObjectNotFoundException: The object 'vim.VirtualMachine:vm-281419' has already been deleted or has not been completely created\nCause: Server raised fault: 'The object 'vim.VirtualMachine:vm-281419' has already been deleted or has not been completely created'\nFaults: [ManagedObjectNotFound]\nDetails: {'obj': 'vm-281419'}\n"]: nova.exception.InstanceNotFound: Instance d718d866-dd6c-4332-b63a-be6850a5a785 could not be found. [ 1022.175591] env[61839]: DEBUG oslo_concurrency.lockutils [req-b6288b66-19ee-4c36-ba98-4a94659cc7c5 req-232d726f-5b37-4ea1-891d-679692c50137 service nova] Releasing lock "d718d866-dd6c-4332-b63a-be6850a5a785" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1022.175762] env[61839]: DEBUG nova.compute.manager [req-b6288b66-19ee-4c36-ba98-4a94659cc7c5 req-232d726f-5b37-4ea1-891d-679692c50137 service nova] [instance: d718d866-dd6c-4332-b63a-be6850a5a785] Detach interface failed, port_id=a9bbfd1a-7c62-42c2-ba9b-587b125ef832, reason: Instance d718d866-dd6c-4332-b63a-be6850a5a785 could not be found. {{(pid=61839) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1022.518580] env[61839]: DEBUG oslo_vmware.api [None req-17010d58-34f2-48b6-9ea5-ab1084631cde tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52701cb9-ca4a-2750-803c-451a932603d3, 'name': SearchDatastore_Task, 'duration_secs': 0.009764} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1022.518856] env[61839]: DEBUG oslo_concurrency.lockutils [None req-17010d58-34f2-48b6-9ea5-ab1084631cde tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1022.519111] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-17010d58-34f2-48b6-9ea5-ab1084631cde tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk to [datastore2] a1defab7-8433-411d-b7e2-c31f6a34b8e0/a1defab7-8433-411d-b7e2-c31f6a34b8e0.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1022.519442] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-08137f8f-bee3-43c4-bdc1-39aba451ec36 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.526012] env[61839]: DEBUG oslo_vmware.api [None req-17010d58-34f2-48b6-9ea5-ab1084631cde tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Waiting for the task: (returnval){ [ 1022.526012] env[61839]: value = "task-1314970" [ 1022.526012] env[61839]: _type = "Task" [ 1022.526012] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1022.533188] env[61839]: DEBUG oslo_vmware.api [None req-17010d58-34f2-48b6-9ea5-ab1084631cde tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Task: {'id': task-1314970, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.587055] env[61839]: DEBUG oslo_concurrency.lockutils [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Releasing lock "refresh_cache-6f43abec-51e2-40e4-8a0f-5a8617a9a9f8" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1022.587055] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] [instance: 6f43abec-51e2-40e4-8a0f-5a8617a9a9f8] Updated the network info_cache for instance {{(pid=61839) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9999}} [ 1022.587055] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1022.587277] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1022.587460] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1022.587577] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1022.587720] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1022.587866] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1022.587999] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61839) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 1022.588164] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1022.723704] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2406d9b3-014a-49ba-84da-fc838c8bc6c8 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.733856] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5962a39-8c13-41a0-82a9-e61cca70197e {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.771286] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ca57d0d-6fc1-49f5-ab51-c05d410e3454 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.779334] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8aca46b-1737-4317-b7be-d0f154ddb1fc {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.794217] env[61839]: DEBUG nova.compute.provider_tree [None req-21aff03a-aab6-4082-8d60-92ea79709a14 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1023.036080] env[61839]: DEBUG oslo_vmware.api [None req-17010d58-34f2-48b6-9ea5-ab1084631cde tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Task: {'id': task-1314970, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.501453} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1023.036382] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-17010d58-34f2-48b6-9ea5-ab1084631cde tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk to [datastore2] a1defab7-8433-411d-b7e2-c31f6a34b8e0/a1defab7-8433-411d-b7e2-c31f6a34b8e0.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1023.036606] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-17010d58-34f2-48b6-9ea5-ab1084631cde tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: a1defab7-8433-411d-b7e2-c31f6a34b8e0] Extending root virtual disk to 1048576 {{(pid=61839) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1023.036870] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0ae3a8f3-f86b-4e26-9346-ba697cb3b21f {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.043674] env[61839]: DEBUG oslo_vmware.api [None req-17010d58-34f2-48b6-9ea5-ab1084631cde tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Waiting for the task: (returnval){ [ 1023.043674] env[61839]: value = "task-1314972" [ 1023.043674] env[61839]: _type = "Task" [ 1023.043674] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1023.051273] env[61839]: DEBUG oslo_vmware.api [None req-17010d58-34f2-48b6-9ea5-ab1084631cde tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Task: {'id': task-1314972, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1023.090955] env[61839]: DEBUG oslo_concurrency.lockutils [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1023.296996] env[61839]: DEBUG nova.scheduler.client.report [None req-21aff03a-aab6-4082-8d60-92ea79709a14 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1023.553727] env[61839]: DEBUG oslo_vmware.api [None req-17010d58-34f2-48b6-9ea5-ab1084631cde tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Task: {'id': task-1314972, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066129} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1023.554042] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-17010d58-34f2-48b6-9ea5-ab1084631cde tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: a1defab7-8433-411d-b7e2-c31f6a34b8e0] Extended root virtual disk {{(pid=61839) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1023.554760] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb150c37-70d9-4f3b-915b-f98422e114e2 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.577273] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-17010d58-34f2-48b6-9ea5-ab1084631cde tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: a1defab7-8433-411d-b7e2-c31f6a34b8e0] Reconfiguring VM instance instance-00000065 to attach disk [datastore2] a1defab7-8433-411d-b7e2-c31f6a34b8e0/a1defab7-8433-411d-b7e2-c31f6a34b8e0.vmdk or device None with type sparse {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1023.577992] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7e610c70-33fc-452f-8ab5-6a29605295ef {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.598581] env[61839]: DEBUG oslo_vmware.api [None req-17010d58-34f2-48b6-9ea5-ab1084631cde tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Waiting for the task: (returnval){ [ 1023.598581] env[61839]: value = "task-1314973" [ 1023.598581] env[61839]: _type = "Task" [ 1023.598581] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1023.607411] env[61839]: DEBUG oslo_vmware.api [None req-17010d58-34f2-48b6-9ea5-ab1084631cde tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Task: {'id': task-1314973, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1023.802246] env[61839]: DEBUG oslo_concurrency.lockutils [None req-21aff03a-aab6-4082-8d60-92ea79709a14 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.207s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1023.802817] env[61839]: DEBUG nova.compute.manager [None req-21aff03a-aab6-4082-8d60-92ea79709a14 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: a3cb1dd7-00ef-41f3-8db5-68ef9eb8d156] Start building networks asynchronously for instance. 
{{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1023.805463] env[61839]: DEBUG oslo_concurrency.lockutils [None req-2414a939-66a4-4864-a0bf-c00db9fbd87c tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 9.284s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1023.805692] env[61839]: DEBUG nova.objects.instance [None req-2414a939-66a4-4864-a0bf-c00db9fbd87c tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Lazy-loading 'resources' on Instance uuid 8e9bea05-d6d7-40a8-997d-8c952f596f75 {{(pid=61839) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1024.107842] env[61839]: DEBUG oslo_vmware.api [None req-17010d58-34f2-48b6-9ea5-ab1084631cde tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Task: {'id': task-1314973, 'name': ReconfigVM_Task, 'duration_secs': 0.321875} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1024.108144] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-17010d58-34f2-48b6-9ea5-ab1084631cde tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: a1defab7-8433-411d-b7e2-c31f6a34b8e0] Reconfigured VM instance instance-00000065 to attach disk [datastore2] a1defab7-8433-411d-b7e2-c31f6a34b8e0/a1defab7-8433-411d-b7e2-c31f6a34b8e0.vmdk or device None with type sparse {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1024.108772] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d7e69e19-c137-4224-ae6b-1feb4b2f758b {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.115373] env[61839]: DEBUG oslo_vmware.api [None req-17010d58-34f2-48b6-9ea5-ab1084631cde tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Waiting for the task: (returnval){ [ 1024.115373] env[61839]: value = "task-1314974" [ 1024.115373] env[61839]: _type = "Task" [ 1024.115373] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1024.126711] env[61839]: DEBUG oslo_vmware.api [None req-17010d58-34f2-48b6-9ea5-ab1084631cde tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Task: {'id': task-1314974, 'name': Rename_Task} progress is 5%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.308502] env[61839]: DEBUG nova.compute.utils [None req-21aff03a-aab6-4082-8d60-92ea79709a14 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Using /dev/sd instead of None {{(pid=61839) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1024.312535] env[61839]: DEBUG nova.compute.manager [None req-21aff03a-aab6-4082-8d60-92ea79709a14 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: a3cb1dd7-00ef-41f3-8db5-68ef9eb8d156] Allocating IP information in the background. 
{{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1024.312712] env[61839]: DEBUG nova.network.neutron [None req-21aff03a-aab6-4082-8d60-92ea79709a14 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: a3cb1dd7-00ef-41f3-8db5-68ef9eb8d156] allocate_for_instance() {{(pid=61839) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1024.349539] env[61839]: DEBUG nova.policy [None req-21aff03a-aab6-4082-8d60-92ea79709a14 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7b9ca67c278b4cb9a83ec3c6ce42af5d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5aba1e066cb4400dbbacc92f393962e6', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61839) authorize /opt/stack/nova/nova/policy.py:201}} [ 1024.412917] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-185e9543-0017-4cee-8032-a659abefa3f4 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.420291] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f239987e-5159-46c1-bbb2-ab1d268fbd8d {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.449585] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b2fa921-a0c5-4250-8066-c676fb6e005d {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.457529] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acbc3851-7bf3-4c26-8e02-0a0d34a43ced {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.471062] env[61839]: DEBUG nova.compute.provider_tree [None req-2414a939-66a4-4864-a0bf-c00db9fbd87c tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1024.555925] env[61839]: DEBUG oslo_concurrency.lockutils [None req-ec56a3cb-ba72-45eb-9845-5acb6aea48b4 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Acquiring lock "dbd34858-9806-4d3f-b829-948651056da2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1024.555925] env[61839]: DEBUG oslo_concurrency.lockutils [None req-ec56a3cb-ba72-45eb-9845-5acb6aea48b4 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Lock "dbd34858-9806-4d3f-b829-948651056da2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61839) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1024.591013] env[61839]: DEBUG nova.network.neutron [None req-21aff03a-aab6-4082-8d60-92ea79709a14 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: a3cb1dd7-00ef-41f3-8db5-68ef9eb8d156] Successfully created port: 9e8f6e98-9b33-453e-b0c7-e46d907d02dd {{(pid=61839) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1024.624861] env[61839]: DEBUG oslo_vmware.api [None req-17010d58-34f2-48b6-9ea5-ab1084631cde tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Task: {'id': task-1314974, 'name': Rename_Task, 'duration_secs': 0.134526} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1024.625245] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-17010d58-34f2-48b6-9ea5-ab1084631cde tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: a1defab7-8433-411d-b7e2-c31f6a34b8e0] Powering on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1024.625510] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-017d949e-68d3-4206-9ad7-e9edd21a8cde {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.631841] env[61839]: DEBUG oslo_vmware.api [None req-17010d58-34f2-48b6-9ea5-ab1084631cde tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Waiting for the task: (returnval){ [ 1024.631841] env[61839]: value = "task-1314975" [ 1024.631841] env[61839]: _type = "Task" [ 1024.631841] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1024.639718] env[61839]: DEBUG oslo_vmware.api [None req-17010d58-34f2-48b6-9ea5-ab1084631cde tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Task: {'id': task-1314975, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.813946] env[61839]: DEBUG nova.compute.manager [None req-21aff03a-aab6-4082-8d60-92ea79709a14 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: a3cb1dd7-00ef-41f3-8db5-68ef9eb8d156] Start building block device mappings for instance. 
{{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1024.974460] env[61839]: DEBUG nova.scheduler.client.report [None req-2414a939-66a4-4864-a0bf-c00db9fbd87c tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1025.058110] env[61839]: DEBUG nova.compute.manager [None req-ec56a3cb-ba72-45eb-9845-5acb6aea48b4 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: dbd34858-9806-4d3f-b829-948651056da2] Starting instance... {{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1025.142680] env[61839]: DEBUG oslo_vmware.api [None req-17010d58-34f2-48b6-9ea5-ab1084631cde tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Task: {'id': task-1314975, 'name': PowerOnVM_Task, 'duration_secs': 0.443949} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1025.143344] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-17010d58-34f2-48b6-9ea5-ab1084631cde tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: a1defab7-8433-411d-b7e2-c31f6a34b8e0] Powered on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1025.143432] env[61839]: INFO nova.compute.manager [None req-17010d58-34f2-48b6-9ea5-ab1084631cde tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: a1defab7-8433-411d-b7e2-c31f6a34b8e0] Took 6.72 seconds to spawn the instance on the hypervisor. 
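The spawn sequence traced above (SearchDatastore_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task) is driven by oslo.vmware's wait_for_task, which polls each vCenter task at a fixed interval until it reaches a terminal state; the "progress is N%" and "completed successfully ... duration_secs" lines are emitted on each poll. A minimal, self-contained sketch of that polling loop follows; poll_task, get_task_info, and TaskError are illustrative stand-ins, not the oslo.vmware API.

import time

class TaskError(Exception):
    """Stand-in for oslo.vmware's task-failure exceptions."""

def poll_task(get_task_info, interval=0.5, timeout=300.0):
    # get_task_info() is assumed to return an object shaped like
    # vim.TaskInfo: .state in ('running', 'success', 'error') and
    # .progress as an integer percentage.
    start = time.monotonic()
    while True:
        info = get_task_info()
        if info.state == 'success':
            # mirrors the log's "completed successfully" with duration_secs
            return time.monotonic() - start
        if info.state == 'error':
            raise TaskError(getattr(info, 'error', 'task failed'))
        if time.monotonic() - start > timeout:
            raise TaskError('timed out waiting for task')
        # mirrors the log's "progress is N%" polling lines
        print('progress is %d%%' % (info.progress or 0))
        time.sleep(interval)

In this log the tasks finish quickly (CopyVirtualDisk_Task in ~0.5 s, ExtendVirtualDisk_Task in ~0.07 s), which is why most of them show only a single 0% or 5% poll before the success record.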
[ 1025.143610] env[61839]: DEBUG nova.compute.manager [None req-17010d58-34f2-48b6-9ea5-ab1084631cde tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: a1defab7-8433-411d-b7e2-c31f6a34b8e0] Checking state {{(pid=61839) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1025.144430] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13edf20c-4647-4118-92b0-fd0b463b3fb6 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.478923] env[61839]: DEBUG oslo_concurrency.lockutils [None req-2414a939-66a4-4864-a0bf-c00db9fbd87c tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.673s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1025.482766] env[61839]: DEBUG oslo_concurrency.lockutils [None req-b4c7d2ce-8f72-4614-90ab-317067fd69ed tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.404s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1025.484406] env[61839]: INFO nova.compute.claims [None req-b4c7d2ce-8f72-4614-90ab-317067fd69ed tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 25c574c4-e39b-4009-a562-a4a5bf74a40c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1025.507704] env[61839]: INFO nova.scheduler.client.report [None req-2414a939-66a4-4864-a0bf-c00db9fbd87c tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Deleted allocations for instance 8e9bea05-d6d7-40a8-997d-8c952f596f75 [ 1025.582562] env[61839]: DEBUG oslo_concurrency.lockutils [None req-ec56a3cb-ba72-45eb-9845-5acb6aea48b4 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1025.661269] env[61839]: INFO nova.compute.manager [None req-17010d58-34f2-48b6-9ea5-ab1084631cde tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: a1defab7-8433-411d-b7e2-c31f6a34b8e0] Took 21.00 seconds to build instance. [ 1025.828108] env[61839]: DEBUG nova.compute.manager [None req-21aff03a-aab6-4082-8d60-92ea79709a14 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: a3cb1dd7-00ef-41f3-8db5-68ef9eb8d156] Start spawning the instance on the hypervisor. 
{{(pid=61839) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1025.857293] env[61839]: DEBUG nova.compute.manager [req-7f8fd52c-d0cb-4ca9-88bf-449dbe3e967d req-69d1312b-4908-44c2-9079-68925080b841 service nova] [instance: a1defab7-8433-411d-b7e2-c31f6a34b8e0] Received event network-changed-8b7a9c37-1009-4d15-b75d-68cbeb5d2fc7 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1025.857486] env[61839]: DEBUG nova.compute.manager [req-7f8fd52c-d0cb-4ca9-88bf-449dbe3e967d req-69d1312b-4908-44c2-9079-68925080b841 service nova] [instance: a1defab7-8433-411d-b7e2-c31f6a34b8e0] Refreshing instance network info cache due to event network-changed-8b7a9c37-1009-4d15-b75d-68cbeb5d2fc7. {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1025.857704] env[61839]: DEBUG oslo_concurrency.lockutils [req-7f8fd52c-d0cb-4ca9-88bf-449dbe3e967d req-69d1312b-4908-44c2-9079-68925080b841 service nova] Acquiring lock "refresh_cache-a1defab7-8433-411d-b7e2-c31f6a34b8e0" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1025.857839] env[61839]: DEBUG oslo_concurrency.lockutils [req-7f8fd52c-d0cb-4ca9-88bf-449dbe3e967d req-69d1312b-4908-44c2-9079-68925080b841 service nova] Acquired lock "refresh_cache-a1defab7-8433-411d-b7e2-c31f6a34b8e0" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1025.859746] env[61839]: DEBUG nova.network.neutron [req-7f8fd52c-d0cb-4ca9-88bf-449dbe3e967d req-69d1312b-4908-44c2-9079-68925080b841 service nova] [instance: a1defab7-8433-411d-b7e2-c31f6a34b8e0] Refreshing network info cache for port 8b7a9c37-1009-4d15-b75d-68cbeb5d2fc7 {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1025.865559] env[61839]: DEBUG nova.virt.hardware [None req-21aff03a-aab6-4082-8d60-92ea79709a14 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-18T16:51:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-18T16:50:57Z,direct_url=,disk_format='vmdk',id=e497cc62-282a-4a70-9770-22d80d8a1013,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a98e034276e44534a9feace637762da7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-18T16:50:58Z,virtual_size=,visibility=), allow threads: False {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1025.865803] env[61839]: DEBUG nova.virt.hardware [None req-21aff03a-aab6-4082-8d60-92ea79709a14 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Flavor limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1025.865983] env[61839]: DEBUG nova.virt.hardware [None req-21aff03a-aab6-4082-8d60-92ea79709a14 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Image limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1025.866211] env[61839]: DEBUG 
nova.virt.hardware [None req-21aff03a-aab6-4082-8d60-92ea79709a14 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Flavor pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1025.866407] env[61839]: DEBUG nova.virt.hardware [None req-21aff03a-aab6-4082-8d60-92ea79709a14 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Image pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1025.866574] env[61839]: DEBUG nova.virt.hardware [None req-21aff03a-aab6-4082-8d60-92ea79709a14 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1025.866790] env[61839]: DEBUG nova.virt.hardware [None req-21aff03a-aab6-4082-8d60-92ea79709a14 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1025.866955] env[61839]: DEBUG nova.virt.hardware [None req-21aff03a-aab6-4082-8d60-92ea79709a14 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1025.867365] env[61839]: DEBUG nova.virt.hardware [None req-21aff03a-aab6-4082-8d60-92ea79709a14 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Got 1 possible topologies {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1025.867556] env[61839]: DEBUG nova.virt.hardware [None req-21aff03a-aab6-4082-8d60-92ea79709a14 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1025.868178] env[61839]: DEBUG nova.virt.hardware [None req-21aff03a-aab6-4082-8d60-92ea79709a14 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1025.869455] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6c1fdce-e426-4f07-addb-c4c0dd8e43c0 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.883605] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7590e18d-b061-4879-9a2d-5b2d5cda51b0 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.017369] env[61839]: DEBUG oslo_concurrency.lockutils [None req-2414a939-66a4-4864-a0bf-c00db9fbd87c tempest-ServerDiskConfigTestJSON-292130239 tempest-ServerDiskConfigTestJSON-292130239-project-member] Lock "8e9bea05-d6d7-40a8-997d-8c952f596f75" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 14.444s {{(pid=61839) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1026.164244] env[61839]: DEBUG oslo_concurrency.lockutils [None req-17010d58-34f2-48b6-9ea5-ab1084631cde tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Lock "a1defab7-8433-411d-b7e2-c31f6a34b8e0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.511s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1026.290827] env[61839]: DEBUG nova.network.neutron [None req-21aff03a-aab6-4082-8d60-92ea79709a14 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: a3cb1dd7-00ef-41f3-8db5-68ef9eb8d156] Successfully updated port: 9e8f6e98-9b33-453e-b0c7-e46d907d02dd {{(pid=61839) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1026.603432] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c132a11-a0e8-4542-901c-73775a52e5fe {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.611377] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14f2f3b0-f0d8-4ab1-85ab-beb144306d52 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.642826] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea5e9d98-4175-4a69-ab54-da2eb7f06dbc {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.652329] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00458499-00d6-4881-8cff-5d79ffa6f9c9 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.667181] env[61839]: DEBUG nova.compute.provider_tree [None req-b4c7d2ce-8f72-4614-90ab-317067fd69ed tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1026.792206] env[61839]: DEBUG oslo_concurrency.lockutils [None req-21aff03a-aab6-4082-8d60-92ea79709a14 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Acquiring lock "refresh_cache-a3cb1dd7-00ef-41f3-8db5-68ef9eb8d156" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1026.792369] env[61839]: DEBUG oslo_concurrency.lockutils [None req-21aff03a-aab6-4082-8d60-92ea79709a14 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Acquired lock "refresh_cache-a3cb1dd7-00ef-41f3-8db5-68ef9eb8d156" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1026.792469] env[61839]: DEBUG nova.network.neutron [None req-21aff03a-aab6-4082-8d60-92ea79709a14 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: a3cb1dd7-00ef-41f3-8db5-68ef9eb8d156] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1026.841962] env[61839]: DEBUG 
nova.network.neutron [req-7f8fd52c-d0cb-4ca9-88bf-449dbe3e967d req-69d1312b-4908-44c2-9079-68925080b841 service nova] [instance: a1defab7-8433-411d-b7e2-c31f6a34b8e0] Updated VIF entry in instance network info cache for port 8b7a9c37-1009-4d15-b75d-68cbeb5d2fc7. {{(pid=61839) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1026.841962] env[61839]: DEBUG nova.network.neutron [req-7f8fd52c-d0cb-4ca9-88bf-449dbe3e967d req-69d1312b-4908-44c2-9079-68925080b841 service nova] [instance: a1defab7-8433-411d-b7e2-c31f6a34b8e0] Updating instance_info_cache with network_info: [{"id": "8b7a9c37-1009-4d15-b75d-68cbeb5d2fc7", "address": "fa:16:3e:a5:ad:80", "network": {"id": "e012ca9f-236a-42fd-a444-759508fbba7b", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-567894073-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.132", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b73ee7e490914f54925597f38c8cc05b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9f3a2eb5-353f-45c5-a73b-869626f4bb13", "external-id": "nsx-vlan-transportzone-411", "segmentation_id": 411, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8b7a9c37-10", "ovs_interfaceid": "8b7a9c37-1009-4d15-b75d-68cbeb5d2fc7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1027.170037] env[61839]: DEBUG nova.scheduler.client.report [None req-b4c7d2ce-8f72-4614-90ab-317067fd69ed tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1027.342758] env[61839]: DEBUG nova.network.neutron [None req-21aff03a-aab6-4082-8d60-92ea79709a14 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: a3cb1dd7-00ef-41f3-8db5-68ef9eb8d156] Instance cache missing network info. 
{{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1027.345057] env[61839]: DEBUG oslo_concurrency.lockutils [req-7f8fd52c-d0cb-4ca9-88bf-449dbe3e967d req-69d1312b-4908-44c2-9079-68925080b841 service nova] Releasing lock "refresh_cache-a1defab7-8433-411d-b7e2-c31f6a34b8e0" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1027.519089] env[61839]: DEBUG nova.network.neutron [None req-21aff03a-aab6-4082-8d60-92ea79709a14 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: a3cb1dd7-00ef-41f3-8db5-68ef9eb8d156] Updating instance_info_cache with network_info: [{"id": "9e8f6e98-9b33-453e-b0c7-e46d907d02dd", "address": "fa:16:3e:90:ec:c8", "network": {"id": "100bb090-83a3-4de8-bd2b-f65900cd5a3e", "bridge": "br-int", "label": "tempest-ServersTestJSON-901620943-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5aba1e066cb4400dbbacc92f393962e6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3b67e519-46cf-44ce-b670-4ba4c0c5b658", "external-id": "nsx-vlan-transportzone-110", "segmentation_id": 110, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9e8f6e98-9b", "ovs_interfaceid": "9e8f6e98-9b33-453e-b0c7-e46d907d02dd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1027.675517] env[61839]: DEBUG oslo_concurrency.lockutils [None req-b4c7d2ce-8f72-4614-90ab-317067fd69ed tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.193s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1027.676105] env[61839]: DEBUG nova.compute.manager [None req-b4c7d2ce-8f72-4614-90ab-317067fd69ed tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 25c574c4-e39b-4009-a562-a4a5bf74a40c] Start building networks asynchronously for instance. 
{{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1027.679145] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a8d767a2-2dd7-4819-9575-f19f76b11134 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.078s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1027.680596] env[61839]: INFO nova.compute.claims [None req-a8d767a2-2dd7-4819-9575-f19f76b11134 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: 15794971-10d8-4234-8a72-90c940dae90c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1027.889600] env[61839]: DEBUG nova.compute.manager [req-fb3e9640-9c29-4d7c-ab20-533ecb199645 req-6b2c7dc7-a2ef-4658-840a-cf914b225b20 service nova] [instance: a3cb1dd7-00ef-41f3-8db5-68ef9eb8d156] Received event network-vif-plugged-9e8f6e98-9b33-453e-b0c7-e46d907d02dd {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1027.889654] env[61839]: DEBUG oslo_concurrency.lockutils [req-fb3e9640-9c29-4d7c-ab20-533ecb199645 req-6b2c7dc7-a2ef-4658-840a-cf914b225b20 service nova] Acquiring lock "a3cb1dd7-00ef-41f3-8db5-68ef9eb8d156-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1027.889963] env[61839]: DEBUG oslo_concurrency.lockutils [req-fb3e9640-9c29-4d7c-ab20-533ecb199645 req-6b2c7dc7-a2ef-4658-840a-cf914b225b20 service nova] Lock "a3cb1dd7-00ef-41f3-8db5-68ef9eb8d156-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1027.890345] env[61839]: DEBUG oslo_concurrency.lockutils [req-fb3e9640-9c29-4d7c-ab20-533ecb199645 req-6b2c7dc7-a2ef-4658-840a-cf914b225b20 service nova] Lock "a3cb1dd7-00ef-41f3-8db5-68ef9eb8d156-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1027.890508] env[61839]: DEBUG nova.compute.manager [req-fb3e9640-9c29-4d7c-ab20-533ecb199645 req-6b2c7dc7-a2ef-4658-840a-cf914b225b20 service nova] [instance: a3cb1dd7-00ef-41f3-8db5-68ef9eb8d156] No waiting events found dispatching network-vif-plugged-9e8f6e98-9b33-453e-b0c7-e46d907d02dd {{(pid=61839) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1027.890812] env[61839]: WARNING nova.compute.manager [req-fb3e9640-9c29-4d7c-ab20-533ecb199645 req-6b2c7dc7-a2ef-4658-840a-cf914b225b20 service nova] [instance: a3cb1dd7-00ef-41f3-8db5-68ef9eb8d156] Received unexpected event network-vif-plugged-9e8f6e98-9b33-453e-b0c7-e46d907d02dd for instance with vm_state building and task_state spawning. 
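[annotation] The lock lines above are emitted by oslo.concurrency rather than hand-written in Nova. The "Acquiring lock" / "Acquired lock" / "Releasing lock" triplets (lockutils.py:310/313/331) come from the lock() context manager, while the 'acquired by ... :: waited N.NNNs' and '"released" by ... :: held N.NNNs' pairs (lockutils.py:402/407/421) come from the synchronized() decorator's inner wrapper. A minimal sketch of both patterns, assuming only that oslo.concurrency is installed; the function and lock names here are illustrative, not Nova's actual code:
    from oslo_concurrency import lockutils
    @lockutils.synchronized('compute_resources')
    def claim_resources():
        # The decorator's wrapper logs 'Acquiring lock "compute_resources" by
        # "claim_resources"', then '... acquired ... :: waited N.NNNs' before
        # this body runs, and '... "released" ... :: held N.NNNs' after it
        # returns; the waited/held durations are measured by the wrapper.
        pass  # critical section, e.g. a resource-tracker claim
    def refresh_cache(instance_uuid):
        # The context-manager form produces the 'Acquiring/Acquired/Releasing
        # lock' triplets seen around the refresh_cache-* locks above.
        with lockutils.lock('refresh_cache-%s' % instance_uuid):
            pass  # rebuild the instance network info cache here
[end annotation]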
[ 1027.891121] env[61839]: DEBUG nova.compute.manager [req-fb3e9640-9c29-4d7c-ab20-533ecb199645 req-6b2c7dc7-a2ef-4658-840a-cf914b225b20 service nova] [instance: a3cb1dd7-00ef-41f3-8db5-68ef9eb8d156] Received event network-changed-9e8f6e98-9b33-453e-b0c7-e46d907d02dd {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1027.891429] env[61839]: DEBUG nova.compute.manager [req-fb3e9640-9c29-4d7c-ab20-533ecb199645 req-6b2c7dc7-a2ef-4658-840a-cf914b225b20 service nova] [instance: a3cb1dd7-00ef-41f3-8db5-68ef9eb8d156] Refreshing instance network info cache due to event network-changed-9e8f6e98-9b33-453e-b0c7-e46d907d02dd. {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1027.891720] env[61839]: DEBUG oslo_concurrency.lockutils [req-fb3e9640-9c29-4d7c-ab20-533ecb199645 req-6b2c7dc7-a2ef-4658-840a-cf914b225b20 service nova] Acquiring lock "refresh_cache-a3cb1dd7-00ef-41f3-8db5-68ef9eb8d156" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1028.022463] env[61839]: DEBUG oslo_concurrency.lockutils [None req-21aff03a-aab6-4082-8d60-92ea79709a14 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Releasing lock "refresh_cache-a3cb1dd7-00ef-41f3-8db5-68ef9eb8d156" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1028.022957] env[61839]: DEBUG nova.compute.manager [None req-21aff03a-aab6-4082-8d60-92ea79709a14 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: a3cb1dd7-00ef-41f3-8db5-68ef9eb8d156] Instance network_info: |[{"id": "9e8f6e98-9b33-453e-b0c7-e46d907d02dd", "address": "fa:16:3e:90:ec:c8", "network": {"id": "100bb090-83a3-4de8-bd2b-f65900cd5a3e", "bridge": "br-int", "label": "tempest-ServersTestJSON-901620943-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5aba1e066cb4400dbbacc92f393962e6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3b67e519-46cf-44ce-b670-4ba4c0c5b658", "external-id": "nsx-vlan-transportzone-110", "segmentation_id": 110, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9e8f6e98-9b", "ovs_interfaceid": "9e8f6e98-9b33-453e-b0c7-e46d907d02dd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1028.023873] env[61839]: DEBUG oslo_concurrency.lockutils [req-fb3e9640-9c29-4d7c-ab20-533ecb199645 req-6b2c7dc7-a2ef-4658-840a-cf914b225b20 service nova] Acquired lock "refresh_cache-a3cb1dd7-00ef-41f3-8db5-68ef9eb8d156" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1028.024133] env[61839]: DEBUG nova.network.neutron [req-fb3e9640-9c29-4d7c-ab20-533ecb199645 req-6b2c7dc7-a2ef-4658-840a-cf914b225b20 service nova] [instance: a3cb1dd7-00ef-41f3-8db5-68ef9eb8d156] Refreshing network info cache for port 
9e8f6e98-9b33-453e-b0c7-e46d907d02dd {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1028.025500] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-21aff03a-aab6-4082-8d60-92ea79709a14 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: a3cb1dd7-00ef-41f3-8db5-68ef9eb8d156] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:90:ec:c8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3b67e519-46cf-44ce-b670-4ba4c0c5b658', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9e8f6e98-9b33-453e-b0c7-e46d907d02dd', 'vif_model': 'vmxnet3'}] {{(pid=61839) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1028.033127] env[61839]: DEBUG oslo.service.loopingcall [None req-21aff03a-aab6-4082-8d60-92ea79709a14 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1028.033602] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a3cb1dd7-00ef-41f3-8db5-68ef9eb8d156] Creating VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1028.033844] env[61839]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bcdb96e1-35b5-49c0-bb75-41cd83ffa846 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.056767] env[61839]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1028.056767] env[61839]: value = "task-1314976" [ 1028.056767] env[61839]: _type = "Task" [ 1028.056767] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1028.065729] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314976, 'name': CreateVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.184830] env[61839]: DEBUG nova.compute.utils [None req-b4c7d2ce-8f72-4614-90ab-317067fd69ed tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Using /dev/sd instead of None {{(pid=61839) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1028.188644] env[61839]: DEBUG nova.compute.manager [None req-b4c7d2ce-8f72-4614-90ab-317067fd69ed tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 25c574c4-e39b-4009-a562-a4a5bf74a40c] Allocating IP information in the background. 
{{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1028.189085] env[61839]: DEBUG nova.network.neutron [None req-b4c7d2ce-8f72-4614-90ab-317067fd69ed tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 25c574c4-e39b-4009-a562-a4a5bf74a40c] allocate_for_instance() {{(pid=61839) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1028.280422] env[61839]: DEBUG nova.policy [None req-b4c7d2ce-8f72-4614-90ab-317067fd69ed tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'cd83e8a2f63d4ae38c5989c1e3824e3e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '48d8c406ff504d71bba5fb74caf11c14', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61839) authorize /opt/stack/nova/nova/policy.py:201}} [ 1028.567732] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314976, 'name': CreateVM_Task} progress is 99%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.582977] env[61839]: DEBUG nova.network.neutron [None req-b4c7d2ce-8f72-4614-90ab-317067fd69ed tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 25c574c4-e39b-4009-a562-a4a5bf74a40c] Successfully created port: 98c98e8e-abd7-4f81-9969-154f0e05908e {{(pid=61839) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1028.692058] env[61839]: DEBUG nova.compute.manager [None req-b4c7d2ce-8f72-4614-90ab-317067fd69ed tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 25c574c4-e39b-4009-a562-a4a5bf74a40c] Start building block device mappings for instance. {{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1028.741020] env[61839]: DEBUG nova.network.neutron [req-fb3e9640-9c29-4d7c-ab20-533ecb199645 req-6b2c7dc7-a2ef-4658-840a-cf914b225b20 service nova] [instance: a3cb1dd7-00ef-41f3-8db5-68ef9eb8d156] Updated VIF entry in instance network info cache for port 9e8f6e98-9b33-453e-b0c7-e46d907d02dd. 
{{(pid=61839) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1028.742304] env[61839]: DEBUG nova.network.neutron [req-fb3e9640-9c29-4d7c-ab20-533ecb199645 req-6b2c7dc7-a2ef-4658-840a-cf914b225b20 service nova] [instance: a3cb1dd7-00ef-41f3-8db5-68ef9eb8d156] Updating instance_info_cache with network_info: [{"id": "9e8f6e98-9b33-453e-b0c7-e46d907d02dd", "address": "fa:16:3e:90:ec:c8", "network": {"id": "100bb090-83a3-4de8-bd2b-f65900cd5a3e", "bridge": "br-int", "label": "tempest-ServersTestJSON-901620943-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5aba1e066cb4400dbbacc92f393962e6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3b67e519-46cf-44ce-b670-4ba4c0c5b658", "external-id": "nsx-vlan-transportzone-110", "segmentation_id": 110, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9e8f6e98-9b", "ovs_interfaceid": "9e8f6e98-9b33-453e-b0c7-e46d907d02dd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1028.832508] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b597b24-821d-4ead-8773-53ff56c6fe63 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.841617] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35f9cf7f-655e-4fe3-bc8e-ccb6cd9b5c4d {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.871090] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3f2fc2e-61b8-4876-8c79-620b92a3dcbe {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.879227] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f2a67a8-5991-4be0-994a-586d09a40c35 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.893832] env[61839]: DEBUG nova.compute.provider_tree [None req-a8d767a2-2dd7-4819-9575-f19f76b11134 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1029.068026] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314976, 'name': CreateVM_Task, 'duration_secs': 0.545693} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1029.068127] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a3cb1dd7-00ef-41f3-8db5-68ef9eb8d156] Created VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1029.068828] env[61839]: DEBUG oslo_concurrency.lockutils [None req-21aff03a-aab6-4082-8d60-92ea79709a14 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1029.069009] env[61839]: DEBUG oslo_concurrency.lockutils [None req-21aff03a-aab6-4082-8d60-92ea79709a14 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1029.069375] env[61839]: DEBUG oslo_concurrency.lockutils [None req-21aff03a-aab6-4082-8d60-92ea79709a14 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1029.069641] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-69f7f0d8-578d-461d-98a0-809edb8a6a8e {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.074541] env[61839]: DEBUG oslo_vmware.api [None req-21aff03a-aab6-4082-8d60-92ea79709a14 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Waiting for the task: (returnval){ [ 1029.074541] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]525c51a1-6913-1d93-571d-1fcb013b3a07" [ 1029.074541] env[61839]: _type = "Task" [ 1029.074541] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1029.081855] env[61839]: DEBUG oslo_vmware.api [None req-21aff03a-aab6-4082-8d60-92ea79709a14 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]525c51a1-6913-1d93-571d-1fcb013b3a07, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.245024] env[61839]: DEBUG oslo_concurrency.lockutils [req-fb3e9640-9c29-4d7c-ab20-533ecb199645 req-6b2c7dc7-a2ef-4658-840a-cf914b225b20 service nova] Releasing lock "refresh_cache-a3cb1dd7-00ef-41f3-8db5-68ef9eb8d156" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1029.397218] env[61839]: DEBUG nova.scheduler.client.report [None req-a8d767a2-2dd7-4819-9575-f19f76b11134 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1029.585816] env[61839]: DEBUG oslo_vmware.api [None req-21aff03a-aab6-4082-8d60-92ea79709a14 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]525c51a1-6913-1d93-571d-1fcb013b3a07, 'name': SearchDatastore_Task, 'duration_secs': 0.038205} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1029.586134] env[61839]: DEBUG oslo_concurrency.lockutils [None req-21aff03a-aab6-4082-8d60-92ea79709a14 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1029.586427] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-21aff03a-aab6-4082-8d60-92ea79709a14 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: a3cb1dd7-00ef-41f3-8db5-68ef9eb8d156] Processing image e497cc62-282a-4a70-9770-22d80d8a1013 {{(pid=61839) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1029.586719] env[61839]: DEBUG oslo_concurrency.lockutils [None req-21aff03a-aab6-4082-8d60-92ea79709a14 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1029.586882] env[61839]: DEBUG oslo_concurrency.lockutils [None req-21aff03a-aab6-4082-8d60-92ea79709a14 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1029.587086] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-21aff03a-aab6-4082-8d60-92ea79709a14 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Creating directory with path [datastore2] devstack-image-cache_base 
{{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1029.587378] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a44c8dae-50be-4eca-9189-9b370a1173e8 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.595778] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-21aff03a-aab6-4082-8d60-92ea79709a14 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1029.595958] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-21aff03a-aab6-4082-8d60-92ea79709a14 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61839) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1029.596677] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5ca81ede-b686-4e55-84d1-84c9e9f9ea1d {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.601600] env[61839]: DEBUG oslo_vmware.api [None req-21aff03a-aab6-4082-8d60-92ea79709a14 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Waiting for the task: (returnval){ [ 1029.601600] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]522f0307-4fd1-e502-56bf-6a0511a793c3" [ 1029.601600] env[61839]: _type = "Task" [ 1029.601600] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1029.610745] env[61839]: DEBUG oslo_vmware.api [None req-21aff03a-aab6-4082-8d60-92ea79709a14 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]522f0307-4fd1-e502-56bf-6a0511a793c3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.705058] env[61839]: DEBUG nova.compute.manager [None req-b4c7d2ce-8f72-4614-90ab-317067fd69ed tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 25c574c4-e39b-4009-a562-a4a5bf74a40c] Start spawning the instance on the hypervisor. 
{{(pid=61839) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1029.731158] env[61839]: DEBUG nova.virt.hardware [None req-b4c7d2ce-8f72-4614-90ab-317067fd69ed tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-18T16:51:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-18T16:50:57Z,direct_url=<?>,disk_format='vmdk',id=e497cc62-282a-4a70-9770-22d80d8a1013,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a98e034276e44534a9feace637762da7',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-10-18T16:50:58Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1029.731503] env[61839]: DEBUG nova.virt.hardware [None req-b4c7d2ce-8f72-4614-90ab-317067fd69ed tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Flavor limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1029.731671] env[61839]: DEBUG nova.virt.hardware [None req-b4c7d2ce-8f72-4614-90ab-317067fd69ed tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Image limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1029.731861] env[61839]: DEBUG nova.virt.hardware [None req-b4c7d2ce-8f72-4614-90ab-317067fd69ed tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Flavor pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1029.732023] env[61839]: DEBUG nova.virt.hardware [None req-b4c7d2ce-8f72-4614-90ab-317067fd69ed tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Image pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1029.732180] env[61839]: DEBUG nova.virt.hardware [None req-b4c7d2ce-8f72-4614-90ab-317067fd69ed tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1029.732388] env[61839]: DEBUG nova.virt.hardware [None req-b4c7d2ce-8f72-4614-90ab-317067fd69ed tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1029.732551] env[61839]: DEBUG nova.virt.hardware [None req-b4c7d2ce-8f72-4614-90ab-317067fd69ed tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1029.732720] env[61839]: DEBUG 
nova.virt.hardware [None req-b4c7d2ce-8f72-4614-90ab-317067fd69ed tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Got 1 possible topologies {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1029.732910] env[61839]: DEBUG nova.virt.hardware [None req-b4c7d2ce-8f72-4614-90ab-317067fd69ed tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1029.733172] env[61839]: DEBUG nova.virt.hardware [None req-b4c7d2ce-8f72-4614-90ab-317067fd69ed tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1029.734034] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-409dc67d-50c3-4128-895d-0f1a901b03cf {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.741957] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2d36124-960a-4ea5-8d44-ec53a1f0e0b5 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.902529] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a8d767a2-2dd7-4819-9575-f19f76b11134 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.223s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1029.903807] env[61839]: DEBUG nova.compute.manager [None req-a8d767a2-2dd7-4819-9575-f19f76b11134 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: 15794971-10d8-4234-8a72-90c940dae90c] Start building networks asynchronously for instance. 
{{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1029.905711] env[61839]: DEBUG oslo_concurrency.lockutils [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 6.815s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1029.905888] env[61839]: DEBUG oslo_concurrency.lockutils [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1029.906055] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61839) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1029.906392] env[61839]: DEBUG oslo_concurrency.lockutils [None req-ec56a3cb-ba72-45eb-9845-5acb6aea48b4 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.324s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1029.907965] env[61839]: INFO nova.compute.claims [None req-ec56a3cb-ba72-45eb-9845-5acb6aea48b4 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: dbd34858-9806-4d3f-b829-948651056da2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1029.911551] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48f65868-6b14-4720-959e-a2a35fa659f7 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.920326] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d2c72c1-6b2c-4092-b1d4-efcc2a4816e1 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.935569] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-667b12dd-cbbb-407d-bbd5-f272cf26d5eb {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.943018] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4861748f-4fa9-4f30-a54b-ee48d43ad990 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.974373] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180053MB free_disk=134GB free_vcpus=48 pci_devices=None {{(pid=61839) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1029.974534] env[61839]: DEBUG oslo_concurrency.lockutils [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Acquiring lock "compute_resources" by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1029.976685] env[61839]: DEBUG nova.compute.manager [req-6d5c505e-8572-408a-b6a4-36a62fdb28fe req-0ce88a17-7f69-4f6d-9121-a5d3aa81322b service nova] [instance: 25c574c4-e39b-4009-a562-a4a5bf74a40c] Received event network-vif-plugged-98c98e8e-abd7-4f81-9969-154f0e05908e {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1029.977038] env[61839]: DEBUG oslo_concurrency.lockutils [req-6d5c505e-8572-408a-b6a4-36a62fdb28fe req-0ce88a17-7f69-4f6d-9121-a5d3aa81322b service nova] Acquiring lock "25c574c4-e39b-4009-a562-a4a5bf74a40c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1029.977122] env[61839]: DEBUG oslo_concurrency.lockutils [req-6d5c505e-8572-408a-b6a4-36a62fdb28fe req-0ce88a17-7f69-4f6d-9121-a5d3aa81322b service nova] Lock "25c574c4-e39b-4009-a562-a4a5bf74a40c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1029.977277] env[61839]: DEBUG oslo_concurrency.lockutils [req-6d5c505e-8572-408a-b6a4-36a62fdb28fe req-0ce88a17-7f69-4f6d-9121-a5d3aa81322b service nova] Lock "25c574c4-e39b-4009-a562-a4a5bf74a40c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1029.977449] env[61839]: DEBUG nova.compute.manager [req-6d5c505e-8572-408a-b6a4-36a62fdb28fe req-0ce88a17-7f69-4f6d-9121-a5d3aa81322b service nova] [instance: 25c574c4-e39b-4009-a562-a4a5bf74a40c] No waiting events found dispatching network-vif-plugged-98c98e8e-abd7-4f81-9969-154f0e05908e {{(pid=61839) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1029.977645] env[61839]: WARNING nova.compute.manager [req-6d5c505e-8572-408a-b6a4-36a62fdb28fe req-0ce88a17-7f69-4f6d-9121-a5d3aa81322b service nova] [instance: 25c574c4-e39b-4009-a562-a4a5bf74a40c] Received unexpected event network-vif-plugged-98c98e8e-abd7-4f81-9969-154f0e05908e for instance with vm_state building and task_state spawning. [ 1030.113454] env[61839]: DEBUG oslo_vmware.api [None req-21aff03a-aab6-4082-8d60-92ea79709a14 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]522f0307-4fd1-e502-56bf-6a0511a793c3, 'name': SearchDatastore_Task, 'duration_secs': 0.009445} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1030.114238] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-62605b45-5ea8-4636-977c-2754524a832d {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.119044] env[61839]: DEBUG oslo_vmware.api [None req-21aff03a-aab6-4082-8d60-92ea79709a14 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Waiting for the task: (returnval){ [ 1030.119044] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52c23f84-3188-9d3a-3f4b-5875af2afb0d" [ 1030.119044] env[61839]: _type = "Task" [ 1030.119044] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1030.126065] env[61839]: DEBUG oslo_vmware.api [None req-21aff03a-aab6-4082-8d60-92ea79709a14 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52c23f84-3188-9d3a-3f4b-5875af2afb0d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1030.412900] env[61839]: DEBUG nova.compute.utils [None req-a8d767a2-2dd7-4819-9575-f19f76b11134 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Using /dev/sd instead of None {{(pid=61839) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1030.414389] env[61839]: DEBUG nova.compute.manager [None req-a8d767a2-2dd7-4819-9575-f19f76b11134 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: 15794971-10d8-4234-8a72-90c940dae90c] Allocating IP information in the background. 
{{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1030.414570] env[61839]: DEBUG nova.network.neutron [None req-a8d767a2-2dd7-4819-9575-f19f76b11134 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: 15794971-10d8-4234-8a72-90c940dae90c] allocate_for_instance() {{(pid=61839) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1030.455964] env[61839]: DEBUG nova.policy [None req-a8d767a2-2dd7-4819-9575-f19f76b11134 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '430b14eaa0e94ef39fb0f95269448ade', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '25686a503d044467a1d641f14e14c65c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61839) authorize /opt/stack/nova/nova/policy.py:201}} [ 1030.519309] env[61839]: DEBUG nova.network.neutron [None req-b4c7d2ce-8f72-4614-90ab-317067fd69ed tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 25c574c4-e39b-4009-a562-a4a5bf74a40c] Successfully updated port: 98c98e8e-abd7-4f81-9969-154f0e05908e {{(pid=61839) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1030.630572] env[61839]: DEBUG oslo_vmware.api [None req-21aff03a-aab6-4082-8d60-92ea79709a14 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52c23f84-3188-9d3a-3f4b-5875af2afb0d, 'name': SearchDatastore_Task, 'duration_secs': 0.013802} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1030.631812] env[61839]: DEBUG oslo_concurrency.lockutils [None req-21aff03a-aab6-4082-8d60-92ea79709a14 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1030.632099] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-21aff03a-aab6-4082-8d60-92ea79709a14 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk to [datastore2] a3cb1dd7-00ef-41f3-8db5-68ef9eb8d156/a3cb1dd7-00ef-41f3-8db5-68ef9eb8d156.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1030.633469] env[61839]: DEBUG nova.compute.manager [req-925edca9-a27b-4bbb-83d4-b00fa0174622 req-008e36d3-66bf-450f-916c-143b7f556f84 service nova] [instance: 25c574c4-e39b-4009-a562-a4a5bf74a40c] Received event network-changed-98c98e8e-abd7-4f81-9969-154f0e05908e {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1030.633729] env[61839]: DEBUG nova.compute.manager [req-925edca9-a27b-4bbb-83d4-b00fa0174622 req-008e36d3-66bf-450f-916c-143b7f556f84 service nova] [instance: 25c574c4-e39b-4009-a562-a4a5bf74a40c] Refreshing instance network info cache due to event network-changed-98c98e8e-abd7-4f81-9969-154f0e05908e. {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1030.633948] env[61839]: DEBUG oslo_concurrency.lockutils [req-925edca9-a27b-4bbb-83d4-b00fa0174622 req-008e36d3-66bf-450f-916c-143b7f556f84 service nova] Acquiring lock "refresh_cache-25c574c4-e39b-4009-a562-a4a5bf74a40c" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1030.634200] env[61839]: DEBUG oslo_concurrency.lockutils [req-925edca9-a27b-4bbb-83d4-b00fa0174622 req-008e36d3-66bf-450f-916c-143b7f556f84 service nova] Acquired lock "refresh_cache-25c574c4-e39b-4009-a562-a4a5bf74a40c" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1030.634421] env[61839]: DEBUG nova.network.neutron [req-925edca9-a27b-4bbb-83d4-b00fa0174622 req-008e36d3-66bf-450f-916c-143b7f556f84 service nova] [instance: 25c574c4-e39b-4009-a562-a4a5bf74a40c] Refreshing network info cache for port 98c98e8e-abd7-4f81-9969-154f0e05908e {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1030.635442] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0c8242fe-1813-44aa-a994-e3575577e986 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.649818] env[61839]: DEBUG oslo_vmware.api [None req-21aff03a-aab6-4082-8d60-92ea79709a14 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Waiting for the task: (returnval){ [ 1030.649818] env[61839]: value = "task-1314977" [ 1030.649818] env[61839]: _type = "Task" [ 1030.649818] env[61839]: } to complete. 
{{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1030.658559] env[61839]: DEBUG oslo_vmware.api [None req-21aff03a-aab6-4082-8d60-92ea79709a14 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': task-1314977, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1030.672549] env[61839]: DEBUG nova.network.neutron [req-925edca9-a27b-4bbb-83d4-b00fa0174622 req-008e36d3-66bf-450f-916c-143b7f556f84 service nova] [instance: 25c574c4-e39b-4009-a562-a4a5bf74a40c] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1030.699009] env[61839]: DEBUG nova.network.neutron [None req-a8d767a2-2dd7-4819-9575-f19f76b11134 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: 15794971-10d8-4234-8a72-90c940dae90c] Successfully created port: bd6d90a8-d812-4325-b8eb-787e8a4476b5 {{(pid=61839) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1030.918373] env[61839]: DEBUG nova.compute.manager [None req-a8d767a2-2dd7-4819-9575-f19f76b11134 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: 15794971-10d8-4234-8a72-90c940dae90c] Start building block device mappings for instance. {{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1030.981008] env[61839]: DEBUG nova.network.neutron [req-925edca9-a27b-4bbb-83d4-b00fa0174622 req-008e36d3-66bf-450f-916c-143b7f556f84 service nova] [instance: 25c574c4-e39b-4009-a562-a4a5bf74a40c] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1031.024570] env[61839]: DEBUG oslo_concurrency.lockutils [None req-b4c7d2ce-8f72-4614-90ab-317067fd69ed tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Acquiring lock "refresh_cache-25c574c4-e39b-4009-a562-a4a5bf74a40c" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1031.060972] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c19404a-bfa6-44c2-8dc7-3dbca551eca5 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.069775] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d871247-f552-477a-bbce-d2ba1186f223 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.103668] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69d655c8-102d-407d-ae44-0bef97df3fdc {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.111705] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d32992fd-37ed-493f-98ae-9eaa2ce67465 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.125827] env[61839]: DEBUG nova.compute.provider_tree [None req-ec56a3cb-ba72-45eb-9845-5acb6aea48b4 tempest-AttachInterfacesTestJSON-1805184072 
tempest-AttachInterfacesTestJSON-1805184072-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1031.160635] env[61839]: DEBUG oslo_vmware.api [None req-21aff03a-aab6-4082-8d60-92ea79709a14 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': task-1314977, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.454001} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1031.160903] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-21aff03a-aab6-4082-8d60-92ea79709a14 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk to [datastore2] a3cb1dd7-00ef-41f3-8db5-68ef9eb8d156/a3cb1dd7-00ef-41f3-8db5-68ef9eb8d156.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1031.161139] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-21aff03a-aab6-4082-8d60-92ea79709a14 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: a3cb1dd7-00ef-41f3-8db5-68ef9eb8d156] Extending root virtual disk to 1048576 {{(pid=61839) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1031.161390] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d2cf0782-d505-4678-a0d0-eb593279696b {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.167708] env[61839]: DEBUG oslo_vmware.api [None req-21aff03a-aab6-4082-8d60-92ea79709a14 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Waiting for the task: (returnval){ [ 1031.167708] env[61839]: value = "task-1314978" [ 1031.167708] env[61839]: _type = "Task" [ 1031.167708] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1031.175569] env[61839]: DEBUG oslo_vmware.api [None req-21aff03a-aab6-4082-8d60-92ea79709a14 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': task-1314978, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1031.484967] env[61839]: DEBUG oslo_concurrency.lockutils [req-925edca9-a27b-4bbb-83d4-b00fa0174622 req-008e36d3-66bf-450f-916c-143b7f556f84 service nova] Releasing lock "refresh_cache-25c574c4-e39b-4009-a562-a4a5bf74a40c" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1031.485429] env[61839]: DEBUG oslo_concurrency.lockutils [None req-b4c7d2ce-8f72-4614-90ab-317067fd69ed tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Acquired lock "refresh_cache-25c574c4-e39b-4009-a562-a4a5bf74a40c" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1031.485594] env[61839]: DEBUG nova.network.neutron [None req-b4c7d2ce-8f72-4614-90ab-317067fd69ed tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 25c574c4-e39b-4009-a562-a4a5bf74a40c] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1031.629135] env[61839]: DEBUG nova.scheduler.client.report [None req-ec56a3cb-ba72-45eb-9845-5acb6aea48b4 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1031.679022] env[61839]: DEBUG oslo_vmware.api [None req-21aff03a-aab6-4082-8d60-92ea79709a14 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': task-1314978, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.062219} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1031.679022] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-21aff03a-aab6-4082-8d60-92ea79709a14 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: a3cb1dd7-00ef-41f3-8db5-68ef9eb8d156] Extended root virtual disk {{(pid=61839) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1031.679401] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-899bfc22-0c6f-4acf-b60c-354db6b4e8fd {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.700806] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-21aff03a-aab6-4082-8d60-92ea79709a14 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: a3cb1dd7-00ef-41f3-8db5-68ef9eb8d156] Reconfiguring VM instance instance-00000066 to attach disk [datastore2] a3cb1dd7-00ef-41f3-8db5-68ef9eb8d156/a3cb1dd7-00ef-41f3-8db5-68ef9eb8d156.vmdk or device None with type sparse {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1031.701074] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5877a725-09a3-42b5-a33e-9d202797a6e8 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.720629] env[61839]: DEBUG oslo_vmware.api [None req-21aff03a-aab6-4082-8d60-92ea79709a14 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Waiting for the task: (returnval){ [ 1031.720629] env[61839]: value = "task-1314979" [ 1031.720629] env[61839]: _type = "Task" [ 1031.720629] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1031.729443] env[61839]: DEBUG oslo_vmware.api [None req-21aff03a-aab6-4082-8d60-92ea79709a14 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': task-1314979, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1031.930322] env[61839]: DEBUG nova.compute.manager [None req-a8d767a2-2dd7-4819-9575-f19f76b11134 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: 15794971-10d8-4234-8a72-90c940dae90c] Start spawning the instance on the hypervisor. 
{{(pid=61839) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1031.959443] env[61839]: DEBUG nova.virt.hardware [None req-a8d767a2-2dd7-4819-9575-f19f76b11134 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-18T16:51:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-18T16:50:57Z,direct_url=<?>,disk_format='vmdk',id=e497cc62-282a-4a70-9770-22d80d8a1013,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a98e034276e44534a9feace637762da7',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-10-18T16:50:58Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1031.959692] env[61839]: DEBUG nova.virt.hardware [None req-a8d767a2-2dd7-4819-9575-f19f76b11134 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Flavor limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1031.959856] env[61839]: DEBUG nova.virt.hardware [None req-a8d767a2-2dd7-4819-9575-f19f76b11134 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Image limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1031.960062] env[61839]: DEBUG nova.virt.hardware [None req-a8d767a2-2dd7-4819-9575-f19f76b11134 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Flavor pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1031.960226] env[61839]: DEBUG nova.virt.hardware [None req-a8d767a2-2dd7-4819-9575-f19f76b11134 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Image pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1031.960379] env[61839]: DEBUG nova.virt.hardware [None req-a8d767a2-2dd7-4819-9575-f19f76b11134 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1031.960595] env[61839]: DEBUG nova.virt.hardware [None req-a8d767a2-2dd7-4819-9575-f19f76b11134 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1031.960854] env[61839]: DEBUG nova.virt.hardware [None req-a8d767a2-2dd7-4819-9575-f19f76b11134 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1031.961168] env[61839]: DEBUG nova.virt.hardware [None 
req-a8d767a2-2dd7-4819-9575-f19f76b11134 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Got 1 possible topologies {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1031.961461] env[61839]: DEBUG nova.virt.hardware [None req-a8d767a2-2dd7-4819-9575-f19f76b11134 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1031.961774] env[61839]: DEBUG nova.virt.hardware [None req-a8d767a2-2dd7-4819-9575-f19f76b11134 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1031.962726] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b7ebe3f-769c-419e-8af9-2d08f912b735 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.971297] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4fefa4a-1378-4d52-a1d9-be071c2a240b {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.016731] env[61839]: DEBUG nova.network.neutron [None req-b4c7d2ce-8f72-4614-90ab-317067fd69ed tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 25c574c4-e39b-4009-a562-a4a5bf74a40c] Instance cache missing network info. 
{{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1032.090523] env[61839]: DEBUG nova.compute.manager [req-4a72d508-739b-404c-97ab-33ebb3d2b048 req-30792ebb-8363-4980-8719-bb08e0061c59 service nova] [instance: 15794971-10d8-4234-8a72-90c940dae90c] Received event network-vif-plugged-bd6d90a8-d812-4325-b8eb-787e8a4476b5 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1032.090752] env[61839]: DEBUG oslo_concurrency.lockutils [req-4a72d508-739b-404c-97ab-33ebb3d2b048 req-30792ebb-8363-4980-8719-bb08e0061c59 service nova] Acquiring lock "15794971-10d8-4234-8a72-90c940dae90c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1032.090960] env[61839]: DEBUG oslo_concurrency.lockutils [req-4a72d508-739b-404c-97ab-33ebb3d2b048 req-30792ebb-8363-4980-8719-bb08e0061c59 service nova] Lock "15794971-10d8-4234-8a72-90c940dae90c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1032.091172] env[61839]: DEBUG oslo_concurrency.lockutils [req-4a72d508-739b-404c-97ab-33ebb3d2b048 req-30792ebb-8363-4980-8719-bb08e0061c59 service nova] Lock "15794971-10d8-4234-8a72-90c940dae90c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1032.091298] env[61839]: DEBUG nova.compute.manager [req-4a72d508-739b-404c-97ab-33ebb3d2b048 req-30792ebb-8363-4980-8719-bb08e0061c59 service nova] [instance: 15794971-10d8-4234-8a72-90c940dae90c] No waiting events found dispatching network-vif-plugged-bd6d90a8-d812-4325-b8eb-787e8a4476b5 {{(pid=61839) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1032.091466] env[61839]: WARNING nova.compute.manager [req-4a72d508-739b-404c-97ab-33ebb3d2b048 req-30792ebb-8363-4980-8719-bb08e0061c59 service nova] [instance: 15794971-10d8-4234-8a72-90c940dae90c] Received unexpected event network-vif-plugged-bd6d90a8-d812-4325-b8eb-787e8a4476b5 for instance with vm_state building and task_state spawning. [ 1032.134312] env[61839]: DEBUG oslo_concurrency.lockutils [None req-ec56a3cb-ba72-45eb-9845-5acb6aea48b4 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.228s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1032.135118] env[61839]: DEBUG nova.compute.manager [None req-ec56a3cb-ba72-45eb-9845-5acb6aea48b4 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: dbd34858-9806-4d3f-b829-948651056da2] Start building networks asynchronously for instance. 
{{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1032.139341] env[61839]: DEBUG oslo_concurrency.lockutils [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 2.165s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1032.148100] env[61839]: DEBUG nova.network.neutron [None req-b4c7d2ce-8f72-4614-90ab-317067fd69ed tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 25c574c4-e39b-4009-a562-a4a5bf74a40c] Updating instance_info_cache with network_info: [{"id": "98c98e8e-abd7-4f81-9969-154f0e05908e", "address": "fa:16:3e:34:ef:0a", "network": {"id": "90aa5d36-d3ac-4a62-9f9d-7578cd9ac466", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-2104030849-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "48d8c406ff504d71bba5fb74caf11c14", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eed34ae1-5f7f-4deb-9db8-85eaa1e60c29", "external-id": "nsx-vlan-transportzone-780", "segmentation_id": 780, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap98c98e8e-ab", "ovs_interfaceid": "98c98e8e-abd7-4f81-9969-154f0e05908e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1032.182492] env[61839]: DEBUG nova.network.neutron [None req-a8d767a2-2dd7-4819-9575-f19f76b11134 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: 15794971-10d8-4234-8a72-90c940dae90c] Successfully updated port: bd6d90a8-d812-4325-b8eb-787e8a4476b5 {{(pid=61839) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1032.230827] env[61839]: DEBUG oslo_vmware.api [None req-21aff03a-aab6-4082-8d60-92ea79709a14 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': task-1314979, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1032.644725] env[61839]: DEBUG nova.compute.utils [None req-ec56a3cb-ba72-45eb-9845-5acb6aea48b4 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Using /dev/sd instead of None {{(pid=61839) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1032.651533] env[61839]: DEBUG nova.compute.manager [None req-ec56a3cb-ba72-45eb-9845-5acb6aea48b4 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: dbd34858-9806-4d3f-b829-948651056da2] Allocating IP information in the background. 
{{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1032.651719] env[61839]: DEBUG nova.network.neutron [None req-ec56a3cb-ba72-45eb-9845-5acb6aea48b4 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: dbd34858-9806-4d3f-b829-948651056da2] allocate_for_instance() {{(pid=61839) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1032.653584] env[61839]: DEBUG oslo_concurrency.lockutils [None req-b4c7d2ce-8f72-4614-90ab-317067fd69ed tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Releasing lock "refresh_cache-25c574c4-e39b-4009-a562-a4a5bf74a40c" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1032.653851] env[61839]: DEBUG nova.compute.manager [None req-b4c7d2ce-8f72-4614-90ab-317067fd69ed tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 25c574c4-e39b-4009-a562-a4a5bf74a40c] Instance network_info: |[{"id": "98c98e8e-abd7-4f81-9969-154f0e05908e", "address": "fa:16:3e:34:ef:0a", "network": {"id": "90aa5d36-d3ac-4a62-9f9d-7578cd9ac466", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-2104030849-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "48d8c406ff504d71bba5fb74caf11c14", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eed34ae1-5f7f-4deb-9db8-85eaa1e60c29", "external-id": "nsx-vlan-transportzone-780", "segmentation_id": 780, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap98c98e8e-ab", "ovs_interfaceid": "98c98e8e-abd7-4f81-9969-154f0e05908e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1032.654246] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-b4c7d2ce-8f72-4614-90ab-317067fd69ed tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 25c574c4-e39b-4009-a562-a4a5bf74a40c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:34:ef:0a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'eed34ae1-5f7f-4deb-9db8-85eaa1e60c29', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '98c98e8e-abd7-4f81-9969-154f0e05908e', 'vif_model': 'vmxnet3'}] {{(pid=61839) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1032.662273] env[61839]: DEBUG oslo.service.loopingcall [None req-b4c7d2ce-8f72-4614-90ab-317067fd69ed tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1032.663610] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 25c574c4-e39b-4009-a562-a4a5bf74a40c] Creating VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1032.663843] env[61839]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bdf27338-06e4-4347-88b9-d33403f93ed0 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.683680] env[61839]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1032.683680] env[61839]: value = "task-1314980" [ 1032.683680] env[61839]: _type = "Task" [ 1032.683680] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1032.686840] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a8d767a2-2dd7-4819-9575-f19f76b11134 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Acquiring lock "refresh_cache-15794971-10d8-4234-8a72-90c940dae90c" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1032.686974] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a8d767a2-2dd7-4819-9575-f19f76b11134 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Acquired lock "refresh_cache-15794971-10d8-4234-8a72-90c940dae90c" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1032.687133] env[61839]: DEBUG nova.network.neutron [None req-a8d767a2-2dd7-4819-9575-f19f76b11134 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: 15794971-10d8-4234-8a72-90c940dae90c] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1032.693140] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314980, 'name': CreateVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1032.698941] env[61839]: DEBUG nova.policy [None req-ec56a3cb-ba72-45eb-9845-5acb6aea48b4 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '76a29e808031416ab8895e89c337be6e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7e03467b7fba46a9aac1562a1cb8368e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61839) authorize /opt/stack/nova/nova/policy.py:201}} [ 1032.732230] env[61839]: DEBUG oslo_vmware.api [None req-21aff03a-aab6-4082-8d60-92ea79709a14 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': task-1314979, 'name': ReconfigVM_Task, 'duration_secs': 0.635268} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1032.732613] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-21aff03a-aab6-4082-8d60-92ea79709a14 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: a3cb1dd7-00ef-41f3-8db5-68ef9eb8d156] Reconfigured VM instance instance-00000066 to attach disk [datastore2] a3cb1dd7-00ef-41f3-8db5-68ef9eb8d156/a3cb1dd7-00ef-41f3-8db5-68ef9eb8d156.vmdk or device None with type sparse {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1032.733285] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3e146825-8bc8-45c0-9cb1-18dded9f7a44 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.739959] env[61839]: DEBUG oslo_vmware.api [None req-21aff03a-aab6-4082-8d60-92ea79709a14 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Waiting for the task: (returnval){ [ 1032.739959] env[61839]: value = "task-1314981" [ 1032.739959] env[61839]: _type = "Task" [ 1032.739959] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1032.749116] env[61839]: DEBUG oslo_vmware.api [None req-21aff03a-aab6-4082-8d60-92ea79709a14 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': task-1314981, 'name': Rename_Task} progress is 5%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1032.973555] env[61839]: DEBUG nova.network.neutron [None req-ec56a3cb-ba72-45eb-9845-5acb6aea48b4 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: dbd34858-9806-4d3f-b829-948651056da2] Successfully created port: 7ee0f326-ead2-4849-823d-9d652c5c339b {{(pid=61839) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1033.155663] env[61839]: DEBUG nova.compute.manager [None req-ec56a3cb-ba72-45eb-9845-5acb6aea48b4 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: dbd34858-9806-4d3f-b829-948651056da2] Start building block device mappings for instance. {{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1033.178597] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance 6f43abec-51e2-40e4-8a0f-5a8617a9a9f8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1033.178921] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance 7f7b3f51-3e96-49f1-a84a-81ae649e6938 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1033.179013] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance 625a8fc1-23fc-4035-855f-3d3a963cdcea actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1033.183207] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance a1defab7-8433-411d-b7e2-c31f6a34b8e0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1033.183207] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance a3cb1dd7-00ef-41f3-8db5-68ef9eb8d156 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1033.183207] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance 25c574c4-e39b-4009-a562-a4a5bf74a40c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1033.183207] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance 15794971-10d8-4234-8a72-90c940dae90c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1033.183207] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance dbd34858-9806-4d3f-b829-948651056da2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1033.183207] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Total usable vcpus: 48, total allocated vcpus: 8 {{(pid=61839) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1033.183207] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2048MB phys_disk=200GB used_disk=8GB total_vcpus=48 used_vcpus=8 pci_stats=[] {{(pid=61839) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1033.199634] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314980, 'name': CreateVM_Task, 'duration_secs': 0.312249} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1033.200743] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 25c574c4-e39b-4009-a562-a4a5bf74a40c] Created VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1033.200743] env[61839]: DEBUG oslo_concurrency.lockutils [None req-b4c7d2ce-8f72-4614-90ab-317067fd69ed tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1033.200743] env[61839]: DEBUG oslo_concurrency.lockutils [None req-b4c7d2ce-8f72-4614-90ab-317067fd69ed tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1033.200964] env[61839]: DEBUG oslo_concurrency.lockutils [None req-b4c7d2ce-8f72-4614-90ab-317067fd69ed tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1033.201225] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f4b2829c-1906-436c-b5f1-39e8abf06773 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.209334] env[61839]: DEBUG oslo_vmware.api [None req-b4c7d2ce-8f72-4614-90ab-317067fd69ed tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Waiting for the task: (returnval){ [ 1033.209334] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52724399-b851-83db-d66f-310d14815660" [ 1033.209334] env[61839]: _type = "Task" [ 1033.209334] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1033.218975] env[61839]: DEBUG oslo_vmware.api [None req-b4c7d2ce-8f72-4614-90ab-317067fd69ed tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52724399-b851-83db-d66f-310d14815660, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1033.221699] env[61839]: DEBUG nova.network.neutron [None req-a8d767a2-2dd7-4819-9575-f19f76b11134 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: 15794971-10d8-4234-8a72-90c940dae90c] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1033.251025] env[61839]: DEBUG oslo_vmware.api [None req-21aff03a-aab6-4082-8d60-92ea79709a14 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': task-1314981, 'name': Rename_Task, 'duration_secs': 0.130281} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1033.251116] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-21aff03a-aab6-4082-8d60-92ea79709a14 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: a3cb1dd7-00ef-41f3-8db5-68ef9eb8d156] Powering on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1033.251361] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c54dbba4-a8a7-4889-966e-15acfc1db7c3 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.260294] env[61839]: DEBUG oslo_vmware.api [None req-21aff03a-aab6-4082-8d60-92ea79709a14 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Waiting for the task: (returnval){ [ 1033.260294] env[61839]: value = "task-1314982" [ 1033.260294] env[61839]: _type = "Task" [ 1033.260294] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1033.276111] env[61839]: DEBUG oslo_vmware.api [None req-21aff03a-aab6-4082-8d60-92ea79709a14 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': task-1314982, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1033.301291] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-167f4ea7-cc78-47bc-9709-6486e17331b6 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.308848] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03161bed-349d-4ee8-8fda-0531c0012638 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.340681] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d6b0ee2-7360-4f51-91b8-aa23842a2b33 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.348788] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-638c7a29-2794-43f9-9504-6a7b847c8851 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.361939] env[61839]: DEBUG nova.compute.provider_tree [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1033.397464] env[61839]: DEBUG nova.network.neutron [None req-a8d767a2-2dd7-4819-9575-f19f76b11134 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: 15794971-10d8-4234-8a72-90c940dae90c] Updating instance_info_cache with network_info: [{"id": "bd6d90a8-d812-4325-b8eb-787e8a4476b5", "address": "fa:16:3e:cb:3b:c2", "network": {"id": "47b25b31-1262-485a-a847-2918f7fce488", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-914746239-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, 
"ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "25686a503d044467a1d641f14e14c65c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8c58d99d-ec12-4fc3-ab39-042b3f8cbb89", "external-id": "nsx-vlan-transportzone-44", "segmentation_id": 44, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbd6d90a8-d8", "ovs_interfaceid": "bd6d90a8-d812-4325-b8eb-787e8a4476b5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1033.720198] env[61839]: DEBUG oslo_vmware.api [None req-b4c7d2ce-8f72-4614-90ab-317067fd69ed tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52724399-b851-83db-d66f-310d14815660, 'name': SearchDatastore_Task, 'duration_secs': 0.009337} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1033.720530] env[61839]: DEBUG oslo_concurrency.lockutils [None req-b4c7d2ce-8f72-4614-90ab-317067fd69ed tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1033.720777] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-b4c7d2ce-8f72-4614-90ab-317067fd69ed tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 25c574c4-e39b-4009-a562-a4a5bf74a40c] Processing image e497cc62-282a-4a70-9770-22d80d8a1013 {{(pid=61839) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1033.721084] env[61839]: DEBUG oslo_concurrency.lockutils [None req-b4c7d2ce-8f72-4614-90ab-317067fd69ed tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1033.721177] env[61839]: DEBUG oslo_concurrency.lockutils [None req-b4c7d2ce-8f72-4614-90ab-317067fd69ed tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1033.721368] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-b4c7d2ce-8f72-4614-90ab-317067fd69ed tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1033.721654] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-29e29d7a-4006-4e6a-be56-32cf0c0285a8 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.730024] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-b4c7d2ce-8f72-4614-90ab-317067fd69ed tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1033.730209] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-b4c7d2ce-8f72-4614-90ab-317067fd69ed tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61839) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1033.730905] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c3885263-be94-4877-b958-50ac21037a4f {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.737446] env[61839]: DEBUG oslo_vmware.api [None req-b4c7d2ce-8f72-4614-90ab-317067fd69ed tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Waiting for the task: (returnval){ [ 1033.737446] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]5233c305-9201-5d10-6af1-7531591177d4" [ 1033.737446] env[61839]: _type = "Task" [ 1033.737446] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1033.744315] env[61839]: DEBUG oslo_vmware.api [None req-b4c7d2ce-8f72-4614-90ab-317067fd69ed tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]5233c305-9201-5d10-6af1-7531591177d4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1033.769221] env[61839]: DEBUG oslo_vmware.api [None req-21aff03a-aab6-4082-8d60-92ea79709a14 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': task-1314982, 'name': PowerOnVM_Task, 'duration_secs': 0.454186} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1033.769470] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-21aff03a-aab6-4082-8d60-92ea79709a14 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: a3cb1dd7-00ef-41f3-8db5-68ef9eb8d156] Powered on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1033.769683] env[61839]: INFO nova.compute.manager [None req-21aff03a-aab6-4082-8d60-92ea79709a14 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: a3cb1dd7-00ef-41f3-8db5-68ef9eb8d156] Took 7.94 seconds to spawn the instance on the hypervisor. 
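The spawn sequence traced above (CreateVM_Task, SearchDatastore_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task) is driven by oslo.vmware's wait_for_task loop, which accounts for the recurring line pattern in this log: a "Waiting for the task" record when polling starts (api.py:397), a "progress is N%" record per poll iteration (api.py:434), and a "completed successfully" record once the task reaches a terminal state (api.py:444). Below is a minimal sketch of that poll-until-done pattern, not the real oslo.vmware implementation; the get_task_info callable and its state/progress/result/error fields are illustrative stand-ins.

    import time

    POLL_INTERVAL = 0.5  # seconds between polls; the real loop interval is configurable


    class TaskFailed(RuntimeError):
        """Raised when the polled task ends in an error state."""


    def wait_for_task(get_task_info, task_ref, poll_interval=POLL_INTERVAL):
        # Announce that polling has begun (cf. the api.py:397 log lines above).
        print("Waiting for the task: %s to complete." % task_ref)
        while True:
            info = get_task_info(task_ref)  # assumed helper: fetch current task state
            if info.state == "success":
                # Terminal state reached (cf. the api.py:444 completion lines).
                print("Task: %s completed successfully." % task_ref)
                return info.result
            if info.state == "error":
                raise TaskFailed("Task %s failed: %s" % (task_ref, info.error))
            # Still running: log progress and poll again (cf. the api.py:434 lines).
            print("Task: %s progress is %s%%." % (task_ref, info.progress))
            time.sleep(poll_interval)

Each reconfigure, rename, and power-on step in the log goes through such a wrapper, which is why every vCenter task appears as one "Waiting" record, zero or more progress records, and a final completion record with a duration_secs value.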
[ 1033.769855] env[61839]: DEBUG nova.compute.manager [None req-21aff03a-aab6-4082-8d60-92ea79709a14 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: a3cb1dd7-00ef-41f3-8db5-68ef9eb8d156] Checking state {{(pid=61839) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1033.770689] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f5b1dc5-fa28-4a0e-9ad1-c0df140f9ed2 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.866093] env[61839]: DEBUG nova.scheduler.client.report [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1033.899874] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a8d767a2-2dd7-4819-9575-f19f76b11134 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Releasing lock "refresh_cache-15794971-10d8-4234-8a72-90c940dae90c" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1033.900300] env[61839]: DEBUG nova.compute.manager [None req-a8d767a2-2dd7-4819-9575-f19f76b11134 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: 15794971-10d8-4234-8a72-90c940dae90c] Instance network_info: |[{"id": "bd6d90a8-d812-4325-b8eb-787e8a4476b5", "address": "fa:16:3e:cb:3b:c2", "network": {"id": "47b25b31-1262-485a-a847-2918f7fce488", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-914746239-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "25686a503d044467a1d641f14e14c65c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8c58d99d-ec12-4fc3-ab39-042b3f8cbb89", "external-id": "nsx-vlan-transportzone-44", "segmentation_id": 44, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbd6d90a8-d8", "ovs_interfaceid": "bd6d90a8-d812-4325-b8eb-787e8a4476b5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1033.901092] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-a8d767a2-2dd7-4819-9575-f19f76b11134 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: 15794971-10d8-4234-8a72-90c940dae90c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:cb:3b:c2', 'network_ref': {'type': 
'OpaqueNetwork', 'network-id': '8c58d99d-ec12-4fc3-ab39-042b3f8cbb89', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'bd6d90a8-d812-4325-b8eb-787e8a4476b5', 'vif_model': 'vmxnet3'}] {{(pid=61839) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1033.908605] env[61839]: DEBUG oslo.service.loopingcall [None req-a8d767a2-2dd7-4819-9575-f19f76b11134 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1033.908836] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 15794971-10d8-4234-8a72-90c940dae90c] Creating VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1033.909505] env[61839]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4bc18622-d293-4ca1-8e88-19d0f5694429 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.930728] env[61839]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1033.930728] env[61839]: value = "task-1314983" [ 1033.930728] env[61839]: _type = "Task" [ 1033.930728] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1033.939660] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314983, 'name': CreateVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.117039] env[61839]: DEBUG nova.compute.manager [req-b0609836-55ac-470d-8b72-4d4949d52c7f req-65116439-4e34-469b-9857-c0d5f5eec3ec service nova] [instance: 15794971-10d8-4234-8a72-90c940dae90c] Received event network-changed-bd6d90a8-d812-4325-b8eb-787e8a4476b5 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1034.117280] env[61839]: DEBUG nova.compute.manager [req-b0609836-55ac-470d-8b72-4d4949d52c7f req-65116439-4e34-469b-9857-c0d5f5eec3ec service nova] [instance: 15794971-10d8-4234-8a72-90c940dae90c] Refreshing instance network info cache due to event network-changed-bd6d90a8-d812-4325-b8eb-787e8a4476b5. 
{{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1034.117495] env[61839]: DEBUG oslo_concurrency.lockutils [req-b0609836-55ac-470d-8b72-4d4949d52c7f req-65116439-4e34-469b-9857-c0d5f5eec3ec service nova] Acquiring lock "refresh_cache-15794971-10d8-4234-8a72-90c940dae90c" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1034.117664] env[61839]: DEBUG oslo_concurrency.lockutils [req-b0609836-55ac-470d-8b72-4d4949d52c7f req-65116439-4e34-469b-9857-c0d5f5eec3ec service nova] Acquired lock "refresh_cache-15794971-10d8-4234-8a72-90c940dae90c" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1034.117831] env[61839]: DEBUG nova.network.neutron [req-b0609836-55ac-470d-8b72-4d4949d52c7f req-65116439-4e34-469b-9857-c0d5f5eec3ec service nova] [instance: 15794971-10d8-4234-8a72-90c940dae90c] Refreshing network info cache for port bd6d90a8-d812-4325-b8eb-787e8a4476b5 {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1034.166484] env[61839]: DEBUG nova.compute.manager [None req-ec56a3cb-ba72-45eb-9845-5acb6aea48b4 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: dbd34858-9806-4d3f-b829-948651056da2] Start spawning the instance on the hypervisor. {{(pid=61839) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1034.195717] env[61839]: DEBUG nova.virt.hardware [None req-ec56a3cb-ba72-45eb-9845-5acb6aea48b4 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-18T16:51:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-18T16:50:57Z,direct_url=,disk_format='vmdk',id=e497cc62-282a-4a70-9770-22d80d8a1013,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a98e034276e44534a9feace637762da7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-18T16:50:58Z,virtual_size=,visibility=), allow threads: False {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1034.196141] env[61839]: DEBUG nova.virt.hardware [None req-ec56a3cb-ba72-45eb-9845-5acb6aea48b4 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Flavor limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1034.196428] env[61839]: DEBUG nova.virt.hardware [None req-ec56a3cb-ba72-45eb-9845-5acb6aea48b4 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Image limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1034.196754] env[61839]: DEBUG nova.virt.hardware [None req-ec56a3cb-ba72-45eb-9845-5acb6aea48b4 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Flavor pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1034.197048] env[61839]: DEBUG 
nova.virt.hardware [None req-ec56a3cb-ba72-45eb-9845-5acb6aea48b4 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Image pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1034.197337] env[61839]: DEBUG nova.virt.hardware [None req-ec56a3cb-ba72-45eb-9845-5acb6aea48b4 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1034.197705] env[61839]: DEBUG nova.virt.hardware [None req-ec56a3cb-ba72-45eb-9845-5acb6aea48b4 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1034.198019] env[61839]: DEBUG nova.virt.hardware [None req-ec56a3cb-ba72-45eb-9845-5acb6aea48b4 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1034.198338] env[61839]: DEBUG nova.virt.hardware [None req-ec56a3cb-ba72-45eb-9845-5acb6aea48b4 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Got 1 possible topologies {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1034.198626] env[61839]: DEBUG nova.virt.hardware [None req-ec56a3cb-ba72-45eb-9845-5acb6aea48b4 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1034.199433] env[61839]: DEBUG nova.virt.hardware [None req-ec56a3cb-ba72-45eb-9845-5acb6aea48b4 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1034.199869] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7a6bcec-eff6-4880-895b-7272b03a380b {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.211421] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-083414d0-ff85-4c6b-8276-10c0e943badf {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.247784] env[61839]: DEBUG oslo_vmware.api [None req-b4c7d2ce-8f72-4614-90ab-317067fd69ed tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]5233c305-9201-5d10-6af1-7531591177d4, 'name': SearchDatastore_Task, 'duration_secs': 0.031347} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1034.248556] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-99e173ed-af09-417d-90ba-3c63aeb668d9 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.254945] env[61839]: DEBUG oslo_vmware.api [None req-b4c7d2ce-8f72-4614-90ab-317067fd69ed tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Waiting for the task: (returnval){ [ 1034.254945] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52d3e208-771e-68d9-24f4-fadacace30f4" [ 1034.254945] env[61839]: _type = "Task" [ 1034.254945] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1034.262775] env[61839]: DEBUG oslo_vmware.api [None req-b4c7d2ce-8f72-4614-90ab-317067fd69ed tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52d3e208-771e-68d9-24f4-fadacace30f4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.287395] env[61839]: INFO nova.compute.manager [None req-21aff03a-aab6-4082-8d60-92ea79709a14 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: a3cb1dd7-00ef-41f3-8db5-68ef9eb8d156] Took 20.94 seconds to build instance. [ 1034.375382] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61839) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1034.375658] env[61839]: DEBUG oslo_concurrency.lockutils [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.236s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1034.444054] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314983, 'name': CreateVM_Task, 'duration_secs': 0.453114} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1034.444229] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 15794971-10d8-4234-8a72-90c940dae90c] Created VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1034.444944] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a8d767a2-2dd7-4819-9575-f19f76b11134 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1034.445055] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a8d767a2-2dd7-4819-9575-f19f76b11134 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1034.445911] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a8d767a2-2dd7-4819-9575-f19f76b11134 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1034.445911] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-086b43a1-88d5-48d5-9114-1a691344a15f {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.450166] env[61839]: DEBUG oslo_vmware.api [None req-a8d767a2-2dd7-4819-9575-f19f76b11134 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Waiting for the task: (returnval){ [ 1034.450166] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]521169a1-4512-f2cd-87c5-c0db29fa68ba" [ 1034.450166] env[61839]: _type = "Task" [ 1034.450166] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1034.457553] env[61839]: DEBUG oslo_vmware.api [None req-a8d767a2-2dd7-4819-9575-f19f76b11134 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]521169a1-4512-f2cd-87c5-c0db29fa68ba, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.469784] env[61839]: DEBUG nova.network.neutron [None req-ec56a3cb-ba72-45eb-9845-5acb6aea48b4 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: dbd34858-9806-4d3f-b829-948651056da2] Successfully updated port: 7ee0f326-ead2-4849-823d-9d652c5c339b {{(pid=61839) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1034.765904] env[61839]: DEBUG oslo_vmware.api [None req-b4c7d2ce-8f72-4614-90ab-317067fd69ed tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52d3e208-771e-68d9-24f4-fadacace30f4, 'name': SearchDatastore_Task, 'duration_secs': 0.013948} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1034.766206] env[61839]: DEBUG oslo_concurrency.lockutils [None req-b4c7d2ce-8f72-4614-90ab-317067fd69ed tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1034.766547] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-b4c7d2ce-8f72-4614-90ab-317067fd69ed tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk to [datastore2] 25c574c4-e39b-4009-a562-a4a5bf74a40c/25c574c4-e39b-4009-a562-a4a5bf74a40c.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1034.766828] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-52093458-d4b1-48d0-af2b-0551fb20f068 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.773643] env[61839]: DEBUG oslo_vmware.api [None req-b4c7d2ce-8f72-4614-90ab-317067fd69ed tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Waiting for the task: (returnval){ [ 1034.773643] env[61839]: value = "task-1314984" [ 1034.773643] env[61839]: _type = "Task" [ 1034.773643] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1034.781453] env[61839]: DEBUG oslo_vmware.api [None req-b4c7d2ce-8f72-4614-90ab-317067fd69ed tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1314984, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.790215] env[61839]: DEBUG oslo_concurrency.lockutils [None req-21aff03a-aab6-4082-8d60-92ea79709a14 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Lock "a3cb1dd7-00ef-41f3-8db5-68ef9eb8d156" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 22.457s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1034.820015] env[61839]: DEBUG nova.network.neutron [req-b0609836-55ac-470d-8b72-4d4949d52c7f req-65116439-4e34-469b-9857-c0d5f5eec3ec service nova] [instance: 15794971-10d8-4234-8a72-90c940dae90c] Updated VIF entry in instance network info cache for port bd6d90a8-d812-4325-b8eb-787e8a4476b5. 
{{(pid=61839) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1034.820415] env[61839]: DEBUG nova.network.neutron [req-b0609836-55ac-470d-8b72-4d4949d52c7f req-65116439-4e34-469b-9857-c0d5f5eec3ec service nova] [instance: 15794971-10d8-4234-8a72-90c940dae90c] Updating instance_info_cache with network_info: [{"id": "bd6d90a8-d812-4325-b8eb-787e8a4476b5", "address": "fa:16:3e:cb:3b:c2", "network": {"id": "47b25b31-1262-485a-a847-2918f7fce488", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-914746239-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "25686a503d044467a1d641f14e14c65c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8c58d99d-ec12-4fc3-ab39-042b3f8cbb89", "external-id": "nsx-vlan-transportzone-44", "segmentation_id": 44, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbd6d90a8-d8", "ovs_interfaceid": "bd6d90a8-d812-4325-b8eb-787e8a4476b5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1034.858822] env[61839]: DEBUG oslo_concurrency.lockutils [None req-18b40f8e-0fff-48ae-95ae-02fd0ef4e717 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Acquiring lock "a3cb1dd7-00ef-41f3-8db5-68ef9eb8d156" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1034.859128] env[61839]: DEBUG oslo_concurrency.lockutils [None req-18b40f8e-0fff-48ae-95ae-02fd0ef4e717 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Lock "a3cb1dd7-00ef-41f3-8db5-68ef9eb8d156" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1034.859347] env[61839]: DEBUG oslo_concurrency.lockutils [None req-18b40f8e-0fff-48ae-95ae-02fd0ef4e717 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Acquiring lock "a3cb1dd7-00ef-41f3-8db5-68ef9eb8d156-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1034.859540] env[61839]: DEBUG oslo_concurrency.lockutils [None req-18b40f8e-0fff-48ae-95ae-02fd0ef4e717 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Lock "a3cb1dd7-00ef-41f3-8db5-68ef9eb8d156-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1034.859717] env[61839]: DEBUG oslo_concurrency.lockutils [None req-18b40f8e-0fff-48ae-95ae-02fd0ef4e717 tempest-ServersTestJSON-2052072083 
[ 1034.859717] env[61839]: DEBUG oslo_concurrency.lockutils [None req-18b40f8e-0fff-48ae-95ae-02fd0ef4e717 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Lock "a3cb1dd7-00ef-41f3-8db5-68ef9eb8d156-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1034.861851] env[61839]: INFO nova.compute.manager [None req-18b40f8e-0fff-48ae-95ae-02fd0ef4e717 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: a3cb1dd7-00ef-41f3-8db5-68ef9eb8d156] Terminating instance [ 1034.863671] env[61839]: DEBUG nova.compute.manager [None req-18b40f8e-0fff-48ae-95ae-02fd0ef4e717 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: a3cb1dd7-00ef-41f3-8db5-68ef9eb8d156] Start destroying the instance on the hypervisor. {{(pid=61839) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1034.863871] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-18b40f8e-0fff-48ae-95ae-02fd0ef4e717 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: a3cb1dd7-00ef-41f3-8db5-68ef9eb8d156] Destroying instance {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1034.864766] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97c821c5-c38e-4c85-908f-941a140f1bf9 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.872482] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-18b40f8e-0fff-48ae-95ae-02fd0ef4e717 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: a3cb1dd7-00ef-41f3-8db5-68ef9eb8d156] Powering off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1034.872706] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6fc1eff4-3c60-44a8-8530-aae0992a7908 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.878569] env[61839]: DEBUG oslo_vmware.api [None req-18b40f8e-0fff-48ae-95ae-02fd0ef4e717 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Waiting for the task: (returnval){ [ 1034.878569] env[61839]: value = "task-1314985" [ 1034.878569] env[61839]: _type = "Task" [ 1034.878569] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1034.886701] env[61839]: DEBUG oslo_vmware.api [None req-18b40f8e-0fff-48ae-95ae-02fd0ef4e717 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': task-1314985, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.961497] env[61839]: DEBUG oslo_vmware.api [None req-a8d767a2-2dd7-4819-9575-f19f76b11134 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]521169a1-4512-f2cd-87c5-c0db29fa68ba, 'name': SearchDatastore_Task, 'duration_secs': 0.010016} completed successfully.
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1034.961805] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a8d767a2-2dd7-4819-9575-f19f76b11134 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1034.962051] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-a8d767a2-2dd7-4819-9575-f19f76b11134 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: 15794971-10d8-4234-8a72-90c940dae90c] Processing image e497cc62-282a-4a70-9770-22d80d8a1013 {{(pid=61839) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1034.962384] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a8d767a2-2dd7-4819-9575-f19f76b11134 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1034.962472] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a8d767a2-2dd7-4819-9575-f19f76b11134 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1034.962619] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-a8d767a2-2dd7-4819-9575-f19f76b11134 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1034.962911] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f42cf649-06fd-44f6-acff-428c52d1fb79 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.972187] env[61839]: DEBUG oslo_concurrency.lockutils [None req-ec56a3cb-ba72-45eb-9845-5acb6aea48b4 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Acquiring lock "refresh_cache-dbd34858-9806-4d3f-b829-948651056da2" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1034.972323] env[61839]: DEBUG oslo_concurrency.lockutils [None req-ec56a3cb-ba72-45eb-9845-5acb6aea48b4 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Acquired lock "refresh_cache-dbd34858-9806-4d3f-b829-948651056da2" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1034.972469] env[61839]: DEBUG nova.network.neutron [None req-ec56a3cb-ba72-45eb-9845-5acb6aea48b4 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: dbd34858-9806-4d3f-b829-948651056da2] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1034.973704] 
env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-a8d767a2-2dd7-4819-9575-f19f76b11134 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1034.973894] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-a8d767a2-2dd7-4819-9575-f19f76b11134 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61839) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1034.974852] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eaadba60-6d93-4b90-8f44-ac0dd0f63855 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.980247] env[61839]: DEBUG oslo_vmware.api [None req-a8d767a2-2dd7-4819-9575-f19f76b11134 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Waiting for the task: (returnval){ [ 1034.980247] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]525bf0a7-ca7e-0939-d466-b2b3c66ce4b2" [ 1034.980247] env[61839]: _type = "Task" [ 1034.980247] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1034.989342] env[61839]: DEBUG oslo_vmware.api [None req-a8d767a2-2dd7-4819-9575-f19f76b11134 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]525bf0a7-ca7e-0939-d466-b2b3c66ce4b2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.284586] env[61839]: DEBUG oslo_vmware.api [None req-b4c7d2ce-8f72-4614-90ab-317067fd69ed tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1314984, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.323571] env[61839]: DEBUG oslo_concurrency.lockutils [req-b0609836-55ac-470d-8b72-4d4949d52c7f req-65116439-4e34-469b-9857-c0d5f5eec3ec service nova] Releasing lock "refresh_cache-15794971-10d8-4234-8a72-90c940dae90c" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1035.388253] env[61839]: DEBUG oslo_vmware.api [None req-18b40f8e-0fff-48ae-95ae-02fd0ef4e717 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': task-1314985, 'name': PowerOffVM_Task, 'duration_secs': 0.349276} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1035.388554] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-18b40f8e-0fff-48ae-95ae-02fd0ef4e717 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: a3cb1dd7-00ef-41f3-8db5-68ef9eb8d156] Powered off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1035.388734] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-18b40f8e-0fff-48ae-95ae-02fd0ef4e717 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: a3cb1dd7-00ef-41f3-8db5-68ef9eb8d156] Unregistering the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1035.388996] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-79d927d2-86ac-4ed0-8612-d1fc922810f2 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.477901] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-18b40f8e-0fff-48ae-95ae-02fd0ef4e717 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: a3cb1dd7-00ef-41f3-8db5-68ef9eb8d156] Unregistered the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1035.478144] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-18b40f8e-0fff-48ae-95ae-02fd0ef4e717 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: a3cb1dd7-00ef-41f3-8db5-68ef9eb8d156] Deleting contents of the VM from datastore datastore2 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1035.478341] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-18b40f8e-0fff-48ae-95ae-02fd0ef4e717 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Deleting the datastore file [datastore2] a3cb1dd7-00ef-41f3-8db5-68ef9eb8d156 {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1035.478619] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-cdfba76a-fe15-483b-973b-84cdfe797a58 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.485722] env[61839]: DEBUG oslo_vmware.api [None req-18b40f8e-0fff-48ae-95ae-02fd0ef4e717 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Waiting for the task: (returnval){ [ 1035.485722] env[61839]: value = "task-1314987" [ 1035.485722] env[61839]: _type = "Task" [ 1035.485722] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1035.491869] env[61839]: DEBUG oslo_vmware.api [None req-a8d767a2-2dd7-4819-9575-f19f76b11134 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]525bf0a7-ca7e-0939-d466-b2b3c66ce4b2, 'name': SearchDatastore_Task, 'duration_secs': 0.011569} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1035.492893] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2b1afd2a-a21a-4b2e-9713-dc48071dd0ba {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.497518] env[61839]: DEBUG oslo_vmware.api [None req-18b40f8e-0fff-48ae-95ae-02fd0ef4e717 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': task-1314987, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.500606] env[61839]: DEBUG oslo_vmware.api [None req-a8d767a2-2dd7-4819-9575-f19f76b11134 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Waiting for the task: (returnval){ [ 1035.500606] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52377492-35d2-2f8d-8f78-f55013167573" [ 1035.500606] env[61839]: _type = "Task" [ 1035.500606] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1035.509588] env[61839]: DEBUG oslo_vmware.api [None req-a8d767a2-2dd7-4819-9575-f19f76b11134 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52377492-35d2-2f8d-8f78-f55013167573, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.510306] env[61839]: DEBUG nova.network.neutron [None req-ec56a3cb-ba72-45eb-9845-5acb6aea48b4 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: dbd34858-9806-4d3f-b829-948651056da2] Instance cache missing network info. 
{{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1035.673079] env[61839]: DEBUG nova.network.neutron [None req-ec56a3cb-ba72-45eb-9845-5acb6aea48b4 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: dbd34858-9806-4d3f-b829-948651056da2] Updating instance_info_cache with network_info: [{"id": "7ee0f326-ead2-4849-823d-9d652c5c339b", "address": "fa:16:3e:d1:53:f9", "network": {"id": "41c98894-de91-45eb-a390-6217e0f9dca5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1040806357-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7e03467b7fba46a9aac1562a1cb8368e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "572b7281-aad3-45fa-9cb2-fc1c70569948", "external-id": "nsx-vlan-transportzone-722", "segmentation_id": 722, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7ee0f326-ea", "ovs_interfaceid": "7ee0f326-ead2-4849-823d-9d652c5c339b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1035.784839] env[61839]: DEBUG oslo_vmware.api [None req-b4c7d2ce-8f72-4614-90ab-317067fd69ed tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1314984, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.659726} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1035.785175] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-b4c7d2ce-8f72-4614-90ab-317067fd69ed tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk to [datastore2] 25c574c4-e39b-4009-a562-a4a5bf74a40c/25c574c4-e39b-4009-a562-a4a5bf74a40c.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1035.785452] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-b4c7d2ce-8f72-4614-90ab-317067fd69ed tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 25c574c4-e39b-4009-a562-a4a5bf74a40c] Extending root virtual disk to 1048576 {{(pid=61839) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1035.785734] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-65180fcb-be89-4197-95e2-6161829579d7 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.791902] env[61839]: DEBUG oslo_vmware.api [None req-b4c7d2ce-8f72-4614-90ab-317067fd69ed tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Waiting for the task: (returnval){ [ 1035.791902] env[61839]: value = "task-1314988" [ 1035.791902] env[61839]: _type = "Task" [ 1035.791902] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1035.799715] env[61839]: DEBUG oslo_vmware.api [None req-b4c7d2ce-8f72-4614-90ab-317067fd69ed tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1314988, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.996055] env[61839]: DEBUG oslo_vmware.api [None req-18b40f8e-0fff-48ae-95ae-02fd0ef4e717 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': task-1314987, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.176202} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1035.996361] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-18b40f8e-0fff-48ae-95ae-02fd0ef4e717 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Deleted the datastore file {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1035.996577] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-18b40f8e-0fff-48ae-95ae-02fd0ef4e717 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: a3cb1dd7-00ef-41f3-8db5-68ef9eb8d156] Deleted contents of the VM from datastore datastore2 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1035.996770] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-18b40f8e-0fff-48ae-95ae-02fd0ef4e717 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: a3cb1dd7-00ef-41f3-8db5-68ef9eb8d156] Instance destroyed {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1035.996952] env[61839]: INFO nova.compute.manager [None req-18b40f8e-0fff-48ae-95ae-02fd0ef4e717 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: a3cb1dd7-00ef-41f3-8db5-68ef9eb8d156] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1035.997238] env[61839]: DEBUG oslo.service.loopingcall [None req-18b40f8e-0fff-48ae-95ae-02fd0ef4e717 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1035.997488] env[61839]: DEBUG nova.compute.manager [-] [instance: a3cb1dd7-00ef-41f3-8db5-68ef9eb8d156] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1035.997646] env[61839]: DEBUG nova.network.neutron [-] [instance: a3cb1dd7-00ef-41f3-8db5-68ef9eb8d156] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1036.011271] env[61839]: DEBUG oslo_vmware.api [None req-a8d767a2-2dd7-4819-9575-f19f76b11134 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52377492-35d2-2f8d-8f78-f55013167573, 'name': SearchDatastore_Task, 'duration_secs': 0.008515} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
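Records task-1314985 through task-1314987 trace the VMware driver's destroy path for instance a3cb1dd7: power off the VM, unregister it, delete its files from the datastore, then hand off to network deallocation. A condensed sketch of that ordering; the session object and its invoke/wait_for_task helpers are hypothetical stand-ins, not the driver's actual API:

    def destroy_instance(session, vm_ref, datastore_path):
        # Order mirrors the log: PowerOffVM_Task -> UnregisterVM ->
        # DeleteDatastoreFile_Task -> "Deallocating network for instance".
        session.wait_for_task(session.invoke('PowerOffVM_Task', vm_ref))
        session.invoke('UnregisterVM', vm_ref)  # synchronous, no task to poll
        session.wait_for_task(
            session.invoke('DeleteDatastoreFile_Task', datastore_path))
        # Nova then asks Neutron to release the instance's ports.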
[ 1036.011271] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a8d767a2-2dd7-4819-9575-f19f76b11134 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1036.011271] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-a8d767a2-2dd7-4819-9575-f19f76b11134 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk to [datastore2] 15794971-10d8-4234-8a72-90c940dae90c/15794971-10d8-4234-8a72-90c940dae90c.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1036.011875] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-df18c4a2-cbdc-42cd-909f-96abe25a8946 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.018733] env[61839]: DEBUG oslo_vmware.api [None req-a8d767a2-2dd7-4819-9575-f19f76b11134 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Waiting for the task: (returnval){ [ 1036.018733] env[61839]: value = "task-1314989" [ 1036.018733] env[61839]: _type = "Task" [ 1036.018733] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1036.027361] env[61839]: DEBUG oslo_vmware.api [None req-a8d767a2-2dd7-4819-9575-f19f76b11134 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': task-1314989, 'name': CopyVirtualDisk_Task} progress is 0%.
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.144718] env[61839]: DEBUG nova.compute.manager [req-e8d97477-da44-47a6-b168-517562586f9c req-75d1c878-49a5-4108-b69c-18947c317aa3 service nova] [instance: dbd34858-9806-4d3f-b829-948651056da2] Received event network-vif-plugged-7ee0f326-ead2-4849-823d-9d652c5c339b {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1036.144791] env[61839]: DEBUG oslo_concurrency.lockutils [req-e8d97477-da44-47a6-b168-517562586f9c req-75d1c878-49a5-4108-b69c-18947c317aa3 service nova] Acquiring lock "dbd34858-9806-4d3f-b829-948651056da2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1036.145025] env[61839]: DEBUG oslo_concurrency.lockutils [req-e8d97477-da44-47a6-b168-517562586f9c req-75d1c878-49a5-4108-b69c-18947c317aa3 service nova] Lock "dbd34858-9806-4d3f-b829-948651056da2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1036.145205] env[61839]: DEBUG oslo_concurrency.lockutils [req-e8d97477-da44-47a6-b168-517562586f9c req-75d1c878-49a5-4108-b69c-18947c317aa3 service nova] Lock "dbd34858-9806-4d3f-b829-948651056da2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1036.145541] env[61839]: DEBUG nova.compute.manager [req-e8d97477-da44-47a6-b168-517562586f9c req-75d1c878-49a5-4108-b69c-18947c317aa3 service nova] [instance: dbd34858-9806-4d3f-b829-948651056da2] No waiting events found dispatching network-vif-plugged-7ee0f326-ead2-4849-823d-9d652c5c339b {{(pid=61839) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1036.145827] env[61839]: WARNING nova.compute.manager [req-e8d97477-da44-47a6-b168-517562586f9c req-75d1c878-49a5-4108-b69c-18947c317aa3 service nova] [instance: dbd34858-9806-4d3f-b829-948651056da2] Received unexpected event network-vif-plugged-7ee0f326-ead2-4849-823d-9d652c5c339b for instance with vm_state building and task_state spawning. [ 1036.146066] env[61839]: DEBUG nova.compute.manager [req-e8d97477-da44-47a6-b168-517562586f9c req-75d1c878-49a5-4108-b69c-18947c317aa3 service nova] [instance: dbd34858-9806-4d3f-b829-948651056da2] Received event network-changed-7ee0f326-ead2-4849-823d-9d652c5c339b {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1036.146312] env[61839]: DEBUG nova.compute.manager [req-e8d97477-da44-47a6-b168-517562586f9c req-75d1c878-49a5-4108-b69c-18947c317aa3 service nova] [instance: dbd34858-9806-4d3f-b829-948651056da2] Refreshing instance network info cache due to event network-changed-7ee0f326-ead2-4849-823d-9d652c5c339b.
{{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1036.146604] env[61839]: DEBUG oslo_concurrency.lockutils [req-e8d97477-da44-47a6-b168-517562586f9c req-75d1c878-49a5-4108-b69c-18947c317aa3 service nova] Acquiring lock "refresh_cache-dbd34858-9806-4d3f-b829-948651056da2" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1036.176194] env[61839]: DEBUG oslo_concurrency.lockutils [None req-ec56a3cb-ba72-45eb-9845-5acb6aea48b4 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Releasing lock "refresh_cache-dbd34858-9806-4d3f-b829-948651056da2" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1036.176609] env[61839]: DEBUG nova.compute.manager [None req-ec56a3cb-ba72-45eb-9845-5acb6aea48b4 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: dbd34858-9806-4d3f-b829-948651056da2] Instance network_info: |[{"id": "7ee0f326-ead2-4849-823d-9d652c5c339b", "address": "fa:16:3e:d1:53:f9", "network": {"id": "41c98894-de91-45eb-a390-6217e0f9dca5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1040806357-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7e03467b7fba46a9aac1562a1cb8368e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "572b7281-aad3-45fa-9cb2-fc1c70569948", "external-id": "nsx-vlan-transportzone-722", "segmentation_id": 722, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7ee0f326-ea", "ovs_interfaceid": "7ee0f326-ead2-4849-823d-9d652c5c339b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1036.176953] env[61839]: DEBUG oslo_concurrency.lockutils [req-e8d97477-da44-47a6-b168-517562586f9c req-75d1c878-49a5-4108-b69c-18947c317aa3 service nova] Acquired lock "refresh_cache-dbd34858-9806-4d3f-b829-948651056da2" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1036.177160] env[61839]: DEBUG nova.network.neutron [req-e8d97477-da44-47a6-b168-517562586f9c req-75d1c878-49a5-4108-b69c-18947c317aa3 service nova] [instance: dbd34858-9806-4d3f-b829-948651056da2] Refreshing network info cache for port 7ee0f326-ead2-4849-823d-9d652c5c339b {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1036.178605] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-ec56a3cb-ba72-45eb-9845-5acb6aea48b4 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: dbd34858-9806-4d3f-b829-948651056da2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d1:53:f9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '572b7281-aad3-45fa-9cb2-fc1c70569948', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'7ee0f326-ead2-4849-823d-9d652c5c339b', 'vif_model': 'vmxnet3'}] {{(pid=61839) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1036.186467] env[61839]: DEBUG oslo.service.loopingcall [None req-ec56a3cb-ba72-45eb-9845-5acb6aea48b4 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1036.190554] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dbd34858-9806-4d3f-b829-948651056da2] Creating VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1036.191111] env[61839]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9015559c-81e9-47e7-89f6-7efd3f0342da {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.214393] env[61839]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1036.214393] env[61839]: value = "task-1314990" [ 1036.214393] env[61839]: _type = "Task" [ 1036.214393] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1036.223566] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314990, 'name': CreateVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.303502] env[61839]: DEBUG oslo_vmware.api [None req-b4c7d2ce-8f72-4614-90ab-317067fd69ed tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1314988, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.374398} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1036.303938] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-b4c7d2ce-8f72-4614-90ab-317067fd69ed tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 25c574c4-e39b-4009-a562-a4a5bf74a40c] Extended root virtual disk {{(pid=61839) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1036.304674] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f8571c3-7f7a-4ca0-8180-483c780b787e {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.330933] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-b4c7d2ce-8f72-4614-90ab-317067fd69ed tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 25c574c4-e39b-4009-a562-a4a5bf74a40c] Reconfiguring VM instance instance-00000067 to attach disk [datastore2] 25c574c4-e39b-4009-a562-a4a5bf74a40c/25c574c4-e39b-4009-a562-a4a5bf74a40c.vmdk or device None with type sparse {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1036.333516] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-85f6c9b1-d94a-462c-afc8-41e90ccbe3cb {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.353892] env[61839]: DEBUG oslo_vmware.api [None req-b4c7d2ce-8f72-4614-90ab-317067fd69ed tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Waiting for the task: (returnval){ [ 1036.353892] env[61839]: value = "task-1314991" [ 1036.353892] env[61839]: _type = "Task" [ 1036.353892] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1036.364153] env[61839]: DEBUG oslo_vmware.api [None req-b4c7d2ce-8f72-4614-90ab-317067fd69ed tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1314991, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.472827] env[61839]: DEBUG nova.network.neutron [req-e8d97477-da44-47a6-b168-517562586f9c req-75d1c878-49a5-4108-b69c-18947c317aa3 service nova] [instance: dbd34858-9806-4d3f-b829-948651056da2] Updated VIF entry in instance network info cache for port 7ee0f326-ead2-4849-823d-9d652c5c339b. 
{{(pid=61839) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1036.473307] env[61839]: DEBUG nova.network.neutron [req-e8d97477-da44-47a6-b168-517562586f9c req-75d1c878-49a5-4108-b69c-18947c317aa3 service nova] [instance: dbd34858-9806-4d3f-b829-948651056da2] Updating instance_info_cache with network_info: [{"id": "7ee0f326-ead2-4849-823d-9d652c5c339b", "address": "fa:16:3e:d1:53:f9", "network": {"id": "41c98894-de91-45eb-a390-6217e0f9dca5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1040806357-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7e03467b7fba46a9aac1562a1cb8368e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "572b7281-aad3-45fa-9cb2-fc1c70569948", "external-id": "nsx-vlan-transportzone-722", "segmentation_id": 722, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7ee0f326-ea", "ovs_interfaceid": "7ee0f326-ead2-4849-823d-9d652c5c339b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1036.530574] env[61839]: DEBUG oslo_vmware.api [None req-a8d767a2-2dd7-4819-9575-f19f76b11134 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': task-1314989, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.724995] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1314990, 'name': CreateVM_Task, 'duration_secs': 0.501951} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1036.725244] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dbd34858-9806-4d3f-b829-948651056da2] Created VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1036.726045] env[61839]: DEBUG oslo_concurrency.lockutils [None req-ec56a3cb-ba72-45eb-9845-5acb6aea48b4 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1036.726236] env[61839]: DEBUG oslo_concurrency.lockutils [None req-ec56a3cb-ba72-45eb-9845-5acb6aea48b4 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1036.726660] env[61839]: DEBUG oslo_concurrency.lockutils [None req-ec56a3cb-ba72-45eb-9845-5acb6aea48b4 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1036.726970] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c8ae0913-0168-450a-a10b-169e299d6811 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.732424] env[61839]: DEBUG oslo_vmware.api [None req-ec56a3cb-ba72-45eb-9845-5acb6aea48b4 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Waiting for the task: (returnval){ [ 1036.732424] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52989cc0-6460-0c71-a1b3-130f42dfb082" [ 1036.732424] env[61839]: _type = "Task" [ 1036.732424] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1036.741514] env[61839]: DEBUG oslo_vmware.api [None req-ec56a3cb-ba72-45eb-9845-5acb6aea48b4 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52989cc0-6460-0c71-a1b3-130f42dfb082, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.763651] env[61839]: DEBUG nova.network.neutron [-] [instance: a3cb1dd7-00ef-41f3-8db5-68ef9eb8d156] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1036.868710] env[61839]: DEBUG oslo_vmware.api [None req-b4c7d2ce-8f72-4614-90ab-317067fd69ed tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1314991, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.975778] env[61839]: DEBUG oslo_concurrency.lockutils [req-e8d97477-da44-47a6-b168-517562586f9c req-75d1c878-49a5-4108-b69c-18947c317aa3 service nova] Releasing lock "refresh_cache-dbd34858-9806-4d3f-b829-948651056da2" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1037.029600] env[61839]: DEBUG oslo_vmware.api [None req-a8d767a2-2dd7-4819-9575-f19f76b11134 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': task-1314989, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.851104} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1037.029883] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-a8d767a2-2dd7-4819-9575-f19f76b11134 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk to [datastore2] 15794971-10d8-4234-8a72-90c940dae90c/15794971-10d8-4234-8a72-90c940dae90c.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1037.030230] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-a8d767a2-2dd7-4819-9575-f19f76b11134 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: 15794971-10d8-4234-8a72-90c940dae90c] Extending root virtual disk to 1048576 {{(pid=61839) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1037.030522] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5deb2e6d-9722-4738-95aa-d45e4a33ffe7 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.037466] env[61839]: DEBUG oslo_vmware.api [None req-a8d767a2-2dd7-4819-9575-f19f76b11134 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Waiting for the task: (returnval){ [ 1037.037466] env[61839]: value = "task-1314992" [ 1037.037466] env[61839]: _type = "Task" [ 1037.037466] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1037.046914] env[61839]: DEBUG oslo_vmware.api [None req-a8d767a2-2dd7-4819-9575-f19f76b11134 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': task-1314992, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.247085] env[61839]: DEBUG oslo_vmware.api [None req-ec56a3cb-ba72-45eb-9845-5acb6aea48b4 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52989cc0-6460-0c71-a1b3-130f42dfb082, 'name': SearchDatastore_Task, 'duration_secs': 0.053186} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1037.247543] env[61839]: DEBUG oslo_concurrency.lockutils [None req-ec56a3cb-ba72-45eb-9845-5acb6aea48b4 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1037.247917] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-ec56a3cb-ba72-45eb-9845-5acb6aea48b4 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: dbd34858-9806-4d3f-b829-948651056da2] Processing image e497cc62-282a-4a70-9770-22d80d8a1013 {{(pid=61839) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1037.248312] env[61839]: DEBUG oslo_concurrency.lockutils [None req-ec56a3cb-ba72-45eb-9845-5acb6aea48b4 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1037.248572] env[61839]: DEBUG oslo_concurrency.lockutils [None req-ec56a3cb-ba72-45eb-9845-5acb6aea48b4 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1037.248875] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-ec56a3cb-ba72-45eb-9845-5acb6aea48b4 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1037.249273] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-69186070-58ce-470f-8a84-59152e8fda20 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.259047] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-ec56a3cb-ba72-45eb-9845-5acb6aea48b4 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1037.259333] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-ec56a3cb-ba72-45eb-9845-5acb6aea48b4 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61839) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1037.260534] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bb4fdc37-0946-4ac9-b3ab-62cd5582cdf2 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.267544] env[61839]: INFO nova.compute.manager [-] [instance: a3cb1dd7-00ef-41f3-8db5-68ef9eb8d156] Took 1.27 seconds to deallocate network for instance. [ 1037.268015] env[61839]: DEBUG oslo_vmware.api [None req-ec56a3cb-ba72-45eb-9845-5acb6aea48b4 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Waiting for the task: (returnval){ [ 1037.268015] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]5241deb4-866e-d956-6a13-b6cdde32f454" [ 1037.268015] env[61839]: _type = "Task" [ 1037.268015] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1037.287657] env[61839]: DEBUG oslo_vmware.api [None req-ec56a3cb-ba72-45eb-9845-5acb6aea48b4 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]5241deb4-866e-d956-6a13-b6cdde32f454, 'name': SearchDatastore_Task, 'duration_secs': 0.008876} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1037.289033] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-287ff2c8-c6af-412f-9bc5-ff4246af3765 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.296261] env[61839]: DEBUG oslo_vmware.api [None req-ec56a3cb-ba72-45eb-9845-5acb6aea48b4 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Waiting for the task: (returnval){ [ 1037.296261] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52e36cfb-628a-c254-ebc0-731b8ea26634" [ 1037.296261] env[61839]: _type = "Task" [ 1037.296261] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1037.306260] env[61839]: DEBUG oslo_vmware.api [None req-ec56a3cb-ba72-45eb-9845-5acb6aea48b4 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52e36cfb-628a-c254-ebc0-731b8ea26634, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.368438] env[61839]: DEBUG oslo_vmware.api [None req-b4c7d2ce-8f72-4614-90ab-317067fd69ed tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1314991, 'name': ReconfigVM_Task, 'duration_secs': 0.552408} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1037.368813] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-b4c7d2ce-8f72-4614-90ab-317067fd69ed tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 25c574c4-e39b-4009-a562-a4a5bf74a40c] Reconfigured VM instance instance-00000067 to attach disk [datastore2] 25c574c4-e39b-4009-a562-a4a5bf74a40c/25c574c4-e39b-4009-a562-a4a5bf74a40c.vmdk or device None with type sparse {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1037.369474] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7b7df234-f590-4a54-8e39-5067d2a5125a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.376060] env[61839]: DEBUG oslo_vmware.api [None req-b4c7d2ce-8f72-4614-90ab-317067fd69ed tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Waiting for the task: (returnval){ [ 1037.376060] env[61839]: value = "task-1314993" [ 1037.376060] env[61839]: _type = "Task" [ 1037.376060] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1037.384240] env[61839]: DEBUG oslo_vmware.api [None req-b4c7d2ce-8f72-4614-90ab-317067fd69ed tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1314993, 'name': Rename_Task} progress is 5%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.547405] env[61839]: DEBUG oslo_vmware.api [None req-a8d767a2-2dd7-4819-9575-f19f76b11134 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': task-1314992, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.246843} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1037.547686] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-a8d767a2-2dd7-4819-9575-f19f76b11134 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: 15794971-10d8-4234-8a72-90c940dae90c] Extended root virtual disk {{(pid=61839) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1037.548509] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6b788a2-c416-4053-b8e3-48b45877af18 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.571214] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-a8d767a2-2dd7-4819-9575-f19f76b11134 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: 15794971-10d8-4234-8a72-90c940dae90c] Reconfiguring VM instance instance-00000068 to attach disk [datastore2] 15794971-10d8-4234-8a72-90c940dae90c/15794971-10d8-4234-8a72-90c940dae90c.vmdk or device None with type sparse {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1037.571460] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-97f808a2-be83-499e-9faf-804374d2e966 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.590841] env[61839]: DEBUG oslo_vmware.api [None req-a8d767a2-2dd7-4819-9575-f19f76b11134 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Waiting for the task: (returnval){ [ 1037.590841] env[61839]: value = "task-1314994" [ 1037.590841] env[61839]: _type = "Task" [ 1037.590841] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1037.598444] env[61839]: DEBUG oslo_vmware.api [None req-a8d767a2-2dd7-4819-9575-f19f76b11134 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': task-1314994, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.782062] env[61839]: DEBUG oslo_concurrency.lockutils [None req-18b40f8e-0fff-48ae-95ae-02fd0ef4e717 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1037.782290] env[61839]: DEBUG oslo_concurrency.lockutils [None req-18b40f8e-0fff-48ae-95ae-02fd0ef4e717 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1037.782527] env[61839]: DEBUG nova.objects.instance [None req-18b40f8e-0fff-48ae-95ae-02fd0ef4e717 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Lazy-loading 'resources' on Instance uuid a3cb1dd7-00ef-41f3-8db5-68ef9eb8d156 {{(pid=61839) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1037.812380] env[61839]: DEBUG oslo_vmware.api [None req-ec56a3cb-ba72-45eb-9845-5acb6aea48b4 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52e36cfb-628a-c254-ebc0-731b8ea26634, 'name': SearchDatastore_Task, 'duration_secs': 0.010723} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1037.812785] env[61839]: DEBUG oslo_concurrency.lockutils [None req-ec56a3cb-ba72-45eb-9845-5acb6aea48b4 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1037.813265] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-ec56a3cb-ba72-45eb-9845-5acb6aea48b4 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk to [datastore2] dbd34858-9806-4d3f-b829-948651056da2/dbd34858-9806-4d3f-b829-948651056da2.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1037.813635] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3f6e0e9f-8860-43fe-9568-afe7e392d031 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.822233] env[61839]: DEBUG oslo_vmware.api [None req-ec56a3cb-ba72-45eb-9845-5acb6aea48b4 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Waiting for the task: (returnval){ [ 1037.822233] env[61839]: value = "task-1314995" [ 1037.822233] env[61839]: _type = "Task" [ 1037.822233] env[61839]: } to complete. 
{{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1037.833934] env[61839]: DEBUG oslo_vmware.api [None req-ec56a3cb-ba72-45eb-9845-5acb6aea48b4 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': task-1314995, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.886250] env[61839]: DEBUG oslo_vmware.api [None req-b4c7d2ce-8f72-4614-90ab-317067fd69ed tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1314993, 'name': Rename_Task, 'duration_secs': 0.143509} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1037.886567] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-b4c7d2ce-8f72-4614-90ab-317067fd69ed tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 25c574c4-e39b-4009-a562-a4a5bf74a40c] Powering on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1037.886830] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1cbf20d3-e51d-4378-ac92-feb2a8c747c8 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.894798] env[61839]: DEBUG oslo_vmware.api [None req-b4c7d2ce-8f72-4614-90ab-317067fd69ed tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Waiting for the task: (returnval){ [ 1037.894798] env[61839]: value = "task-1314996" [ 1037.894798] env[61839]: _type = "Task" [ 1037.894798] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1037.903056] env[61839]: DEBUG oslo_vmware.api [None req-b4c7d2ce-8f72-4614-90ab-317067fd69ed tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1314996, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.103711] env[61839]: DEBUG oslo_vmware.api [None req-a8d767a2-2dd7-4819-9575-f19f76b11134 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': task-1314994, 'name': ReconfigVM_Task, 'duration_secs': 0.318347} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1038.104058] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-a8d767a2-2dd7-4819-9575-f19f76b11134 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: 15794971-10d8-4234-8a72-90c940dae90c] Reconfigured VM instance instance-00000068 to attach disk [datastore2] 15794971-10d8-4234-8a72-90c940dae90c/15794971-10d8-4234-8a72-90c940dae90c.vmdk or device None with type sparse {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1038.104841] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f49095de-9f0d-46f1-bc01-7792cc3b8c2f {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.111969] env[61839]: DEBUG oslo_vmware.api [None req-a8d767a2-2dd7-4819-9575-f19f76b11134 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Waiting for the task: (returnval){ [ 1038.111969] env[61839]: value = "task-1314997" [ 1038.111969] env[61839]: _type = "Task" [ 1038.111969] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1038.120893] env[61839]: DEBUG oslo_vmware.api [None req-a8d767a2-2dd7-4819-9575-f19f76b11134 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': task-1314997, 'name': Rename_Task} progress is 5%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.180487] env[61839]: DEBUG nova.compute.manager [req-c99cca27-4f97-4333-a766-ef2298c43c32 req-5f72d668-e031-43b8-b83a-e3a0a980b922 service nova] [instance: a3cb1dd7-00ef-41f3-8db5-68ef9eb8d156] Received event network-vif-deleted-9e8f6e98-9b33-453e-b0c7-e46d907d02dd {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1038.335710] env[61839]: DEBUG oslo_vmware.api [None req-ec56a3cb-ba72-45eb-9845-5acb6aea48b4 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': task-1314995, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.405579] env[61839]: DEBUG oslo_vmware.api [None req-b4c7d2ce-8f72-4614-90ab-317067fd69ed tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1314996, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.407103] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1beb872c-5ca5-448a-bb4f-f771c27ecb0e {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.413940] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-673d21ce-4ab6-4af7-92ed-9992eda19a34 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.445014] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96986f8c-e3a1-44aa-b426-3980b12cc1a0 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.452375] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddce4306-d676-4875-9e0c-8fde6486eb6a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.465646] env[61839]: DEBUG nova.compute.provider_tree [None req-18b40f8e-0fff-48ae-95ae-02fd0ef4e717 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1038.622871] env[61839]: DEBUG oslo_vmware.api [None req-a8d767a2-2dd7-4819-9575-f19f76b11134 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': task-1314997, 'name': Rename_Task, 'duration_secs': 0.317443} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1038.623203] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-a8d767a2-2dd7-4819-9575-f19f76b11134 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: 15794971-10d8-4234-8a72-90c940dae90c] Powering on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1038.623512] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8f0aea45-e3e4-4062-a1f3-8097626c21df {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.629154] env[61839]: DEBUG oslo_vmware.api [None req-a8d767a2-2dd7-4819-9575-f19f76b11134 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Waiting for the task: (returnval){ [ 1038.629154] env[61839]: value = "task-1314998" [ 1038.629154] env[61839]: _type = "Task" [ 1038.629154] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1038.636557] env[61839]: DEBUG oslo_vmware.api [None req-a8d767a2-2dd7-4819-9575-f19f76b11134 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': task-1314998, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.833914] env[61839]: DEBUG oslo_vmware.api [None req-ec56a3cb-ba72-45eb-9845-5acb6aea48b4 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': task-1314995, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.522076} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1038.835060] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-ec56a3cb-ba72-45eb-9845-5acb6aea48b4 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk to [datastore2] dbd34858-9806-4d3f-b829-948651056da2/dbd34858-9806-4d3f-b829-948651056da2.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1038.835060] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-ec56a3cb-ba72-45eb-9845-5acb6aea48b4 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: dbd34858-9806-4d3f-b829-948651056da2] Extending root virtual disk to 1048576 {{(pid=61839) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1038.835060] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-41a90473-3882-40db-8300-5f2b6fbc5fd6 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.840925] env[61839]: DEBUG oslo_vmware.api [None req-ec56a3cb-ba72-45eb-9845-5acb6aea48b4 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Waiting for the task: (returnval){ [ 1038.840925] env[61839]: value = "task-1314999" [ 1038.840925] env[61839]: _type = "Task" [ 1038.840925] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1038.848839] env[61839]: DEBUG oslo_vmware.api [None req-ec56a3cb-ba72-45eb-9845-5acb6aea48b4 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': task-1314999, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.904728] env[61839]: DEBUG oslo_vmware.api [None req-b4c7d2ce-8f72-4614-90ab-317067fd69ed tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1314996, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.968982] env[61839]: DEBUG nova.scheduler.client.report [None req-18b40f8e-0fff-48ae-95ae-02fd0ef4e717 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1039.141543] env[61839]: DEBUG oslo_vmware.api [None req-a8d767a2-2dd7-4819-9575-f19f76b11134 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': task-1314998, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.351810] env[61839]: DEBUG oslo_vmware.api [None req-ec56a3cb-ba72-45eb-9845-5acb6aea48b4 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': task-1314999, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.058981} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1039.352184] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-ec56a3cb-ba72-45eb-9845-5acb6aea48b4 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: dbd34858-9806-4d3f-b829-948651056da2] Extended root virtual disk {{(pid=61839) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1039.352860] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-947ed0ee-a37c-4080-9d1b-6ac066a7f201 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.374382] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-ec56a3cb-ba72-45eb-9845-5acb6aea48b4 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: dbd34858-9806-4d3f-b829-948651056da2] Reconfiguring VM instance instance-00000069 to attach disk [datastore2] dbd34858-9806-4d3f-b829-948651056da2/dbd34858-9806-4d3f-b829-948651056da2.vmdk or device None with type sparse {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1039.374651] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cbbc0346-fb01-431b-b605-5d0c390b53e7 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.394029] env[61839]: DEBUG oslo_vmware.api [None req-ec56a3cb-ba72-45eb-9845-5acb6aea48b4 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Waiting for the task: (returnval){ [ 1039.394029] env[61839]: value = "task-1315000" [ 1039.394029] env[61839]: _type = "Task" [ 1039.394029] env[61839]: } to complete. 
{{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1039.407590] env[61839]: DEBUG oslo_vmware.api [None req-ec56a3cb-ba72-45eb-9845-5acb6aea48b4 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': task-1315000, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.407866] env[61839]: DEBUG oslo_vmware.api [None req-b4c7d2ce-8f72-4614-90ab-317067fd69ed tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1314996, 'name': PowerOnVM_Task, 'duration_secs': 1.143419} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1039.408263] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-b4c7d2ce-8f72-4614-90ab-317067fd69ed tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 25c574c4-e39b-4009-a562-a4a5bf74a40c] Powered on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1039.408490] env[61839]: INFO nova.compute.manager [None req-b4c7d2ce-8f72-4614-90ab-317067fd69ed tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 25c574c4-e39b-4009-a562-a4a5bf74a40c] Took 9.70 seconds to spawn the instance on the hypervisor. [ 1039.408683] env[61839]: DEBUG nova.compute.manager [None req-b4c7d2ce-8f72-4614-90ab-317067fd69ed tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 25c574c4-e39b-4009-a562-a4a5bf74a40c] Checking state {{(pid=61839) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1039.409423] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bede2842-e6e2-448a-9d99-c7d00cae1c87 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.473680] env[61839]: DEBUG oslo_concurrency.lockutils [None req-18b40f8e-0fff-48ae-95ae-02fd0ef4e717 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.691s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1039.496513] env[61839]: INFO nova.scheduler.client.report [None req-18b40f8e-0fff-48ae-95ae-02fd0ef4e717 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Deleted allocations for instance a3cb1dd7-00ef-41f3-8db5-68ef9eb8d156 [ 1039.641061] env[61839]: DEBUG oslo_vmware.api [None req-a8d767a2-2dd7-4819-9575-f19f76b11134 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': task-1314998, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.904075] env[61839]: DEBUG oslo_vmware.api [None req-ec56a3cb-ba72-45eb-9845-5acb6aea48b4 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': task-1315000, 'name': ReconfigVM_Task, 'duration_secs': 0.265624} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1039.904298] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-ec56a3cb-ba72-45eb-9845-5acb6aea48b4 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: dbd34858-9806-4d3f-b829-948651056da2] Reconfigured VM instance instance-00000069 to attach disk [datastore2] dbd34858-9806-4d3f-b829-948651056da2/dbd34858-9806-4d3f-b829-948651056da2.vmdk or device None with type sparse {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1039.904892] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-17a3cb1d-47e0-4612-a4e9-a7fa07072ba5 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.912109] env[61839]: DEBUG oslo_vmware.api [None req-ec56a3cb-ba72-45eb-9845-5acb6aea48b4 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Waiting for the task: (returnval){ [ 1039.912109] env[61839]: value = "task-1315001" [ 1039.912109] env[61839]: _type = "Task" [ 1039.912109] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1039.925217] env[61839]: DEBUG oslo_vmware.api [None req-ec56a3cb-ba72-45eb-9845-5acb6aea48b4 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': task-1315001, 'name': Rename_Task} progress is 5%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.930151] env[61839]: INFO nova.compute.manager [None req-b4c7d2ce-8f72-4614-90ab-317067fd69ed tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 25c574c4-e39b-4009-a562-a4a5bf74a40c] Took 23.87 seconds to build instance. [ 1040.003899] env[61839]: DEBUG oslo_concurrency.lockutils [None req-18b40f8e-0fff-48ae-95ae-02fd0ef4e717 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Lock "a3cb1dd7-00ef-41f3-8db5-68ef9eb8d156" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 5.145s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1040.141090] env[61839]: DEBUG oslo_vmware.api [None req-a8d767a2-2dd7-4819-9575-f19f76b11134 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': task-1314998, 'name': PowerOnVM_Task, 'duration_secs': 1.412519} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1040.141364] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-a8d767a2-2dd7-4819-9575-f19f76b11134 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: 15794971-10d8-4234-8a72-90c940dae90c] Powered on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1040.141591] env[61839]: INFO nova.compute.manager [None req-a8d767a2-2dd7-4819-9575-f19f76b11134 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: 15794971-10d8-4234-8a72-90c940dae90c] Took 8.21 seconds to spawn the instance on the hypervisor.
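The wait_for_task / _poll_task pairs above are oslo.vmware's generic task-polling helper driving every vCenter call in these spawns (CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task). A minimal sketch of that call pattern, using only documented oslo.vmware entry points; the host, credentials, and moref value are placeholders, and this illustrates the library pattern rather than the Nova driver code itself:

    from oslo_vmware import api, vim_util

    # Constructing the session logs into vCenter immediately
    # (create_session=True is the default); host/user/secret here
    # are placeholders, not values from this environment.
    session = api.VMwareAPISession(
        'vc.example.test', 'user', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    # invoke_api issues the SOAP request and returns a task moref;
    # wait_for_task then polls TaskInfo until success or error, which
    # is what emits the "progress is N%" and "completed successfully"
    # lines seen in this log.
    vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')  # hypothetical moref
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    session.wait_for_task(task)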
[ 1040.141817] env[61839]: DEBUG nova.compute.manager [None req-a8d767a2-2dd7-4819-9575-f19f76b11134 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: 15794971-10d8-4234-8a72-90c940dae90c] Checking state {{(pid=61839) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1040.142630] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e149f33-fa40-412a-8a83-1eefd5167ecc {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.422694] env[61839]: DEBUG oslo_vmware.api [None req-ec56a3cb-ba72-45eb-9845-5acb6aea48b4 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': task-1315001, 'name': Rename_Task, 'duration_secs': 0.139104} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1040.423050] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-ec56a3cb-ba72-45eb-9845-5acb6aea48b4 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: dbd34858-9806-4d3f-b829-948651056da2] Powering on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1040.423346] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-311f972c-bfd2-4587-ae88-10ef0e3d6c4b {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.430730] env[61839]: DEBUG oslo_vmware.api [None req-ec56a3cb-ba72-45eb-9845-5acb6aea48b4 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Waiting for the task: (returnval){ [ 1040.430730] env[61839]: value = "task-1315002" [ 1040.430730] env[61839]: _type = "Task" [ 1040.430730] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1040.436300] env[61839]: DEBUG nova.compute.manager [req-aeb8b4d9-9453-4f8a-9c21-ef0c315b6a3c req-ff43bc1b-a45f-48f5-baa7-b8162b3f9ece service nova] [instance: 25c574c4-e39b-4009-a562-a4a5bf74a40c] Received event network-changed-98c98e8e-abd7-4f81-9969-154f0e05908e {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1040.436568] env[61839]: DEBUG nova.compute.manager [req-aeb8b4d9-9453-4f8a-9c21-ef0c315b6a3c req-ff43bc1b-a45f-48f5-baa7-b8162b3f9ece service nova] [instance: 25c574c4-e39b-4009-a562-a4a5bf74a40c] Refreshing instance network info cache due to event network-changed-98c98e8e-abd7-4f81-9969-154f0e05908e. 
{{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1040.436818] env[61839]: DEBUG oslo_concurrency.lockutils [req-aeb8b4d9-9453-4f8a-9c21-ef0c315b6a3c req-ff43bc1b-a45f-48f5-baa7-b8162b3f9ece service nova] Acquiring lock "refresh_cache-25c574c4-e39b-4009-a562-a4a5bf74a40c" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1040.436978] env[61839]: DEBUG oslo_concurrency.lockutils [req-aeb8b4d9-9453-4f8a-9c21-ef0c315b6a3c req-ff43bc1b-a45f-48f5-baa7-b8162b3f9ece service nova] Acquired lock "refresh_cache-25c574c4-e39b-4009-a562-a4a5bf74a40c" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1040.437168] env[61839]: DEBUG nova.network.neutron [req-aeb8b4d9-9453-4f8a-9c21-ef0c315b6a3c req-ff43bc1b-a45f-48f5-baa7-b8162b3f9ece service nova] [instance: 25c574c4-e39b-4009-a562-a4a5bf74a40c] Refreshing network info cache for port 98c98e8e-abd7-4f81-9969-154f0e05908e {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1040.440981] env[61839]: DEBUG oslo_concurrency.lockutils [None req-b4c7d2ce-8f72-4614-90ab-317067fd69ed tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Lock "25c574c4-e39b-4009-a562-a4a5bf74a40c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 25.386s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1040.447185] env[61839]: DEBUG oslo_vmware.api [None req-ec56a3cb-ba72-45eb-9845-5acb6aea48b4 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': task-1315002, 'name': PowerOnVM_Task} progress is 33%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.660074] env[61839]: INFO nova.compute.manager [None req-a8d767a2-2dd7-4819-9575-f19f76b11134 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: 15794971-10d8-4234-8a72-90c940dae90c] Took 24.07 seconds to build instance. [ 1040.943450] env[61839]: DEBUG oslo_vmware.api [None req-ec56a3cb-ba72-45eb-9845-5acb6aea48b4 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': task-1315002, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.073172] env[61839]: DEBUG oslo_concurrency.lockutils [None req-32e4c9e8-c936-4782-9ca4-c5a97eef1748 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Acquiring lock "6b193149-68a3-43fc-a331-1b49e0cab484" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1041.073466] env[61839]: DEBUG oslo_concurrency.lockutils [None req-32e4c9e8-c936-4782-9ca4-c5a97eef1748 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Lock "6b193149-68a3-43fc-a331-1b49e0cab484" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1041.160850] env[61839]: DEBUG oslo_concurrency.lockutils [None req-a8d767a2-2dd7-4819-9575-f19f76b11134 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Lock "15794971-10d8-4234-8a72-90c940dae90c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 25.584s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1041.184089] env[61839]: DEBUG nova.network.neutron [req-aeb8b4d9-9453-4f8a-9c21-ef0c315b6a3c req-ff43bc1b-a45f-48f5-baa7-b8162b3f9ece service nova] [instance: 25c574c4-e39b-4009-a562-a4a5bf74a40c] Updated VIF entry in instance network info cache for port 98c98e8e-abd7-4f81-9969-154f0e05908e. 
{{(pid=61839) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1041.184481] env[61839]: DEBUG nova.network.neutron [req-aeb8b4d9-9453-4f8a-9c21-ef0c315b6a3c req-ff43bc1b-a45f-48f5-baa7-b8162b3f9ece service nova] [instance: 25c574c4-e39b-4009-a562-a4a5bf74a40c] Updating instance_info_cache with network_info: [{"id": "98c98e8e-abd7-4f81-9969-154f0e05908e", "address": "fa:16:3e:34:ef:0a", "network": {"id": "90aa5d36-d3ac-4a62-9f9d-7578cd9ac466", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-2104030849-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.224", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "48d8c406ff504d71bba5fb74caf11c14", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eed34ae1-5f7f-4deb-9db8-85eaa1e60c29", "external-id": "nsx-vlan-transportzone-780", "segmentation_id": 780, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap98c98e8e-ab", "ovs_interfaceid": "98c98e8e-abd7-4f81-9969-154f0e05908e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1041.440758] env[61839]: DEBUG oslo_vmware.api [None req-ec56a3cb-ba72-45eb-9845-5acb6aea48b4 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': task-1315002, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.575506] env[61839]: DEBUG nova.compute.manager [None req-32e4c9e8-c936-4782-9ca4-c5a97eef1748 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 6b193149-68a3-43fc-a331-1b49e0cab484] Starting instance... 
{{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1041.687946] env[61839]: DEBUG oslo_concurrency.lockutils [req-aeb8b4d9-9453-4f8a-9c21-ef0c315b6a3c req-ff43bc1b-a45f-48f5-baa7-b8162b3f9ece service nova] Releasing lock "refresh_cache-25c574c4-e39b-4009-a562-a4a5bf74a40c" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1041.915874] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db081251-a187-4d65-b263-ad15d437b1b4 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.923513] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-144392a3-fcb2-40e2-873c-9e623a6e6693 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: 15794971-10d8-4234-8a72-90c940dae90c] Suspending the VM {{(pid=61839) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 1041.923838] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-e93e3e0c-b177-411c-9b3e-c923eb685244 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.930902] env[61839]: DEBUG oslo_vmware.api [None req-144392a3-fcb2-40e2-873c-9e623a6e6693 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Waiting for the task: (returnval){ [ 1041.930902] env[61839]: value = "task-1315003" [ 1041.930902] env[61839]: _type = "Task" [ 1041.930902] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1041.941293] env[61839]: DEBUG oslo_vmware.api [None req-144392a3-fcb2-40e2-873c-9e623a6e6693 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': task-1315003, 'name': SuspendVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.944354] env[61839]: DEBUG oslo_vmware.api [None req-ec56a3cb-ba72-45eb-9845-5acb6aea48b4 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': task-1315002, 'name': PowerOnVM_Task, 'duration_secs': 1.34512} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1041.944604] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-ec56a3cb-ba72-45eb-9845-5acb6aea48b4 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: dbd34858-9806-4d3f-b829-948651056da2] Powered on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1041.944815] env[61839]: INFO nova.compute.manager [None req-ec56a3cb-ba72-45eb-9845-5acb6aea48b4 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: dbd34858-9806-4d3f-b829-948651056da2] Took 7.78 seconds to spawn the instance on the hypervisor. 
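The Acquiring/acquired/released lock messages that oslo_concurrency.lockutils interleaves with these records (lockutils.py:402/407/421 for the decorator wrapper, 310/313/331 for the context manager) come from its two standard usage forms. A minimal sketch of both, with hypothetical bodies standing in for the Nova methods named in the messages:

    from oslo_concurrency import lockutils

    # Decorator form: each call logs "Acquiring lock ...", then
    # "acquired ... :: waited Ns", and "released ... :: held Ns" on exit.
    @lockutils.synchronized('compute_resources')
    def update_usage():
        pass  # hypothetical stand-in for the ResourceTracker method in the log

    # Context-manager form, as used for the per-instance
    # "refresh_cache-<uuid>" locks in the records above.
    with lockutils.lock('refresh_cache-25c574c4-e39b-4009-a562-a4a5bf74a40c'):
        pass  # refresh the network info cache while the lock is held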
[ 1041.945007] env[61839]: DEBUG nova.compute.manager [None req-ec56a3cb-ba72-45eb-9845-5acb6aea48b4 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: dbd34858-9806-4d3f-b829-948651056da2] Checking state {{(pid=61839) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1041.945936] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b80f831c-a9d8-45e8-9d2c-437830d2b5c0 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.100945] env[61839]: DEBUG oslo_concurrency.lockutils [None req-32e4c9e8-c936-4782-9ca4-c5a97eef1748 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1042.101255] env[61839]: DEBUG oslo_concurrency.lockutils [None req-32e4c9e8-c936-4782-9ca4-c5a97eef1748 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1042.102785] env[61839]: INFO nova.compute.claims [None req-32e4c9e8-c936-4782-9ca4-c5a97eef1748 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 6b193149-68a3-43fc-a331-1b49e0cab484] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1042.442132] env[61839]: DEBUG oslo_vmware.api [None req-144392a3-fcb2-40e2-873c-9e623a6e6693 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': task-1315003, 'name': SuspendVM_Task} progress is 66%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.464876] env[61839]: INFO nova.compute.manager [None req-ec56a3cb-ba72-45eb-9845-5acb6aea48b4 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: dbd34858-9806-4d3f-b829-948651056da2] Took 16.90 seconds to build instance. [ 1042.941511] env[61839]: DEBUG oslo_vmware.api [None req-144392a3-fcb2-40e2-873c-9e623a6e6693 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': task-1315003, 'name': SuspendVM_Task, 'duration_secs': 0.841329} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1042.941745] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-144392a3-fcb2-40e2-873c-9e623a6e6693 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: 15794971-10d8-4234-8a72-90c940dae90c] Suspended the VM {{(pid=61839) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 1042.941929] env[61839]: DEBUG nova.compute.manager [None req-144392a3-fcb2-40e2-873c-9e623a6e6693 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: 15794971-10d8-4234-8a72-90c940dae90c] Checking state {{(pid=61839) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1042.942687] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88fc3633-b644-4373-8e73-3b558fcac93e {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.968632] env[61839]: DEBUG oslo_concurrency.lockutils [None req-ec56a3cb-ba72-45eb-9845-5acb6aea48b4 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Lock "dbd34858-9806-4d3f-b829-948651056da2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 18.413s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1043.189087] env[61839]: DEBUG nova.compute.manager [req-b6ea9cdd-0fba-4321-9582-5da1033c6ae2 req-c00b2368-e2cb-4fb2-87f5-32823d37280c service nova] [instance: dbd34858-9806-4d3f-b829-948651056da2] Received event network-changed-7ee0f326-ead2-4849-823d-9d652c5c339b {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1043.189296] env[61839]: DEBUG nova.compute.manager [req-b6ea9cdd-0fba-4321-9582-5da1033c6ae2 req-c00b2368-e2cb-4fb2-87f5-32823d37280c service nova] [instance: dbd34858-9806-4d3f-b829-948651056da2] Refreshing instance network info cache due to event network-changed-7ee0f326-ead2-4849-823d-9d652c5c339b. 
{{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1043.189515] env[61839]: DEBUG oslo_concurrency.lockutils [req-b6ea9cdd-0fba-4321-9582-5da1033c6ae2 req-c00b2368-e2cb-4fb2-87f5-32823d37280c service nova] Acquiring lock "refresh_cache-dbd34858-9806-4d3f-b829-948651056da2" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1043.189682] env[61839]: DEBUG oslo_concurrency.lockutils [req-b6ea9cdd-0fba-4321-9582-5da1033c6ae2 req-c00b2368-e2cb-4fb2-87f5-32823d37280c service nova] Acquired lock "refresh_cache-dbd34858-9806-4d3f-b829-948651056da2" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1043.190833] env[61839]: DEBUG nova.network.neutron [req-b6ea9cdd-0fba-4321-9582-5da1033c6ae2 req-c00b2368-e2cb-4fb2-87f5-32823d37280c service nova] [instance: dbd34858-9806-4d3f-b829-948651056da2] Refreshing network info cache for port 7ee0f326-ead2-4849-823d-9d652c5c339b {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1043.228982] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebad06a5-18ab-4df6-aa8b-46db05927701 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.236732] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5f659be-477a-41b9-9eeb-35d6e44524cb {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.265921] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87182651-8d44-46ec-804d-c3ce00a1b8d2 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.273174] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28a22dfd-2e06-4bd5-9bb3-b3f83dd95b4b {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.288498] env[61839]: DEBUG nova.compute.provider_tree [None req-32e4c9e8-c936-4782-9ca4-c5a97eef1748 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1043.791618] env[61839]: DEBUG nova.scheduler.client.report [None req-32e4c9e8-c936-4782-9ca4-c5a97eef1748 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1044.047793] env[61839]: DEBUG oslo_concurrency.lockutils [None req-9f5969a7-8816-49c0-8a86-1abb13ff475b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Acquiring lock 
"6b12ef55-b566-4a74-a794-b4e4c41debe1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1044.048067] env[61839]: DEBUG oslo_concurrency.lockutils [None req-9f5969a7-8816-49c0-8a86-1abb13ff475b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Lock "6b12ef55-b566-4a74-a794-b4e4c41debe1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1044.158411] env[61839]: DEBUG nova.network.neutron [req-b6ea9cdd-0fba-4321-9582-5da1033c6ae2 req-c00b2368-e2cb-4fb2-87f5-32823d37280c service nova] [instance: dbd34858-9806-4d3f-b829-948651056da2] Updated VIF entry in instance network info cache for port 7ee0f326-ead2-4849-823d-9d652c5c339b. {{(pid=61839) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1044.158791] env[61839]: DEBUG nova.network.neutron [req-b6ea9cdd-0fba-4321-9582-5da1033c6ae2 req-c00b2368-e2cb-4fb2-87f5-32823d37280c service nova] [instance: dbd34858-9806-4d3f-b829-948651056da2] Updating instance_info_cache with network_info: [{"id": "7ee0f326-ead2-4849-823d-9d652c5c339b", "address": "fa:16:3e:d1:53:f9", "network": {"id": "41c98894-de91-45eb-a390-6217e0f9dca5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1040806357-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.170", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7e03467b7fba46a9aac1562a1cb8368e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "572b7281-aad3-45fa-9cb2-fc1c70569948", "external-id": "nsx-vlan-transportzone-722", "segmentation_id": 722, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7ee0f326-ea", "ovs_interfaceid": "7ee0f326-ead2-4849-823d-9d652c5c339b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1044.297032] env[61839]: DEBUG oslo_concurrency.lockutils [None req-32e4c9e8-c936-4782-9ca4-c5a97eef1748 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.195s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1044.297441] env[61839]: DEBUG nova.compute.manager [None req-32e4c9e8-c936-4782-9ca4-c5a97eef1748 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 6b193149-68a3-43fc-a331-1b49e0cab484] Start building networks asynchronously for instance. 
{{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1044.550340] env[61839]: DEBUG nova.compute.manager [None req-9f5969a7-8816-49c0-8a86-1abb13ff475b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: 6b12ef55-b566-4a74-a794-b4e4c41debe1] Starting instance... {{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1044.627814] env[61839]: DEBUG oslo_concurrency.lockutils [None req-61a85ce9-a07c-4a15-a7e9-2624e3918510 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Acquiring lock "15794971-10d8-4234-8a72-90c940dae90c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1044.628082] env[61839]: DEBUG oslo_concurrency.lockutils [None req-61a85ce9-a07c-4a15-a7e9-2624e3918510 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Lock "15794971-10d8-4234-8a72-90c940dae90c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1044.628302] env[61839]: DEBUG oslo_concurrency.lockutils [None req-61a85ce9-a07c-4a15-a7e9-2624e3918510 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Acquiring lock "15794971-10d8-4234-8a72-90c940dae90c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1044.628490] env[61839]: DEBUG oslo_concurrency.lockutils [None req-61a85ce9-a07c-4a15-a7e9-2624e3918510 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Lock "15794971-10d8-4234-8a72-90c940dae90c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1044.628664] env[61839]: DEBUG oslo_concurrency.lockutils [None req-61a85ce9-a07c-4a15-a7e9-2624e3918510 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Lock "15794971-10d8-4234-8a72-90c940dae90c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1044.630776] env[61839]: INFO nova.compute.manager [None req-61a85ce9-a07c-4a15-a7e9-2624e3918510 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: 15794971-10d8-4234-8a72-90c940dae90c] Terminating instance [ 1044.632469] env[61839]: DEBUG nova.compute.manager [None req-61a85ce9-a07c-4a15-a7e9-2624e3918510 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: 15794971-10d8-4234-8a72-90c940dae90c] Start destroying the instance on the hypervisor. 
{{(pid=61839) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1044.632886] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-61a85ce9-a07c-4a15-a7e9-2624e3918510 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: 15794971-10d8-4234-8a72-90c940dae90c] Destroying instance {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1044.633547] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78b8f5f9-60a8-46da-bb7a-e5b464dd3dc8 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.641319] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-61a85ce9-a07c-4a15-a7e9-2624e3918510 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: 15794971-10d8-4234-8a72-90c940dae90c] Unregistering the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1044.641560] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a446820c-67db-4afb-a4c3-3b29cb05947c {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.661278] env[61839]: DEBUG oslo_concurrency.lockutils [req-b6ea9cdd-0fba-4321-9582-5da1033c6ae2 req-c00b2368-e2cb-4fb2-87f5-32823d37280c service nova] Releasing lock "refresh_cache-dbd34858-9806-4d3f-b829-948651056da2" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1044.716097] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-61a85ce9-a07c-4a15-a7e9-2624e3918510 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: 15794971-10d8-4234-8a72-90c940dae90c] Unregistered the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1044.716346] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-61a85ce9-a07c-4a15-a7e9-2624e3918510 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: 15794971-10d8-4234-8a72-90c940dae90c] Deleting contents of the VM from datastore datastore2 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1044.716562] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-61a85ce9-a07c-4a15-a7e9-2624e3918510 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Deleting the datastore file [datastore2] 15794971-10d8-4234-8a72-90c940dae90c {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1044.716842] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-39a70b66-db7a-4db8-9e97-40f49a05dccb {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.722580] env[61839]: DEBUG oslo_vmware.api [None req-61a85ce9-a07c-4a15-a7e9-2624e3918510 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Waiting for the task: (returnval){ [ 1044.722580] env[61839]: value = "task-1315005" [ 1044.722580] env[61839]: _type = "Task" [ 1044.722580] env[61839]: } to complete. 
{{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1044.730083] env[61839]: DEBUG oslo_vmware.api [None req-61a85ce9-a07c-4a15-a7e9-2624e3918510 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': task-1315005, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.804045] env[61839]: DEBUG nova.compute.utils [None req-32e4c9e8-c936-4782-9ca4-c5a97eef1748 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Using /dev/sd instead of None {{(pid=61839) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1044.804641] env[61839]: DEBUG nova.compute.manager [None req-32e4c9e8-c936-4782-9ca4-c5a97eef1748 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 6b193149-68a3-43fc-a331-1b49e0cab484] Allocating IP information in the background. {{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1044.805695] env[61839]: DEBUG nova.network.neutron [None req-32e4c9e8-c936-4782-9ca4-c5a97eef1748 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 6b193149-68a3-43fc-a331-1b49e0cab484] allocate_for_instance() {{(pid=61839) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1044.854012] env[61839]: DEBUG nova.policy [None req-32e4c9e8-c936-4782-9ca4-c5a97eef1748 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7b9ca67c278b4cb9a83ec3c6ce42af5d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5aba1e066cb4400dbbacc92f393962e6', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61839) authorize /opt/stack/nova/nova/policy.py:201}} [ 1045.076431] env[61839]: DEBUG oslo_concurrency.lockutils [None req-9f5969a7-8816-49c0-8a86-1abb13ff475b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1045.076718] env[61839]: DEBUG oslo_concurrency.lockutils [None req-9f5969a7-8816-49c0-8a86-1abb13ff475b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1045.078463] env[61839]: INFO nova.compute.claims [None req-9f5969a7-8816-49c0-8a86-1abb13ff475b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: 6b12ef55-b566-4a74-a794-b4e4c41debe1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1045.094913] env[61839]: DEBUG nova.network.neutron [None req-32e4c9e8-c936-4782-9ca4-c5a97eef1748 tempest-ServersTestJSON-2052072083 
tempest-ServersTestJSON-2052072083-project-member] [instance: 6b193149-68a3-43fc-a331-1b49e0cab484] Successfully created port: 942b0af0-855f-4721-b554-cc5ecb32794e {{(pid=61839) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1045.232360] env[61839]: DEBUG oslo_vmware.api [None req-61a85ce9-a07c-4a15-a7e9-2624e3918510 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': task-1315005, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.167621} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1045.232637] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-61a85ce9-a07c-4a15-a7e9-2624e3918510 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Deleted the datastore file {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1045.232829] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-61a85ce9-a07c-4a15-a7e9-2624e3918510 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: 15794971-10d8-4234-8a72-90c940dae90c] Deleted contents of the VM from datastore datastore2 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1045.233054] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-61a85ce9-a07c-4a15-a7e9-2624e3918510 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: 15794971-10d8-4234-8a72-90c940dae90c] Instance destroyed {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1045.233240] env[61839]: INFO nova.compute.manager [None req-61a85ce9-a07c-4a15-a7e9-2624e3918510 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: 15794971-10d8-4234-8a72-90c940dae90c] Took 0.60 seconds to destroy the instance on the hypervisor. [ 1045.233485] env[61839]: DEBUG oslo.service.loopingcall [None req-61a85ce9-a07c-4a15-a7e9-2624e3918510 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1045.233691] env[61839]: DEBUG nova.compute.manager [-] [instance: 15794971-10d8-4234-8a72-90c940dae90c] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1045.233775] env[61839]: DEBUG nova.network.neutron [-] [instance: 15794971-10d8-4234-8a72-90c940dae90c] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1045.307805] env[61839]: DEBUG nova.compute.manager [None req-32e4c9e8-c936-4782-9ca4-c5a97eef1748 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 6b193149-68a3-43fc-a331-1b49e0cab484] Start building block device mappings for instance. 
{{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1045.493532] env[61839]: DEBUG nova.compute.manager [req-95ff018a-70db-4f61-8ce3-f7fefe4b7ff7 req-a4fd89f6-0f59-4817-a41b-45278c0305f1 service nova] [instance: 15794971-10d8-4234-8a72-90c940dae90c] Received event network-vif-deleted-bd6d90a8-d812-4325-b8eb-787e8a4476b5 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1045.493994] env[61839]: INFO nova.compute.manager [req-95ff018a-70db-4f61-8ce3-f7fefe4b7ff7 req-a4fd89f6-0f59-4817-a41b-45278c0305f1 service nova] [instance: 15794971-10d8-4234-8a72-90c940dae90c] Neutron deleted interface bd6d90a8-d812-4325-b8eb-787e8a4476b5; detaching it from the instance and deleting it from the info cache [ 1045.493994] env[61839]: DEBUG nova.network.neutron [req-95ff018a-70db-4f61-8ce3-f7fefe4b7ff7 req-a4fd89f6-0f59-4817-a41b-45278c0305f1 service nova] [instance: 15794971-10d8-4234-8a72-90c940dae90c] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1045.974352] env[61839]: DEBUG nova.network.neutron [-] [instance: 15794971-10d8-4234-8a72-90c940dae90c] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1045.995923] env[61839]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-fead2747-111b-42c6-84ef-1933172678b6 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.006263] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6226a4d-285b-49ac-b087-4a6fc3cb4c3e {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.034760] env[61839]: DEBUG nova.compute.manager [req-95ff018a-70db-4f61-8ce3-f7fefe4b7ff7 req-a4fd89f6-0f59-4817-a41b-45278c0305f1 service nova] [instance: 15794971-10d8-4234-8a72-90c940dae90c] Detach interface failed, port_id=bd6d90a8-d812-4325-b8eb-787e8a4476b5, reason: Instance 15794971-10d8-4234-8a72-90c940dae90c could not be found. 
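[editor's note] The network-vif-deleted handling above is Nova's external-event mechanism: Neutron posts an event to the compute API, and the compute manager either wakes a waiter registered for that (instance, event) pair or falls through to best-effort cleanup, which is where the "Detach interface failed ... could not be found" message comes from. A toy illustration of the waiter table only, not Nova's implementation:

# Toy model of the external-event dispatch visible above.
import threading

waiters = {}            # (instance_uuid, event_key) -> threading.Event
table_lock = threading.Lock()

def pop_instance_event(instance_uuid, event_key):
    with table_lock:
        ev = waiters.pop((instance_uuid, event_key), None)
    if ev is None:
        # Mirrors the "No waiting events found dispatching ..." /
        # "Received unexpected event ..." pair seen in this log.
        print('Received unexpected event %s' % event_key)
    else:
        ev.set()        # wake the greenthread waiting on this event

pop_instance_event('15794971-10d8-4234-8a72-90c940dae90c',
                   'network-vif-deleted-bd6d90a8-d812-4325-b8eb-787e8a4476b5')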
{{(pid=61839) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1046.197029] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfb0698d-4a09-4581-8542-2b1f3960ba7e {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.205556] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72c8d2fb-3ab9-4659-a72e-f363adba2906 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.234645] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f13a20c6-0ba9-487c-b527-51a5b271b883 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.241655] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50d14050-23da-4d09-8405-091549b188c4 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.255009] env[61839]: DEBUG nova.compute.provider_tree [None req-9f5969a7-8816-49c0-8a86-1abb13ff475b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1046.316239] env[61839]: DEBUG nova.compute.manager [None req-32e4c9e8-c936-4782-9ca4-c5a97eef1748 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 6b193149-68a3-43fc-a331-1b49e0cab484] Start spawning the instance on the hypervisor. 
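[editor's note] The nova.policy entry earlier in this stretch ("Policy check for network:attach_external_network failed with credentials ...") is an ordinary oslo.policy evaluation: a member/reader token fails the external-network rule, so allocation proceeds on a tenant network. A hedged sketch; the rule body "role:admin" is an assumption for illustration, not Nova's actual default policy.

# Sketch of an oslo.policy check like the one logged above.
from oslo_config import cfg
from oslo_policy import policy

enforcer = policy.Enforcer(cfg.CONF)
enforcer.register_default(policy.RuleDefault(
    'network:attach_external_network', 'role:admin'))  # assumed rule body

creds = {'roles': ['member', 'reader'],
         'project_id': '5aba1e066cb4400dbbacc92f393962e6'}

# enforce() returns False rather than raising when do_raise is not set,
# matching the DEBUG-level "Policy check ... failed" behaviour above.
print(enforcer.enforce('network:attach_external_network', {}, creds))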
{{(pid=61839) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1046.341348] env[61839]: DEBUG nova.virt.hardware [None req-32e4c9e8-c936-4782-9ca4-c5a97eef1748 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-18T16:51:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-18T16:50:57Z,direct_url=,disk_format='vmdk',id=e497cc62-282a-4a70-9770-22d80d8a1013,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a98e034276e44534a9feace637762da7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-18T16:50:58Z,virtual_size=,visibility=), allow threads: False {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1046.341618] env[61839]: DEBUG nova.virt.hardware [None req-32e4c9e8-c936-4782-9ca4-c5a97eef1748 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Flavor limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1046.341780] env[61839]: DEBUG nova.virt.hardware [None req-32e4c9e8-c936-4782-9ca4-c5a97eef1748 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Image limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1046.341969] env[61839]: DEBUG nova.virt.hardware [None req-32e4c9e8-c936-4782-9ca4-c5a97eef1748 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Flavor pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1046.342136] env[61839]: DEBUG nova.virt.hardware [None req-32e4c9e8-c936-4782-9ca4-c5a97eef1748 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Image pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1046.342293] env[61839]: DEBUG nova.virt.hardware [None req-32e4c9e8-c936-4782-9ca4-c5a97eef1748 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1046.342503] env[61839]: DEBUG nova.virt.hardware [None req-32e4c9e8-c936-4782-9ca4-c5a97eef1748 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1046.342670] env[61839]: DEBUG nova.virt.hardware [None req-32e4c9e8-c936-4782-9ca4-c5a97eef1748 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1046.342842] env[61839]: DEBUG nova.virt.hardware [None req-32e4c9e8-c936-4782-9ca4-c5a97eef1748 tempest-ServersTestJSON-2052072083 
tempest-ServersTestJSON-2052072083-project-member] Got 1 possible topologies {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1046.343022] env[61839]: DEBUG nova.virt.hardware [None req-32e4c9e8-c936-4782-9ca4-c5a97eef1748 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1046.343211] env[61839]: DEBUG nova.virt.hardware [None req-32e4c9e8-c936-4782-9ca4-c5a97eef1748 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1046.344111] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-878fc6f8-260e-4437-a699-02bd052544f8 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.352859] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d498646a-ba21-4344-a1ac-084f3ddc545b {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.477454] env[61839]: INFO nova.compute.manager [-] [instance: 15794971-10d8-4234-8a72-90c940dae90c] Took 1.24 seconds to deallocate network for instance. [ 1046.521155] env[61839]: DEBUG nova.compute.manager [req-e5eccbe3-917b-4379-af5c-5742b544ac7a req-032642f3-a703-4f92-8bf6-9b3a9fd99845 service nova] [instance: 6b193149-68a3-43fc-a331-1b49e0cab484] Received event network-vif-plugged-942b0af0-855f-4721-b554-cc5ecb32794e {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1046.521155] env[61839]: DEBUG oslo_concurrency.lockutils [req-e5eccbe3-917b-4379-af5c-5742b544ac7a req-032642f3-a703-4f92-8bf6-9b3a9fd99845 service nova] Acquiring lock "6b193149-68a3-43fc-a331-1b49e0cab484-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1046.521155] env[61839]: DEBUG oslo_concurrency.lockutils [req-e5eccbe3-917b-4379-af5c-5742b544ac7a req-032642f3-a703-4f92-8bf6-9b3a9fd99845 service nova] Lock "6b193149-68a3-43fc-a331-1b49e0cab484-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1046.521155] env[61839]: DEBUG oslo_concurrency.lockutils [req-e5eccbe3-917b-4379-af5c-5742b544ac7a req-032642f3-a703-4f92-8bf6-9b3a9fd99845 service nova] Lock "6b193149-68a3-43fc-a331-1b49e0cab484-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1046.521155] env[61839]: DEBUG nova.compute.manager [req-e5eccbe3-917b-4379-af5c-5742b544ac7a req-032642f3-a703-4f92-8bf6-9b3a9fd99845 service nova] [instance: 6b193149-68a3-43fc-a331-1b49e0cab484] No waiting events found dispatching network-vif-plugged-942b0af0-855f-4721-b554-cc5ecb32794e {{(pid=61839) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1046.521155] env[61839]: WARNING
nova.compute.manager [req-e5eccbe3-917b-4379-af5c-5742b544ac7a req-032642f3-a703-4f92-8bf6-9b3a9fd99845 service nova] [instance: 6b193149-68a3-43fc-a331-1b49e0cab484] Received unexpected event network-vif-plugged-942b0af0-855f-4721-b554-cc5ecb32794e for instance with vm_state building and task_state spawning. [ 1046.607120] env[61839]: DEBUG nova.network.neutron [None req-32e4c9e8-c936-4782-9ca4-c5a97eef1748 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 6b193149-68a3-43fc-a331-1b49e0cab484] Successfully updated port: 942b0af0-855f-4721-b554-cc5ecb32794e {{(pid=61839) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1046.758505] env[61839]: DEBUG nova.scheduler.client.report [None req-9f5969a7-8816-49c0-8a86-1abb13ff475b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1046.985656] env[61839]: DEBUG oslo_concurrency.lockutils [None req-61a85ce9-a07c-4a15-a7e9-2624e3918510 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1047.109809] env[61839]: DEBUG oslo_concurrency.lockutils [None req-32e4c9e8-c936-4782-9ca4-c5a97eef1748 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Acquiring lock "refresh_cache-6b193149-68a3-43fc-a331-1b49e0cab484" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1047.110119] env[61839]: DEBUG oslo_concurrency.lockutils [None req-32e4c9e8-c936-4782-9ca4-c5a97eef1748 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Acquired lock "refresh_cache-6b193149-68a3-43fc-a331-1b49e0cab484" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1047.110538] env[61839]: DEBUG nova.network.neutron [None req-32e4c9e8-c936-4782-9ca4-c5a97eef1748 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 6b193149-68a3-43fc-a331-1b49e0cab484] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1047.264027] env[61839]: DEBUG oslo_concurrency.lockutils [None req-9f5969a7-8816-49c0-8a86-1abb13ff475b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.187s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1047.264593] env[61839]: DEBUG nova.compute.manager [None req-9f5969a7-8816-49c0-8a86-1abb13ff475b tempest-AttachInterfacesTestJSON-1805184072 
tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: 6b12ef55-b566-4a74-a794-b4e4c41debe1] Start building networks asynchronously for instance. {{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1047.267281] env[61839]: DEBUG oslo_concurrency.lockutils [None req-61a85ce9-a07c-4a15-a7e9-2624e3918510 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.282s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1047.267506] env[61839]: DEBUG nova.objects.instance [None req-61a85ce9-a07c-4a15-a7e9-2624e3918510 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Lazy-loading 'resources' on Instance uuid 15794971-10d8-4234-8a72-90c940dae90c {{(pid=61839) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1047.640140] env[61839]: DEBUG nova.network.neutron [None req-32e4c9e8-c936-4782-9ca4-c5a97eef1748 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 6b193149-68a3-43fc-a331-1b49e0cab484] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1047.770245] env[61839]: DEBUG nova.compute.utils [None req-9f5969a7-8816-49c0-8a86-1abb13ff475b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Using /dev/sd instead of None {{(pid=61839) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1047.774740] env[61839]: DEBUG nova.compute.manager [None req-9f5969a7-8816-49c0-8a86-1abb13ff475b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: 6b12ef55-b566-4a74-a794-b4e4c41debe1] Allocating IP information in the background. 
{{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1047.774740] env[61839]: DEBUG nova.network.neutron [None req-9f5969a7-8816-49c0-8a86-1abb13ff475b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: 6b12ef55-b566-4a74-a794-b4e4c41debe1] allocate_for_instance() {{(pid=61839) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1047.778978] env[61839]: DEBUG nova.network.neutron [None req-32e4c9e8-c936-4782-9ca4-c5a97eef1748 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 6b193149-68a3-43fc-a331-1b49e0cab484] Updating instance_info_cache with network_info: [{"id": "942b0af0-855f-4721-b554-cc5ecb32794e", "address": "fa:16:3e:7e:d5:e1", "network": {"id": "100bb090-83a3-4de8-bd2b-f65900cd5a3e", "bridge": "br-int", "label": "tempest-ServersTestJSON-901620943-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5aba1e066cb4400dbbacc92f393962e6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3b67e519-46cf-44ce-b670-4ba4c0c5b658", "external-id": "nsx-vlan-transportzone-110", "segmentation_id": 110, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap942b0af0-85", "ovs_interfaceid": "942b0af0-855f-4721-b554-cc5ecb32794e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1047.811545] env[61839]: DEBUG nova.policy [None req-9f5969a7-8816-49c0-8a86-1abb13ff475b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '76a29e808031416ab8895e89c337be6e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7e03467b7fba46a9aac1562a1cb8368e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61839) authorize /opt/stack/nova/nova/policy.py:201}} [ 1047.884941] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecaa8e35-62ee-4739-8087-024e4c4f02a6 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.892874] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1093a344-dec5-412a-ba65-6f4b2607b9fa {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.922921] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34289d2d-9121-47af-af55-879231bed532 {{(pid=61839) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.930792] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8543e611-660e-47f3-a37c-f40483377ca7 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.946346] env[61839]: DEBUG nova.compute.provider_tree [None req-61a85ce9-a07c-4a15-a7e9-2624e3918510 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1048.073839] env[61839]: DEBUG nova.network.neutron [None req-9f5969a7-8816-49c0-8a86-1abb13ff475b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: 6b12ef55-b566-4a74-a794-b4e4c41debe1] Successfully created port: df30d6b1-3fc6-465f-9b51-353f874ccd30 {{(pid=61839) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1048.275237] env[61839]: DEBUG nova.compute.manager [None req-9f5969a7-8816-49c0-8a86-1abb13ff475b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: 6b12ef55-b566-4a74-a794-b4e4c41debe1] Start building block device mappings for instance. {{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1048.280710] env[61839]: DEBUG oslo_concurrency.lockutils [None req-32e4c9e8-c936-4782-9ca4-c5a97eef1748 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Releasing lock "refresh_cache-6b193149-68a3-43fc-a331-1b49e0cab484" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1048.281016] env[61839]: DEBUG nova.compute.manager [None req-32e4c9e8-c936-4782-9ca4-c5a97eef1748 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 6b193149-68a3-43fc-a331-1b49e0cab484] Instance network_info: |[{"id": "942b0af0-855f-4721-b554-cc5ecb32794e", "address": "fa:16:3e:7e:d5:e1", "network": {"id": "100bb090-83a3-4de8-bd2b-f65900cd5a3e", "bridge": "br-int", "label": "tempest-ServersTestJSON-901620943-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5aba1e066cb4400dbbacc92f393962e6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3b67e519-46cf-44ce-b670-4ba4c0c5b658", "external-id": "nsx-vlan-transportzone-110", "segmentation_id": 110, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap942b0af0-85", "ovs_interfaceid": "942b0af0-855f-4721-b554-cc5ecb32794e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1048.281431] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-32e4c9e8-c936-4782-9ca4-c5a97eef1748 tempest-ServersTestJSON-2052072083 
tempest-ServersTestJSON-2052072083-project-member] [instance: 6b193149-68a3-43fc-a331-1b49e0cab484] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7e:d5:e1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3b67e519-46cf-44ce-b670-4ba4c0c5b658', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '942b0af0-855f-4721-b554-cc5ecb32794e', 'vif_model': 'vmxnet3'}] {{(pid=61839) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1048.288917] env[61839]: DEBUG oslo.service.loopingcall [None req-32e4c9e8-c936-4782-9ca4-c5a97eef1748 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1048.289102] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6b193149-68a3-43fc-a331-1b49e0cab484] Creating VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1048.289261] env[61839]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e37324d7-4043-4946-b48d-9e4e2b7721a9 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.309721] env[61839]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1048.309721] env[61839]: value = "task-1315006" [ 1048.309721] env[61839]: _type = "Task" [ 1048.309721] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1048.318188] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1315006, 'name': CreateVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.449829] env[61839]: DEBUG nova.scheduler.client.report [None req-61a85ce9-a07c-4a15-a7e9-2624e3918510 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1048.548598] env[61839]: DEBUG nova.compute.manager [req-359c9472-80e0-489e-8d55-5c156ffb4dba req-c8b9d3b1-d1a4-4a56-a1a2-2076e38ccfba service nova] [instance: 6b193149-68a3-43fc-a331-1b49e0cab484] Received event network-changed-942b0af0-855f-4721-b554-cc5ecb32794e {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1048.548831] env[61839]: DEBUG nova.compute.manager [req-359c9472-80e0-489e-8d55-5c156ffb4dba req-c8b9d3b1-d1a4-4a56-a1a2-2076e38ccfba service nova] [instance: 6b193149-68a3-43fc-a331-1b49e0cab484] Refreshing instance network info cache due to event network-changed-942b0af0-855f-4721-b554-cc5ecb32794e. 
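[editor's note] The inventory payload a few entries above translates into schedulable capacity as (total - reserved) * allocation_ratio per resource class; with the logged numbers this provider exposes 192 VCPU, 196078 MB of RAM and 400 GB of disk. A quick check of that arithmetic:

# Effective capacity per resource class from the logged inventory data.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}
for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(rc, capacity)   # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0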
{{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1048.549040] env[61839]: DEBUG oslo_concurrency.lockutils [req-359c9472-80e0-489e-8d55-5c156ffb4dba req-c8b9d3b1-d1a4-4a56-a1a2-2076e38ccfba service nova] Acquiring lock "refresh_cache-6b193149-68a3-43fc-a331-1b49e0cab484" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1048.549202] env[61839]: DEBUG oslo_concurrency.lockutils [req-359c9472-80e0-489e-8d55-5c156ffb4dba req-c8b9d3b1-d1a4-4a56-a1a2-2076e38ccfba service nova] Acquired lock "refresh_cache-6b193149-68a3-43fc-a331-1b49e0cab484" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1048.549378] env[61839]: DEBUG nova.network.neutron [req-359c9472-80e0-489e-8d55-5c156ffb4dba req-c8b9d3b1-d1a4-4a56-a1a2-2076e38ccfba service nova] [instance: 6b193149-68a3-43fc-a331-1b49e0cab484] Refreshing network info cache for port 942b0af0-855f-4721-b554-cc5ecb32794e {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1048.821974] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1315006, 'name': CreateVM_Task, 'duration_secs': 0.301208} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1048.821974] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6b193149-68a3-43fc-a331-1b49e0cab484] Created VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1048.822844] env[61839]: DEBUG oslo_concurrency.lockutils [None req-32e4c9e8-c936-4782-9ca4-c5a97eef1748 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1048.823043] env[61839]: DEBUG oslo_concurrency.lockutils [None req-32e4c9e8-c936-4782-9ca4-c5a97eef1748 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1048.823326] env[61839]: DEBUG oslo_concurrency.lockutils [None req-32e4c9e8-c936-4782-9ca4-c5a97eef1748 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1048.823635] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5a682359-a838-462c-b889-6accdcd25b9b {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.828348] env[61839]: DEBUG oslo_vmware.api [None req-32e4c9e8-c936-4782-9ca4-c5a97eef1748 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Waiting for the task: (returnval){ [ 1048.828348] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]5216bdef-5e3c-54ef-4d59-4e3550eced13" [ 1048.828348] env[61839]: _type = "Task" [ 1048.828348] env[61839]: } to complete. 
{{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1048.837320] env[61839]: DEBUG oslo_vmware.api [None req-32e4c9e8-c936-4782-9ca4-c5a97eef1748 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]5216bdef-5e3c-54ef-4d59-4e3550eced13, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.955275] env[61839]: DEBUG oslo_concurrency.lockutils [None req-61a85ce9-a07c-4a15-a7e9-2624e3918510 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.688s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1048.973598] env[61839]: INFO nova.scheduler.client.report [None req-61a85ce9-a07c-4a15-a7e9-2624e3918510 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Deleted allocations for instance 15794971-10d8-4234-8a72-90c940dae90c [ 1049.279743] env[61839]: DEBUG nova.network.neutron [req-359c9472-80e0-489e-8d55-5c156ffb4dba req-c8b9d3b1-d1a4-4a56-a1a2-2076e38ccfba service nova] [instance: 6b193149-68a3-43fc-a331-1b49e0cab484] Updated VIF entry in instance network info cache for port 942b0af0-855f-4721-b554-cc5ecb32794e. {{(pid=61839) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1049.279984] env[61839]: DEBUG nova.network.neutron [req-359c9472-80e0-489e-8d55-5c156ffb4dba req-c8b9d3b1-d1a4-4a56-a1a2-2076e38ccfba service nova] [instance: 6b193149-68a3-43fc-a331-1b49e0cab484] Updating instance_info_cache with network_info: [{"id": "942b0af0-855f-4721-b554-cc5ecb32794e", "address": "fa:16:3e:7e:d5:e1", "network": {"id": "100bb090-83a3-4de8-bd2b-f65900cd5a3e", "bridge": "br-int", "label": "tempest-ServersTestJSON-901620943-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5aba1e066cb4400dbbacc92f393962e6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3b67e519-46cf-44ce-b670-4ba4c0c5b658", "external-id": "nsx-vlan-transportzone-110", "segmentation_id": 110, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap942b0af0-85", "ovs_interfaceid": "942b0af0-855f-4721-b554-cc5ecb32794e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1049.287964] env[61839]: DEBUG nova.compute.manager [None req-9f5969a7-8816-49c0-8a86-1abb13ff475b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: 6b12ef55-b566-4a74-a794-b4e4c41debe1] Start spawning the instance on the hypervisor. 
{{(pid=61839) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1049.313029] env[61839]: DEBUG nova.virt.hardware [None req-9f5969a7-8816-49c0-8a86-1abb13ff475b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-18T16:51:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-18T16:50:57Z,direct_url=,disk_format='vmdk',id=e497cc62-282a-4a70-9770-22d80d8a1013,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a98e034276e44534a9feace637762da7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-18T16:50:58Z,virtual_size=,visibility=), allow threads: False {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1049.313306] env[61839]: DEBUG nova.virt.hardware [None req-9f5969a7-8816-49c0-8a86-1abb13ff475b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Flavor limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1049.313471] env[61839]: DEBUG nova.virt.hardware [None req-9f5969a7-8816-49c0-8a86-1abb13ff475b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Image limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1049.313660] env[61839]: DEBUG nova.virt.hardware [None req-9f5969a7-8816-49c0-8a86-1abb13ff475b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Flavor pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1049.313814] env[61839]: DEBUG nova.virt.hardware [None req-9f5969a7-8816-49c0-8a86-1abb13ff475b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Image pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1049.313967] env[61839]: DEBUG nova.virt.hardware [None req-9f5969a7-8816-49c0-8a86-1abb13ff475b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1049.314191] env[61839]: DEBUG nova.virt.hardware [None req-9f5969a7-8816-49c0-8a86-1abb13ff475b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1049.314359] env[61839]: DEBUG nova.virt.hardware [None req-9f5969a7-8816-49c0-8a86-1abb13ff475b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1049.314534] 
env[61839]: DEBUG nova.virt.hardware [None req-9f5969a7-8816-49c0-8a86-1abb13ff475b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Got 1 possible topologies {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1049.314703] env[61839]: DEBUG nova.virt.hardware [None req-9f5969a7-8816-49c0-8a86-1abb13ff475b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1049.314880] env[61839]: DEBUG nova.virt.hardware [None req-9f5969a7-8816-49c0-8a86-1abb13ff475b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1049.315820] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-980b5d05-d250-49bf-a844-7ab83b00e606 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.324499] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cd6f2fb-b892-4547-b4a2-d9b6eddc347e {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.346180] env[61839]: DEBUG oslo_vmware.api [None req-32e4c9e8-c936-4782-9ca4-c5a97eef1748 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]5216bdef-5e3c-54ef-4d59-4e3550eced13, 'name': SearchDatastore_Task, 'duration_secs': 0.010683} completed successfully. 
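[editor's note] Both spawn paths in this stretch walk the same CPU-topology enumeration: 1 vCPU against limits of 65536 sockets/cores/threads yields exactly one topology, 1:1:1, matching "Got 1 possible topologies" above. A toy re-derivation of that enumeration (not nova.virt.hardware itself): every (sockets, cores, threads) triple whose product equals the vCPU count and respects the maxima.

# Toy enumeration of viable CPU topologies for a given vCPU count.
def possible_topologies(vcpus, max_sockets, max_cores, max_threads):
    for s in range(1, min(vcpus, max_sockets) + 1):
        if vcpus % s:
            continue                      # sockets must divide vcpus
        for c in range(1, min(vcpus // s, max_cores) + 1):
            if (vcpus // s) % c:
                continue                  # cores must divide the remainder
            t = vcpus // (s * c)
            if t <= max_threads:
                yield (s, c, t)

print(list(possible_topologies(1, 65536, 65536, 65536)))  # [(1, 1, 1)]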
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1049.346499] env[61839]: DEBUG oslo_concurrency.lockutils [None req-32e4c9e8-c936-4782-9ca4-c5a97eef1748 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1049.346756] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-32e4c9e8-c936-4782-9ca4-c5a97eef1748 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 6b193149-68a3-43fc-a331-1b49e0cab484] Processing image e497cc62-282a-4a70-9770-22d80d8a1013 {{(pid=61839) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1049.347011] env[61839]: DEBUG oslo_concurrency.lockutils [None req-32e4c9e8-c936-4782-9ca4-c5a97eef1748 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1049.347175] env[61839]: DEBUG oslo_concurrency.lockutils [None req-32e4c9e8-c936-4782-9ca4-c5a97eef1748 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1049.347359] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-32e4c9e8-c936-4782-9ca4-c5a97eef1748 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1049.347613] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-dd90e071-edd8-4189-9a4b-5ae22583e87e {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.359240] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-32e4c9e8-c936-4782-9ca4-c5a97eef1748 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1049.359426] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-32e4c9e8-c936-4782-9ca4-c5a97eef1748 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Folder [datastore1] devstack-image-cache_base created. 
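[editor's note] The bracketed strings above ("[datastore1] devstack-image-cache_base/...") are VMware datastore paths: a datastore name in brackets followed by a slash-separated path. A small helper reproducing the source and destination of the CopyVirtualDisk_Task that follows a few entries below:

# Build datastore paths of the form "[datastore] a/b/c".
def ds_path(datastore, *parts):
    return '[%s] %s' % (datastore, '/'.join(parts))

image = 'e497cc62-282a-4a70-9770-22d80d8a1013'
instance = '6b193149-68a3-43fc-a331-1b49e0cab484'

src = ds_path('datastore1', 'devstack-image-cache_base', image, image + '.vmdk')
dst = ds_path('datastore1', instance, instance + '.vmdk')
print(src)   # [datastore1] devstack-image-cache_base/e497.../e497....vmdk
print(dst)   # [datastore1] 6b19.../6b19....vmdk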
{{(pid=61839) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1049.360146] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e15170db-6007-4693-862b-a01a48ee7a36 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.364923] env[61839]: DEBUG oslo_vmware.api [None req-32e4c9e8-c936-4782-9ca4-c5a97eef1748 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Waiting for the task: (returnval){ [ 1049.364923] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52678bf6-200c-bd39-ee11-579680a0464b" [ 1049.364923] env[61839]: _type = "Task" [ 1049.364923] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1049.374042] env[61839]: DEBUG oslo_vmware.api [None req-32e4c9e8-c936-4782-9ca4-c5a97eef1748 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52678bf6-200c-bd39-ee11-579680a0464b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.456042] env[61839]: DEBUG nova.compute.manager [req-d71a8e39-61bb-49a9-8024-554e5057d1cd req-3b3b0c28-5e86-4e28-ae3a-c50100d2e0f1 service nova] [instance: 6b12ef55-b566-4a74-a794-b4e4c41debe1] Received event network-vif-plugged-df30d6b1-3fc6-465f-9b51-353f874ccd30 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1049.456094] env[61839]: DEBUG oslo_concurrency.lockutils [req-d71a8e39-61bb-49a9-8024-554e5057d1cd req-3b3b0c28-5e86-4e28-ae3a-c50100d2e0f1 service nova] Acquiring lock "6b12ef55-b566-4a74-a794-b4e4c41debe1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1049.456344] env[61839]: DEBUG oslo_concurrency.lockutils [req-d71a8e39-61bb-49a9-8024-554e5057d1cd req-3b3b0c28-5e86-4e28-ae3a-c50100d2e0f1 service nova] Lock "6b12ef55-b566-4a74-a794-b4e4c41debe1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1049.456575] env[61839]: DEBUG oslo_concurrency.lockutils [req-d71a8e39-61bb-49a9-8024-554e5057d1cd req-3b3b0c28-5e86-4e28-ae3a-c50100d2e0f1 service nova] Lock "6b12ef55-b566-4a74-a794-b4e4c41debe1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1049.456761] env[61839]: DEBUG nova.compute.manager [req-d71a8e39-61bb-49a9-8024-554e5057d1cd req-3b3b0c28-5e86-4e28-ae3a-c50100d2e0f1 service nova] [instance: 6b12ef55-b566-4a74-a794-b4e4c41debe1] No waiting events found dispatching network-vif-plugged-df30d6b1-3fc6-465f-9b51-353f874ccd30 {{(pid=61839) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1049.456953] env[61839]: WARNING nova.compute.manager [req-d71a8e39-61bb-49a9-8024-554e5057d1cd req-3b3b0c28-5e86-4e28-ae3a-c50100d2e0f1 service nova] [instance: 6b12ef55-b566-4a74-a794-b4e4c41debe1] Received unexpected event network-vif-plugged-df30d6b1-3fc6-465f-9b51-353f874ccd30 for
instance with vm_state building and task_state spawning. [ 1049.481488] env[61839]: DEBUG oslo_concurrency.lockutils [None req-61a85ce9-a07c-4a15-a7e9-2624e3918510 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Lock "15794971-10d8-4234-8a72-90c940dae90c" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 4.853s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1049.545273] env[61839]: DEBUG nova.network.neutron [None req-9f5969a7-8816-49c0-8a86-1abb13ff475b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: 6b12ef55-b566-4a74-a794-b4e4c41debe1] Successfully updated port: df30d6b1-3fc6-465f-9b51-353f874ccd30 {{(pid=61839) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1049.783996] env[61839]: DEBUG oslo_concurrency.lockutils [req-359c9472-80e0-489e-8d55-5c156ffb4dba req-c8b9d3b1-d1a4-4a56-a1a2-2076e38ccfba service nova] Releasing lock "refresh_cache-6b193149-68a3-43fc-a331-1b49e0cab484" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1049.877000] env[61839]: DEBUG oslo_vmware.api [None req-32e4c9e8-c936-4782-9ca4-c5a97eef1748 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52678bf6-200c-bd39-ee11-579680a0464b, 'name': SearchDatastore_Task, 'duration_secs': 0.008551} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1049.877966] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ba33778e-cc32-48b0-8fa4-1a595fbacd52 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.883516] env[61839]: DEBUG oslo_vmware.api [None req-32e4c9e8-c936-4782-9ca4-c5a97eef1748 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Waiting for the task: (returnval){ [ 1049.883516] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52b5dfdb-cf65-c328-1e45-766d3a05eea7" [ 1049.883516] env[61839]: _type = "Task" [ 1049.883516] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1049.892821] env[61839]: DEBUG oslo_vmware.api [None req-32e4c9e8-c936-4782-9ca4-c5a97eef1748 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52b5dfdb-cf65-c328-1e45-766d3a05eea7, 'name': SearchDatastore_Task} progress is 0%.
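[editor's note] The "Acquiring lock / acquired ... waited N / released ... held 4.853s" triples throughout this log come from oslo.concurrency's lockutils; the waited/held figures are measured around a named semaphore. A minimal sketch of the same primitive (the function body and lock name are illustrative, not Nova's code):

# The lockutils pattern behind the acquire/release entries above.
from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def instance_claim(instance_uuid):
    # Runs with the "compute_resources" semaphore held; concurrent callers
    # queue here, which is what the "waited N.NNNs" figures measure.
    print('claiming resources for %s' % instance_uuid)

instance_claim('6b12ef55-b566-4a74-a794-b4e4c41debe1')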
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1050.047642] env[61839]: DEBUG oslo_concurrency.lockutils [None req-9f5969a7-8816-49c0-8a86-1abb13ff475b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Acquiring lock "refresh_cache-6b12ef55-b566-4a74-a794-b4e4c41debe1" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1050.047811] env[61839]: DEBUG oslo_concurrency.lockutils [None req-9f5969a7-8816-49c0-8a86-1abb13ff475b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Acquired lock "refresh_cache-6b12ef55-b566-4a74-a794-b4e4c41debe1" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1050.048016] env[61839]: DEBUG nova.network.neutron [None req-9f5969a7-8816-49c0-8a86-1abb13ff475b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: 6b12ef55-b566-4a74-a794-b4e4c41debe1] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1050.393861] env[61839]: DEBUG oslo_vmware.api [None req-32e4c9e8-c936-4782-9ca4-c5a97eef1748 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52b5dfdb-cf65-c328-1e45-766d3a05eea7, 'name': SearchDatastore_Task, 'duration_secs': 0.009888} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1050.394273] env[61839]: DEBUG oslo_concurrency.lockutils [None req-32e4c9e8-c936-4782-9ca4-c5a97eef1748 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1050.394601] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-32e4c9e8-c936-4782-9ca4-c5a97eef1748 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk to [datastore1] 6b193149-68a3-43fc-a331-1b49e0cab484/6b193149-68a3-43fc-a331-1b49e0cab484.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1050.394789] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d4a11a58-a661-440f-809f-f05068125cee {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.403364] env[61839]: DEBUG oslo_vmware.api [None req-32e4c9e8-c936-4782-9ca4-c5a97eef1748 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Waiting for the task: (returnval){ [ 1050.403364] env[61839]: value = "task-1315007" [ 1050.403364] env[61839]: _type = "Task" [ 1050.403364] env[61839]: } to complete. 
{{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1050.410887] env[61839]: DEBUG oslo_vmware.api [None req-32e4c9e8-c936-4782-9ca4-c5a97eef1748 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': task-1315007, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1050.505097] env[61839]: DEBUG oslo_concurrency.lockutils [None req-015e0c14-d6b9-4af5-b9ac-26ebdfffc776 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Acquiring lock "506ea987-ea38-444b-81b7-f5343de14e4f" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1050.505338] env[61839]: DEBUG oslo_concurrency.lockutils [None req-015e0c14-d6b9-4af5-b9ac-26ebdfffc776 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Lock "506ea987-ea38-444b-81b7-f5343de14e4f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1050.580898] env[61839]: DEBUG nova.network.neutron [None req-9f5969a7-8816-49c0-8a86-1abb13ff475b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: 6b12ef55-b566-4a74-a794-b4e4c41debe1] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1050.730151] env[61839]: DEBUG nova.network.neutron [None req-9f5969a7-8816-49c0-8a86-1abb13ff475b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: 6b12ef55-b566-4a74-a794-b4e4c41debe1] Updating instance_info_cache with network_info: [{"id": "df30d6b1-3fc6-465f-9b51-353f874ccd30", "address": "fa:16:3e:0c:09:8e", "network": {"id": "41c98894-de91-45eb-a390-6217e0f9dca5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1040806357-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7e03467b7fba46a9aac1562a1cb8368e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "572b7281-aad3-45fa-9cb2-fc1c70569948", "external-id": "nsx-vlan-transportzone-722", "segmentation_id": 722, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdf30d6b1-3f", "ovs_interfaceid": "df30d6b1-3fc6-465f-9b51-353f874ccd30", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1050.912956] env[61839]: DEBUG oslo_vmware.api [None req-32e4c9e8-c936-4782-9ca4-c5a97eef1748 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task:
{'id': task-1315007, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1051.008333] env[61839]: DEBUG nova.compute.manager [None req-015e0c14-d6b9-4af5-b9ac-26ebdfffc776 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: 506ea987-ea38-444b-81b7-f5343de14e4f] Starting instance... {{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1051.232830] env[61839]: DEBUG oslo_concurrency.lockutils [None req-9f5969a7-8816-49c0-8a86-1abb13ff475b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Releasing lock "refresh_cache-6b12ef55-b566-4a74-a794-b4e4c41debe1" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1051.233234] env[61839]: DEBUG nova.compute.manager [None req-9f5969a7-8816-49c0-8a86-1abb13ff475b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: 6b12ef55-b566-4a74-a794-b4e4c41debe1] Instance network_info: |[{"id": "df30d6b1-3fc6-465f-9b51-353f874ccd30", "address": "fa:16:3e:0c:09:8e", "network": {"id": "41c98894-de91-45eb-a390-6217e0f9dca5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1040806357-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7e03467b7fba46a9aac1562a1cb8368e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "572b7281-aad3-45fa-9cb2-fc1c70569948", "external-id": "nsx-vlan-transportzone-722", "segmentation_id": 722, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdf30d6b1-3f", "ovs_interfaceid": "df30d6b1-3fc6-465f-9b51-353f874ccd30", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1051.233756] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-9f5969a7-8816-49c0-8a86-1abb13ff475b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: 6b12ef55-b566-4a74-a794-b4e4c41debe1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0c:09:8e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '572b7281-aad3-45fa-9cb2-fc1c70569948', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'df30d6b1-3fc6-465f-9b51-353f874ccd30', 'vif_model': 'vmxnet3'}] {{(pid=61839) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1051.241323] env[61839]: DEBUG oslo.service.loopingcall [None req-9f5969a7-8816-49c0-8a86-1abb13ff475b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
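[editor's note] Right above, the driver condenses the Neutron network_info blob into the much smaller "Instance VIF info" structure: the MAC address, the Neutron port id, and the NSX logical-switch id that becomes an OpaqueNetwork reference. A toy extraction over a trimmed copy of the logged blob, not the driver's code:

# Reduce a network_info VIF entry to the fields logged as "VIF info".
import json

network_info = json.loads("""[{
  "id": "df30d6b1-3fc6-465f-9b51-353f874ccd30",
  "address": "fa:16:3e:0c:09:8e",
  "devname": "tapdf30d6b1-3f",
  "details": {"nsx-logical-switch-id": "572b7281-aad3-45fa-9cb2-fc1c70569948"}
}]""")

for vif in network_info:
    vif_info = {
        'network_name': 'br-int',
        'mac_address': vif['address'],
        'network_ref': {'type': 'OpaqueNetwork',
                        'network-id': vif['details']['nsx-logical-switch-id'],
                        'network-type': 'nsx.LogicalSwitch'},
        'iface_id': vif['id'],
        'vif_model': 'vmxnet3',
    }
    print(vif_info)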
{{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1051.241558] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6b12ef55-b566-4a74-a794-b4e4c41debe1] Creating VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1051.241793] env[61839]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-27362d83-3530-4abe-bf54-94fb916b6013 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.261778] env[61839]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1051.261778] env[61839]: value = "task-1315008" [ 1051.261778] env[61839]: _type = "Task" [ 1051.261778] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1051.273554] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1315008, 'name': CreateVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1051.414065] env[61839]: DEBUG oslo_vmware.api [None req-32e4c9e8-c936-4782-9ca4-c5a97eef1748 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': task-1315007, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.51265} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1051.414065] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-32e4c9e8-c936-4782-9ca4-c5a97eef1748 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk to [datastore1] 6b193149-68a3-43fc-a331-1b49e0cab484/6b193149-68a3-43fc-a331-1b49e0cab484.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1051.414065] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-32e4c9e8-c936-4782-9ca4-c5a97eef1748 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 6b193149-68a3-43fc-a331-1b49e0cab484] Extending root virtual disk to 1048576 {{(pid=61839) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1051.414571] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-91ec3e23-5d72-4421-b5d9-15be17e65b77 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.420293] env[61839]: DEBUG oslo_vmware.api [None req-32e4c9e8-c936-4782-9ca4-c5a97eef1748 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Waiting for the task: (returnval){ [ 1051.420293] env[61839]: value = "task-1315009" [ 1051.420293] env[61839]: _type = "Task" [ 1051.420293] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1051.428323] env[61839]: DEBUG oslo_vmware.api [None req-32e4c9e8-c936-4782-9ca4-c5a97eef1748 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': task-1315009, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1051.482243] env[61839]: DEBUG nova.compute.manager [req-96713558-e44a-4f24-af23-93e26c6b7251 req-13f1f323-c24e-4db3-8a5c-32be0bc50a55 service nova] [instance: 6b12ef55-b566-4a74-a794-b4e4c41debe1] Received event network-changed-df30d6b1-3fc6-465f-9b51-353f874ccd30 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1051.482459] env[61839]: DEBUG nova.compute.manager [req-96713558-e44a-4f24-af23-93e26c6b7251 req-13f1f323-c24e-4db3-8a5c-32be0bc50a55 service nova] [instance: 6b12ef55-b566-4a74-a794-b4e4c41debe1] Refreshing instance network info cache due to event network-changed-df30d6b1-3fc6-465f-9b51-353f874ccd30. {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1051.482707] env[61839]: DEBUG oslo_concurrency.lockutils [req-96713558-e44a-4f24-af23-93e26c6b7251 req-13f1f323-c24e-4db3-8a5c-32be0bc50a55 service nova] Acquiring lock "refresh_cache-6b12ef55-b566-4a74-a794-b4e4c41debe1" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1051.482934] env[61839]: DEBUG oslo_concurrency.lockutils [req-96713558-e44a-4f24-af23-93e26c6b7251 req-13f1f323-c24e-4db3-8a5c-32be0bc50a55 service nova] Acquired lock "refresh_cache-6b12ef55-b566-4a74-a794-b4e4c41debe1" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1051.483153] env[61839]: DEBUG nova.network.neutron [req-96713558-e44a-4f24-af23-93e26c6b7251 req-13f1f323-c24e-4db3-8a5c-32be0bc50a55 service nova] [instance: 6b12ef55-b566-4a74-a794-b4e4c41debe1] Refreshing network info cache for port df30d6b1-3fc6-465f-9b51-353f874ccd30 {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1051.530802] env[61839]: DEBUG oslo_concurrency.lockutils [None req-015e0c14-d6b9-4af5-b9ac-26ebdfffc776 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1051.531149] env[61839]: DEBUG oslo_concurrency.lockutils [None req-015e0c14-d6b9-4af5-b9ac-26ebdfffc776 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1051.532726] env[61839]: INFO nova.compute.claims [None req-015e0c14-d6b9-4af5-b9ac-26ebdfffc776 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: 506ea987-ea38-444b-81b7-f5343de14e4f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1051.771996] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1315008, 'name': CreateVM_Task, 'duration_secs': 0.392945} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1051.772157] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6b12ef55-b566-4a74-a794-b4e4c41debe1] Created VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1051.772802] env[61839]: DEBUG oslo_concurrency.lockutils [None req-9f5969a7-8816-49c0-8a86-1abb13ff475b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1051.772974] env[61839]: DEBUG oslo_concurrency.lockutils [None req-9f5969a7-8816-49c0-8a86-1abb13ff475b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1051.773315] env[61839]: DEBUG oslo_concurrency.lockutils [None req-9f5969a7-8816-49c0-8a86-1abb13ff475b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1051.773568] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-243e3022-acab-43ac-b56c-5823feb8b055 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.777791] env[61839]: DEBUG oslo_vmware.api [None req-9f5969a7-8816-49c0-8a86-1abb13ff475b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Waiting for the task: (returnval){ [ 1051.777791] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52bae8aa-e82c-61cb-b5c7-edb707d2e8f3" [ 1051.777791] env[61839]: _type = "Task" [ 1051.777791] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1051.784673] env[61839]: DEBUG oslo_vmware.api [None req-9f5969a7-8816-49c0-8a86-1abb13ff475b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52bae8aa-e82c-61cb-b5c7-edb707d2e8f3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1051.930076] env[61839]: DEBUG oslo_vmware.api [None req-32e4c9e8-c936-4782-9ca4-c5a97eef1748 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': task-1315009, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06071} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1051.930360] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-32e4c9e8-c936-4782-9ca4-c5a97eef1748 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 6b193149-68a3-43fc-a331-1b49e0cab484] Extended root virtual disk {{(pid=61839) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1051.931203] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7162842a-3447-42c5-b66a-4bdd8a8dedaa {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.952916] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-32e4c9e8-c936-4782-9ca4-c5a97eef1748 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 6b193149-68a3-43fc-a331-1b49e0cab484] Reconfiguring VM instance instance-0000006a to attach disk [datastore1] 6b193149-68a3-43fc-a331-1b49e0cab484/6b193149-68a3-43fc-a331-1b49e0cab484.vmdk or device None with type sparse {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1051.953150] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4f4d099c-f261-4f11-9395-0e94b0181cc1 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.971311] env[61839]: DEBUG oslo_vmware.api [None req-32e4c9e8-c936-4782-9ca4-c5a97eef1748 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Waiting for the task: (returnval){ [ 1051.971311] env[61839]: value = "task-1315010" [ 1051.971311] env[61839]: _type = "Task" [ 1051.971311] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1051.978933] env[61839]: DEBUG oslo_vmware.api [None req-32e4c9e8-c936-4782-9ca4-c5a97eef1748 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': task-1315010, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.174275] env[61839]: DEBUG nova.network.neutron [req-96713558-e44a-4f24-af23-93e26c6b7251 req-13f1f323-c24e-4db3-8a5c-32be0bc50a55 service nova] [instance: 6b12ef55-b566-4a74-a794-b4e4c41debe1] Updated VIF entry in instance network info cache for port df30d6b1-3fc6-465f-9b51-353f874ccd30. 
{{(pid=61839) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1052.174712] env[61839]: DEBUG nova.network.neutron [req-96713558-e44a-4f24-af23-93e26c6b7251 req-13f1f323-c24e-4db3-8a5c-32be0bc50a55 service nova] [instance: 6b12ef55-b566-4a74-a794-b4e4c41debe1] Updating instance_info_cache with network_info: [{"id": "df30d6b1-3fc6-465f-9b51-353f874ccd30", "address": "fa:16:3e:0c:09:8e", "network": {"id": "41c98894-de91-45eb-a390-6217e0f9dca5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1040806357-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7e03467b7fba46a9aac1562a1cb8368e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "572b7281-aad3-45fa-9cb2-fc1c70569948", "external-id": "nsx-vlan-transportzone-722", "segmentation_id": 722, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdf30d6b1-3f", "ovs_interfaceid": "df30d6b1-3fc6-465f-9b51-353f874ccd30", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1052.289594] env[61839]: DEBUG oslo_vmware.api [None req-9f5969a7-8816-49c0-8a86-1abb13ff475b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52bae8aa-e82c-61cb-b5c7-edb707d2e8f3, 'name': SearchDatastore_Task, 'duration_secs': 0.052359} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1052.289844] env[61839]: DEBUG oslo_concurrency.lockutils [None req-9f5969a7-8816-49c0-8a86-1abb13ff475b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1052.290093] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-9f5969a7-8816-49c0-8a86-1abb13ff475b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: 6b12ef55-b566-4a74-a794-b4e4c41debe1] Processing image e497cc62-282a-4a70-9770-22d80d8a1013 {{(pid=61839) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1052.290343] env[61839]: DEBUG oslo_concurrency.lockutils [None req-9f5969a7-8816-49c0-8a86-1abb13ff475b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1052.290529] env[61839]: DEBUG oslo_concurrency.lockutils [None req-9f5969a7-8816-49c0-8a86-1abb13ff475b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1052.290680] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-9f5969a7-8816-49c0-8a86-1abb13ff475b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1052.291010] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6a719358-84a4-4f36-8da0-be05f6748549 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.330477] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-9f5969a7-8816-49c0-8a86-1abb13ff475b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1052.330728] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-9f5969a7-8816-49c0-8a86-1abb13ff475b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61839) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1052.331473] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aa2d6ef0-76fb-4ea9-a211-e8793ba96538 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.337263] env[61839]: DEBUG oslo_vmware.api [None req-9f5969a7-8816-49c0-8a86-1abb13ff475b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Waiting for the task: (returnval){ [ 1052.337263] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]521ddc49-882b-513a-e89d-a2c052fb1661" [ 1052.337263] env[61839]: _type = "Task" [ 1052.337263] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1052.344841] env[61839]: DEBUG oslo_vmware.api [None req-9f5969a7-8816-49c0-8a86-1abb13ff475b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]521ddc49-882b-513a-e89d-a2c052fb1661, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.481305] env[61839]: DEBUG oslo_vmware.api [None req-32e4c9e8-c936-4782-9ca4-c5a97eef1748 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': task-1315010, 'name': ReconfigVM_Task, 'duration_secs': 0.311974} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1052.481658] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-32e4c9e8-c936-4782-9ca4-c5a97eef1748 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 6b193149-68a3-43fc-a331-1b49e0cab484] Reconfigured VM instance instance-0000006a to attach disk [datastore1] 6b193149-68a3-43fc-a331-1b49e0cab484/6b193149-68a3-43fc-a331-1b49e0cab484.vmdk or device None with type sparse {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1052.482270] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5cc3a933-1c7d-4260-aa02-30973ecd90f6 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.488606] env[61839]: DEBUG oslo_vmware.api [None req-32e4c9e8-c936-4782-9ca4-c5a97eef1748 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Waiting for the task: (returnval){ [ 1052.488606] env[61839]: value = "task-1315011" [ 1052.488606] env[61839]: _type = "Task" [ 1052.488606] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1052.495960] env[61839]: DEBUG oslo_vmware.api [None req-32e4c9e8-c936-4782-9ca4-c5a97eef1748 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': task-1315011, 'name': Rename_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.659308] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e6cbed3-e085-4de5-8f0a-5ba2a2805153 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.666716] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e4b3e9c-0dbc-4de5-ade6-9caba88843a6 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.694841] env[61839]: DEBUG oslo_concurrency.lockutils [req-96713558-e44a-4f24-af23-93e26c6b7251 req-13f1f323-c24e-4db3-8a5c-32be0bc50a55 service nova] Releasing lock "refresh_cache-6b12ef55-b566-4a74-a794-b4e4c41debe1" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1052.695830] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5fc2ffd-9a9a-420f-a18a-a6fabd30de9a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.702785] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84ef72b2-d419-4c8d-b341-30d20b9bd6f3 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.717039] env[61839]: DEBUG nova.compute.provider_tree [None req-015e0c14-d6b9-4af5-b9ac-26ebdfffc776 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1052.847966] env[61839]: DEBUG oslo_vmware.api [None req-9f5969a7-8816-49c0-8a86-1abb13ff475b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]521ddc49-882b-513a-e89d-a2c052fb1661, 'name': SearchDatastore_Task, 'duration_secs': 0.27296} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1052.848788] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-15968e51-1ff5-4d7d-93cb-9d0bb774ed9e {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.853750] env[61839]: DEBUG oslo_vmware.api [None req-9f5969a7-8816-49c0-8a86-1abb13ff475b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Waiting for the task: (returnval){ [ 1052.853750] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]5213fde7-3b3b-e79e-60ab-b628a5715c50" [ 1052.853750] env[61839]: _type = "Task" [ 1052.853750] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1052.862683] env[61839]: DEBUG oslo_vmware.api [None req-9f5969a7-8816-49c0-8a86-1abb13ff475b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]5213fde7-3b3b-e79e-60ab-b628a5715c50, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.998646] env[61839]: DEBUG oslo_vmware.api [None req-32e4c9e8-c936-4782-9ca4-c5a97eef1748 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': task-1315011, 'name': Rename_Task, 'duration_secs': 0.161842} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1052.998917] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-32e4c9e8-c936-4782-9ca4-c5a97eef1748 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 6b193149-68a3-43fc-a331-1b49e0cab484] Powering on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1052.999181] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-671b0da3-d0ff-4462-a48e-e549059efe27 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.005363] env[61839]: DEBUG oslo_vmware.api [None req-32e4c9e8-c936-4782-9ca4-c5a97eef1748 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Waiting for the task: (returnval){ [ 1053.005363] env[61839]: value = "task-1315012" [ 1053.005363] env[61839]: _type = "Task" [ 1053.005363] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1053.012756] env[61839]: DEBUG oslo_vmware.api [None req-32e4c9e8-c936-4782-9ca4-c5a97eef1748 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': task-1315012, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1053.220717] env[61839]: DEBUG nova.scheduler.client.report [None req-015e0c14-d6b9-4af5-b9ac-26ebdfffc776 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1053.364407] env[61839]: DEBUG oslo_vmware.api [None req-9f5969a7-8816-49c0-8a86-1abb13ff475b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]5213fde7-3b3b-e79e-60ab-b628a5715c50, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1053.515269] env[61839]: DEBUG oslo_vmware.api [None req-32e4c9e8-c936-4782-9ca4-c5a97eef1748 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': task-1315012, 'name': PowerOnVM_Task, 'duration_secs': 0.478692} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1053.515600] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-32e4c9e8-c936-4782-9ca4-c5a97eef1748 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 6b193149-68a3-43fc-a331-1b49e0cab484] Powered on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1053.515797] env[61839]: INFO nova.compute.manager [None req-32e4c9e8-c936-4782-9ca4-c5a97eef1748 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 6b193149-68a3-43fc-a331-1b49e0cab484] Took 7.20 seconds to spawn the instance on the hypervisor. [ 1053.515984] env[61839]: DEBUG nova.compute.manager [None req-32e4c9e8-c936-4782-9ca4-c5a97eef1748 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 6b193149-68a3-43fc-a331-1b49e0cab484] Checking state {{(pid=61839) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1053.516804] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca95711d-89bd-474b-a1e2-d64d7d6d6bf7 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.725154] env[61839]: DEBUG oslo_concurrency.lockutils [None req-015e0c14-d6b9-4af5-b9ac-26ebdfffc776 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.194s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1053.725754] env[61839]: DEBUG nova.compute.manager [None req-015e0c14-d6b9-4af5-b9ac-26ebdfffc776 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: 506ea987-ea38-444b-81b7-f5343de14e4f] Start building networks asynchronously for instance. {{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1053.864758] env[61839]: DEBUG oslo_vmware.api [None req-9f5969a7-8816-49c0-8a86-1abb13ff475b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]5213fde7-3b3b-e79e-60ab-b628a5715c50, 'name': SearchDatastore_Task, 'duration_secs': 0.665794} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1053.864999] env[61839]: DEBUG oslo_concurrency.lockutils [None req-9f5969a7-8816-49c0-8a86-1abb13ff475b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1053.865281] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f5969a7-8816-49c0-8a86-1abb13ff475b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk to [datastore1] 6b12ef55-b566-4a74-a794-b4e4c41debe1/6b12ef55-b566-4a74-a794-b4e4c41debe1.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1053.865540] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-62d2e1b7-c075-437b-a463-0f2f7e54eea1 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.871938] env[61839]: DEBUG oslo_vmware.api [None req-9f5969a7-8816-49c0-8a86-1abb13ff475b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Waiting for the task: (returnval){ [ 1053.871938] env[61839]: value = "task-1315013" [ 1053.871938] env[61839]: _type = "Task" [ 1053.871938] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1053.879128] env[61839]: DEBUG oslo_vmware.api [None req-9f5969a7-8816-49c0-8a86-1abb13ff475b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': task-1315013, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1054.036840] env[61839]: INFO nova.compute.manager [None req-32e4c9e8-c936-4782-9ca4-c5a97eef1748 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 6b193149-68a3-43fc-a331-1b49e0cab484] Took 11.96 seconds to build instance. [ 1054.230506] env[61839]: DEBUG nova.compute.utils [None req-015e0c14-d6b9-4af5-b9ac-26ebdfffc776 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Using /dev/sd instead of None {{(pid=61839) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1054.232017] env[61839]: DEBUG nova.compute.manager [None req-015e0c14-d6b9-4af5-b9ac-26ebdfffc776 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: 506ea987-ea38-444b-81b7-f5343de14e4f] Allocating IP information in the background. 
{{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1054.232660] env[61839]: DEBUG nova.network.neutron [None req-015e0c14-d6b9-4af5-b9ac-26ebdfffc776 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: 506ea987-ea38-444b-81b7-f5343de14e4f] allocate_for_instance() {{(pid=61839) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1054.279883] env[61839]: DEBUG nova.policy [None req-015e0c14-d6b9-4af5-b9ac-26ebdfffc776 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '430b14eaa0e94ef39fb0f95269448ade', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '25686a503d044467a1d641f14e14c65c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61839) authorize /opt/stack/nova/nova/policy.py:201}} [ 1054.382623] env[61839]: DEBUG oslo_vmware.api [None req-9f5969a7-8816-49c0-8a86-1abb13ff475b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': task-1315013, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.480766} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1054.382965] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f5969a7-8816-49c0-8a86-1abb13ff475b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk to [datastore1] 6b12ef55-b566-4a74-a794-b4e4c41debe1/6b12ef55-b566-4a74-a794-b4e4c41debe1.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1054.383218] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-9f5969a7-8816-49c0-8a86-1abb13ff475b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: 6b12ef55-b566-4a74-a794-b4e4c41debe1] Extending root virtual disk to 1048576 {{(pid=61839) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1054.383477] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7927b217-d896-4154-835d-09bf0abf2b78 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.389858] env[61839]: DEBUG oslo_vmware.api [None req-9f5969a7-8816-49c0-8a86-1abb13ff475b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Waiting for the task: (returnval){ [ 1054.389858] env[61839]: value = "task-1315014" [ 1054.389858] env[61839]: _type = "Task" [ 1054.389858] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1054.398785] env[61839]: DEBUG oslo_vmware.api [None req-9f5969a7-8816-49c0-8a86-1abb13ff475b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': task-1315014, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1054.535623] env[61839]: DEBUG nova.network.neutron [None req-015e0c14-d6b9-4af5-b9ac-26ebdfffc776 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: 506ea987-ea38-444b-81b7-f5343de14e4f] Successfully created port: 8ae9c8f9-030e-48f6-9368-4348d164e3b9 {{(pid=61839) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1054.537846] env[61839]: DEBUG oslo_concurrency.lockutils [None req-b70bf7c0-6be3-4fd4-a3fa-374aed7c48a8 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Acquiring lock "6b193149-68a3-43fc-a331-1b49e0cab484" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1054.538234] env[61839]: DEBUG oslo_concurrency.lockutils [None req-32e4c9e8-c936-4782-9ca4-c5a97eef1748 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Lock "6b193149-68a3-43fc-a331-1b49e0cab484" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.465s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1054.538485] env[61839]: DEBUG oslo_concurrency.lockutils [None req-b70bf7c0-6be3-4fd4-a3fa-374aed7c48a8 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Lock "6b193149-68a3-43fc-a331-1b49e0cab484" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1054.538665] env[61839]: DEBUG nova.compute.manager [None req-b70bf7c0-6be3-4fd4-a3fa-374aed7c48a8 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 6b193149-68a3-43fc-a331-1b49e0cab484] Checking state {{(pid=61839) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1054.539820] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5abd5db5-afc6-47b7-912c-768bc395255f {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.547739] env[61839]: DEBUG nova.compute.manager [None req-b70bf7c0-6be3-4fd4-a3fa-374aed7c48a8 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 6b193149-68a3-43fc-a331-1b49e0cab484] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=61839) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3368}} [ 1054.547739] env[61839]: DEBUG nova.objects.instance [None req-b70bf7c0-6be3-4fd4-a3fa-374aed7c48a8 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Lazy-loading 'flavor' on Instance uuid 6b193149-68a3-43fc-a331-1b49e0cab484 {{(pid=61839) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1054.736173] env[61839]: DEBUG nova.compute.manager [None req-015e0c14-d6b9-4af5-b9ac-26ebdfffc776 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: 506ea987-ea38-444b-81b7-f5343de14e4f] Start building block device mappings for instance. 
{{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1054.900873] env[61839]: DEBUG oslo_vmware.api [None req-9f5969a7-8816-49c0-8a86-1abb13ff475b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': task-1315014, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070781} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1054.900873] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-9f5969a7-8816-49c0-8a86-1abb13ff475b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: 6b12ef55-b566-4a74-a794-b4e4c41debe1] Extended root virtual disk {{(pid=61839) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1054.901526] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5b02a9c-c7ab-4b55-96ca-8e4706e8bb91 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.923900] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-9f5969a7-8816-49c0-8a86-1abb13ff475b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: 6b12ef55-b566-4a74-a794-b4e4c41debe1] Reconfiguring VM instance instance-0000006b to attach disk [datastore1] 6b12ef55-b566-4a74-a794-b4e4c41debe1/6b12ef55-b566-4a74-a794-b4e4c41debe1.vmdk or device None with type sparse {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1054.924187] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-55198fe8-5290-4f50-838f-e2ac5a57af52 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.944574] env[61839]: DEBUG oslo_vmware.api [None req-9f5969a7-8816-49c0-8a86-1abb13ff475b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Waiting for the task: (returnval){ [ 1054.944574] env[61839]: value = "task-1315015" [ 1054.944574] env[61839]: _type = "Task" [ 1054.944574] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1054.952680] env[61839]: DEBUG oslo_vmware.api [None req-9f5969a7-8816-49c0-8a86-1abb13ff475b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': task-1315015, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1055.052593] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-b70bf7c0-6be3-4fd4-a3fa-374aed7c48a8 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 6b193149-68a3-43fc-a331-1b49e0cab484] Powering off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1055.052593] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9213faa5-b04a-4711-8ad8-ae085a0b606c {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.060048] env[61839]: DEBUG oslo_vmware.api [None req-b70bf7c0-6be3-4fd4-a3fa-374aed7c48a8 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Waiting for the task: (returnval){ [ 1055.060048] env[61839]: value = "task-1315016" [ 1055.060048] env[61839]: _type = "Task" [ 1055.060048] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1055.067577] env[61839]: DEBUG oslo_vmware.api [None req-b70bf7c0-6be3-4fd4-a3fa-374aed7c48a8 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': task-1315016, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1055.454812] env[61839]: DEBUG oslo_vmware.api [None req-9f5969a7-8816-49c0-8a86-1abb13ff475b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': task-1315015, 'name': ReconfigVM_Task, 'duration_secs': 0.270208} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1055.454812] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-9f5969a7-8816-49c0-8a86-1abb13ff475b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: 6b12ef55-b566-4a74-a794-b4e4c41debe1] Reconfigured VM instance instance-0000006b to attach disk [datastore1] 6b12ef55-b566-4a74-a794-b4e4c41debe1/6b12ef55-b566-4a74-a794-b4e4c41debe1.vmdk or device None with type sparse {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1055.455468] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-fd1430bf-5034-4ba6-8fef-3f8c1605528f {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.461920] env[61839]: DEBUG oslo_vmware.api [None req-9f5969a7-8816-49c0-8a86-1abb13ff475b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Waiting for the task: (returnval){ [ 1055.461920] env[61839]: value = "task-1315017" [ 1055.461920] env[61839]: _type = "Task" [ 1055.461920] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1055.470127] env[61839]: DEBUG oslo_vmware.api [None req-9f5969a7-8816-49c0-8a86-1abb13ff475b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': task-1315017, 'name': Rename_Task} progress is 5%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1055.569270] env[61839]: DEBUG oslo_vmware.api [None req-b70bf7c0-6be3-4fd4-a3fa-374aed7c48a8 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': task-1315016, 'name': PowerOffVM_Task, 'duration_secs': 0.290257} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1055.569608] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-b70bf7c0-6be3-4fd4-a3fa-374aed7c48a8 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 6b193149-68a3-43fc-a331-1b49e0cab484] Powered off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1055.569662] env[61839]: DEBUG nova.compute.manager [None req-b70bf7c0-6be3-4fd4-a3fa-374aed7c48a8 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 6b193149-68a3-43fc-a331-1b49e0cab484] Checking state {{(pid=61839) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1055.570453] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c1af145-c7aa-4177-87fa-e5d16d840cb2 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.746267] env[61839]: DEBUG nova.compute.manager [None req-015e0c14-d6b9-4af5-b9ac-26ebdfffc776 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: 506ea987-ea38-444b-81b7-f5343de14e4f] Start spawning the instance on the hypervisor. {{(pid=61839) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1055.773285] env[61839]: DEBUG nova.virt.hardware [None req-015e0c14-d6b9-4af5-b9ac-26ebdfffc776 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-18T16:51:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-18T16:50:57Z,direct_url=,disk_format='vmdk',id=e497cc62-282a-4a70-9770-22d80d8a1013,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a98e034276e44534a9feace637762da7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-18T16:50:58Z,virtual_size=,visibility=), allow threads: False {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1055.773607] env[61839]: DEBUG nova.virt.hardware [None req-015e0c14-d6b9-4af5-b9ac-26ebdfffc776 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Flavor limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1055.773826] env[61839]: DEBUG nova.virt.hardware [None req-015e0c14-d6b9-4af5-b9ac-26ebdfffc776 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Image limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1055.774101] env[61839]: DEBUG 
nova.virt.hardware [None req-015e0c14-d6b9-4af5-b9ac-26ebdfffc776 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Flavor pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1055.774309] env[61839]: DEBUG nova.virt.hardware [None req-015e0c14-d6b9-4af5-b9ac-26ebdfffc776 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Image pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1055.774511] env[61839]: DEBUG nova.virt.hardware [None req-015e0c14-d6b9-4af5-b9ac-26ebdfffc776 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1055.774807] env[61839]: DEBUG nova.virt.hardware [None req-015e0c14-d6b9-4af5-b9ac-26ebdfffc776 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1055.775064] env[61839]: DEBUG nova.virt.hardware [None req-015e0c14-d6b9-4af5-b9ac-26ebdfffc776 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1055.775320] env[61839]: DEBUG nova.virt.hardware [None req-015e0c14-d6b9-4af5-b9ac-26ebdfffc776 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Got 1 possible topologies {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1055.775564] env[61839]: DEBUG nova.virt.hardware [None req-015e0c14-d6b9-4af5-b9ac-26ebdfffc776 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1055.775832] env[61839]: DEBUG nova.virt.hardware [None req-015e0c14-d6b9-4af5-b9ac-26ebdfffc776 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1055.776992] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19caadd2-7312-44e7-8a5b-4cba71527865 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.785867] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3452a176-1064-492b-8575-1b74dc2dc4e6 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.913768] env[61839]: DEBUG nova.compute.manager [req-0e89c2a0-fd23-4900-829c-108e25ac9e35 req-7c32a9c5-dab6-4af0-83e1-824507283e4d service nova] [instance: 506ea987-ea38-444b-81b7-f5343de14e4f] Received event network-vif-plugged-8ae9c8f9-030e-48f6-9368-4348d164e3b9 {{(pid=61839) 
external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}}
[ 1055.914078] env[61839]: DEBUG oslo_concurrency.lockutils [req-0e89c2a0-fd23-4900-829c-108e25ac9e35 req-7c32a9c5-dab6-4af0-83e1-824507283e4d service nova] Acquiring lock "506ea987-ea38-444b-81b7-f5343de14e4f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1055.914307] env[61839]: DEBUG oslo_concurrency.lockutils [req-0e89c2a0-fd23-4900-829c-108e25ac9e35 req-7c32a9c5-dab6-4af0-83e1-824507283e4d service nova] Lock "506ea987-ea38-444b-81b7-f5343de14e4f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1055.914482] env[61839]: DEBUG oslo_concurrency.lockutils [req-0e89c2a0-fd23-4900-829c-108e25ac9e35 req-7c32a9c5-dab6-4af0-83e1-824507283e4d service nova] Lock "506ea987-ea38-444b-81b7-f5343de14e4f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1055.914672] env[61839]: DEBUG nova.compute.manager [req-0e89c2a0-fd23-4900-829c-108e25ac9e35 req-7c32a9c5-dab6-4af0-83e1-824507283e4d service nova] [instance: 506ea987-ea38-444b-81b7-f5343de14e4f] No waiting events found dispatching network-vif-plugged-8ae9c8f9-030e-48f6-9368-4348d164e3b9 {{(pid=61839) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}}
[ 1055.914856] env[61839]: WARNING nova.compute.manager [req-0e89c2a0-fd23-4900-829c-108e25ac9e35 req-7c32a9c5-dab6-4af0-83e1-824507283e4d service nova] [instance: 506ea987-ea38-444b-81b7-f5343de14e4f] Received unexpected event network-vif-plugged-8ae9c8f9-030e-48f6-9368-4348d164e3b9 for instance with vm_state building and task_state spawning.
[ 1055.972361] env[61839]: DEBUG oslo_vmware.api [None req-9f5969a7-8816-49c0-8a86-1abb13ff475b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': task-1315017, 'name': Rename_Task, 'duration_secs': 0.249234} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1055.972634] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f5969a7-8816-49c0-8a86-1abb13ff475b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: 6b12ef55-b566-4a74-a794-b4e4c41debe1] Powering on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}}
[ 1055.972883] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e0de035c-1ec1-4052-8608-4731dee91206 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1055.979609] env[61839]: DEBUG oslo_vmware.api [None req-9f5969a7-8816-49c0-8a86-1abb13ff475b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Waiting for the task: (returnval){
[ 1055.979609] env[61839]: value = "task-1315018"
[ 1055.979609] env[61839]: _type = "Task"
[ 1055.979609] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1055.988239] env[61839]: DEBUG oslo_vmware.api [None req-9f5969a7-8816-49c0-8a86-1abb13ff475b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': task-1315018, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1056.005040] env[61839]: DEBUG nova.network.neutron [None req-015e0c14-d6b9-4af5-b9ac-26ebdfffc776 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: 506ea987-ea38-444b-81b7-f5343de14e4f] Successfully updated port: 8ae9c8f9-030e-48f6-9368-4348d164e3b9 {{(pid=61839) _update_port /opt/stack/nova/nova/network/neutron.py:586}}
[ 1056.082108] env[61839]: DEBUG oslo_concurrency.lockutils [None req-b70bf7c0-6be3-4fd4-a3fa-374aed7c48a8 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Lock "6b193149-68a3-43fc-a331-1b49e0cab484" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.543s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1056.490655] env[61839]: DEBUG oslo_vmware.api [None req-9f5969a7-8816-49c0-8a86-1abb13ff475b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': task-1315018, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1056.510027] env[61839]: DEBUG oslo_concurrency.lockutils [None req-015e0c14-d6b9-4af5-b9ac-26ebdfffc776 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Acquiring lock "refresh_cache-506ea987-ea38-444b-81b7-f5343de14e4f" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1056.510027] env[61839]: DEBUG oslo_concurrency.lockutils [None req-015e0c14-d6b9-4af5-b9ac-26ebdfffc776 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Acquired lock "refresh_cache-506ea987-ea38-444b-81b7-f5343de14e4f" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1056.510403] env[61839]: DEBUG nova.network.neutron [None req-015e0c14-d6b9-4af5-b9ac-26ebdfffc776 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: 506ea987-ea38-444b-81b7-f5343de14e4f] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}}
[ 1056.990242] env[61839]: DEBUG oslo_vmware.api [None req-9f5969a7-8816-49c0-8a86-1abb13ff475b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': task-1315018, 'name': PowerOnVM_Task, 'duration_secs': 0.61684} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1056.990606] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f5969a7-8816-49c0-8a86-1abb13ff475b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: 6b12ef55-b566-4a74-a794-b4e4c41debe1] Powered on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}}
[ 1056.990774] env[61839]: INFO nova.compute.manager [None req-9f5969a7-8816-49c0-8a86-1abb13ff475b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: 6b12ef55-b566-4a74-a794-b4e4c41debe1] Took 7.70 seconds to spawn the instance on the hypervisor.
[ 1056.990955] env[61839]: DEBUG nova.compute.manager [None req-9f5969a7-8816-49c0-8a86-1abb13ff475b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: 6b12ef55-b566-4a74-a794-b4e4c41debe1] Checking state {{(pid=61839) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}}
[ 1056.991727] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10415b85-193c-47c2-a498-765bc17f7f0b {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1057.051064] env[61839]: DEBUG nova.network.neutron [None req-015e0c14-d6b9-4af5-b9ac-26ebdfffc776 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: 506ea987-ea38-444b-81b7-f5343de14e4f] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 1057.174029] env[61839]: DEBUG nova.network.neutron [None req-015e0c14-d6b9-4af5-b9ac-26ebdfffc776 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: 506ea987-ea38-444b-81b7-f5343de14e4f] Updating instance_info_cache with network_info: [{"id": "8ae9c8f9-030e-48f6-9368-4348d164e3b9", "address": "fa:16:3e:3f:6d:02", "network": {"id": "47b25b31-1262-485a-a847-2918f7fce488", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-914746239-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "25686a503d044467a1d641f14e14c65c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8c58d99d-ec12-4fc3-ab39-042b3f8cbb89", "external-id": "nsx-vlan-transportzone-44", "segmentation_id": 44, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8ae9c8f9-03", "ovs_interfaceid": "8ae9c8f9-030e-48f6-9368-4348d164e3b9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1057.238641] env[61839]: DEBUG oslo_concurrency.lockutils [None req-ca1e7456-2e12-42da-a9d7-4e88dcda0280 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Acquiring lock "6b193149-68a3-43fc-a331-1b49e0cab484" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1057.238900] env[61839]: DEBUG oslo_concurrency.lockutils [None req-ca1e7456-2e12-42da-a9d7-4e88dcda0280 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Lock "6b193149-68a3-43fc-a331-1b49e0cab484" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1057.239250] env[61839]: DEBUG oslo_concurrency.lockutils [None req-ca1e7456-2e12-42da-a9d7-4e88dcda0280 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Acquiring lock "6b193149-68a3-43fc-a331-1b49e0cab484-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1057.239460] env[61839]: DEBUG oslo_concurrency.lockutils [None req-ca1e7456-2e12-42da-a9d7-4e88dcda0280 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Lock "6b193149-68a3-43fc-a331-1b49e0cab484-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1057.239638] env[61839]: DEBUG oslo_concurrency.lockutils [None req-ca1e7456-2e12-42da-a9d7-4e88dcda0280 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Lock "6b193149-68a3-43fc-a331-1b49e0cab484-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1057.241553] env[61839]: INFO nova.compute.manager [None req-ca1e7456-2e12-42da-a9d7-4e88dcda0280 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 6b193149-68a3-43fc-a331-1b49e0cab484] Terminating instance
[ 1057.243184] env[61839]: DEBUG nova.compute.manager [None req-ca1e7456-2e12-42da-a9d7-4e88dcda0280 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 6b193149-68a3-43fc-a331-1b49e0cab484] Start destroying the instance on the hypervisor. {{(pid=61839) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}}
[ 1057.243378] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-ca1e7456-2e12-42da-a9d7-4e88dcda0280 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 6b193149-68a3-43fc-a331-1b49e0cab484] Destroying instance {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}}
[ 1057.244217] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e3c6457-7efa-4657-b7c9-c3712277c396 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1057.251740] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-ca1e7456-2e12-42da-a9d7-4e88dcda0280 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 6b193149-68a3-43fc-a331-1b49e0cab484] Unregistering the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}}
[ 1057.251959] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-02c33c3d-4f74-4d71-aa8e-7b821491f3a8 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1057.331726] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-ca1e7456-2e12-42da-a9d7-4e88dcda0280 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 6b193149-68a3-43fc-a331-1b49e0cab484] Unregistered the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}}
[ 1057.331968] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-ca1e7456-2e12-42da-a9d7-4e88dcda0280 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 6b193149-68a3-43fc-a331-1b49e0cab484] Deleting contents of the VM from datastore datastore1 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}}
[ 1057.332185] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-ca1e7456-2e12-42da-a9d7-4e88dcda0280 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Deleting the datastore file [datastore1] 6b193149-68a3-43fc-a331-1b49e0cab484 {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}}
[ 1057.332457] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c63962c6-eb1c-4a55-8bda-c6086d503e86 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1057.339292] env[61839]: DEBUG oslo_vmware.api [None req-ca1e7456-2e12-42da-a9d7-4e88dcda0280 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Waiting for the task: (returnval){
[ 1057.339292] env[61839]: value = "task-1315020"
[ 1057.339292] env[61839]: _type = "Task"
[ 1057.339292] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1057.346879] env[61839]: DEBUG oslo_vmware.api [None req-ca1e7456-2e12-42da-a9d7-4e88dcda0280 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': task-1315020, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1057.512113] env[61839]: INFO nova.compute.manager [None req-9f5969a7-8816-49c0-8a86-1abb13ff475b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: 6b12ef55-b566-4a74-a794-b4e4c41debe1] Took 12.46 seconds to build instance.
[ 1057.678028] env[61839]: DEBUG oslo_concurrency.lockutils [None req-015e0c14-d6b9-4af5-b9ac-26ebdfffc776 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Releasing lock "refresh_cache-506ea987-ea38-444b-81b7-f5343de14e4f" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1057.678028] env[61839]: DEBUG nova.compute.manager [None req-015e0c14-d6b9-4af5-b9ac-26ebdfffc776 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: 506ea987-ea38-444b-81b7-f5343de14e4f] Instance network_info: |[{"id": "8ae9c8f9-030e-48f6-9368-4348d164e3b9", "address": "fa:16:3e:3f:6d:02", "network": {"id": "47b25b31-1262-485a-a847-2918f7fce488", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-914746239-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "25686a503d044467a1d641f14e14c65c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8c58d99d-ec12-4fc3-ab39-042b3f8cbb89", "external-id": "nsx-vlan-transportzone-44", "segmentation_id": 44, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8ae9c8f9-03", "ovs_interfaceid": "8ae9c8f9-030e-48f6-9368-4348d164e3b9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}}
[ 1057.678028] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-015e0c14-d6b9-4af5-b9ac-26ebdfffc776 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: 506ea987-ea38-444b-81b7-f5343de14e4f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3f:6d:02', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8c58d99d-ec12-4fc3-ab39-042b3f8cbb89', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8ae9c8f9-030e-48f6-9368-4348d164e3b9', 'vif_model': 'vmxnet3'}] {{(pid=61839) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}}
[ 1057.686894] env[61839]: DEBUG oslo.service.loopingcall [None req-015e0c14-d6b9-4af5-b9ac-26ebdfffc776 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 1057.687132] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 506ea987-ea38-444b-81b7-f5343de14e4f] Creating VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}}
[ 1057.687364] env[61839]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9ae4737f-c958-4df7-bd48-496b2f6d5486 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1057.708052] env[61839]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){
[ 1057.708052] env[61839]: value = "task-1315021"
[ 1057.708052] env[61839]: _type = "Task"
[ 1057.708052] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1057.715288] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1315021, 'name': CreateVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1057.760522] env[61839]: DEBUG oslo_concurrency.lockutils [None req-8dcbeef0-eeb4-4806-a2a7-432285ffc50a tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Acquiring lock "625a8fc1-23fc-4035-855f-3d3a963cdcea" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1057.761140] env[61839]: DEBUG oslo_concurrency.lockutils [None req-8dcbeef0-eeb4-4806-a2a7-432285ffc50a tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Lock "625a8fc1-23fc-4035-855f-3d3a963cdcea" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1057.849437] env[61839]: DEBUG oslo_vmware.api [None req-ca1e7456-2e12-42da-a9d7-4e88dcda0280 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': task-1315020, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.184154} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1057.849709] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-ca1e7456-2e12-42da-a9d7-4e88dcda0280 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Deleted the datastore file {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}}
[ 1057.849928] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-ca1e7456-2e12-42da-a9d7-4e88dcda0280 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 6b193149-68a3-43fc-a331-1b49e0cab484] Deleted contents of the VM from datastore datastore1 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}}
[ 1057.850206] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-ca1e7456-2e12-42da-a9d7-4e88dcda0280 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 6b193149-68a3-43fc-a331-1b49e0cab484] Instance destroyed {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}}
[ 1057.850399] env[61839]: INFO nova.compute.manager [None req-ca1e7456-2e12-42da-a9d7-4e88dcda0280 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 6b193149-68a3-43fc-a331-1b49e0cab484] Took 0.61 seconds to destroy the instance on the hypervisor.
[ 1057.850656] env[61839]: DEBUG oslo.service.loopingcall [None req-ca1e7456-2e12-42da-a9d7-4e88dcda0280 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 1057.850861] env[61839]: DEBUG nova.compute.manager [-] [instance: 6b193149-68a3-43fc-a331-1b49e0cab484] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}}
[ 1057.850958] env[61839]: DEBUG nova.network.neutron [-] [instance: 6b193149-68a3-43fc-a331-1b49e0cab484] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}}
[ 1057.993252] env[61839]: DEBUG nova.compute.manager [req-b1888e77-15dd-4b72-a2e3-d54093d7283e req-0ce59e1d-00a7-45dd-8aec-66c70d3ba442 service nova] [instance: 506ea987-ea38-444b-81b7-f5343de14e4f] Received event network-changed-8ae9c8f9-030e-48f6-9368-4348d164e3b9 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}}
[ 1057.993497] env[61839]: DEBUG nova.compute.manager [req-b1888e77-15dd-4b72-a2e3-d54093d7283e req-0ce59e1d-00a7-45dd-8aec-66c70d3ba442 service nova] [instance: 506ea987-ea38-444b-81b7-f5343de14e4f] Refreshing instance network info cache due to event network-changed-8ae9c8f9-030e-48f6-9368-4348d164e3b9. {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}}
[ 1057.993801] env[61839]: DEBUG oslo_concurrency.lockutils [req-b1888e77-15dd-4b72-a2e3-d54093d7283e req-0ce59e1d-00a7-45dd-8aec-66c70d3ba442 service nova] Acquiring lock "refresh_cache-506ea987-ea38-444b-81b7-f5343de14e4f" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1057.993985] env[61839]: DEBUG oslo_concurrency.lockutils [req-b1888e77-15dd-4b72-a2e3-d54093d7283e req-0ce59e1d-00a7-45dd-8aec-66c70d3ba442 service nova] Acquired lock "refresh_cache-506ea987-ea38-444b-81b7-f5343de14e4f" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1057.994592] env[61839]: DEBUG nova.network.neutron [req-b1888e77-15dd-4b72-a2e3-d54093d7283e req-0ce59e1d-00a7-45dd-8aec-66c70d3ba442 service nova] [instance: 506ea987-ea38-444b-81b7-f5343de14e4f] Refreshing network info cache for port 8ae9c8f9-030e-48f6-9368-4348d164e3b9 {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}}
[ 1058.014355] env[61839]: DEBUG oslo_concurrency.lockutils [None req-9f5969a7-8816-49c0-8a86-1abb13ff475b tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Lock "6b12ef55-b566-4a74-a794-b4e4c41debe1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.966s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1058.218488] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1315021, 'name': CreateVM_Task} progress is 25%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1058.264031] env[61839]: DEBUG nova.compute.utils [None req-8dcbeef0-eeb4-4806-a2a7-432285ffc50a tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Using /dev/sd instead of None {{(pid=61839) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}}
[ 1058.477859] env[61839]: DEBUG nova.compute.manager [req-5a518cc0-f98a-4e27-b58e-a97c43f9b9eb req-28228f2b-f120-406d-a7a6-6551bdde8d68 service nova] [instance: dbd34858-9806-4d3f-b829-948651056da2] Received event network-changed-7ee0f326-ead2-4849-823d-9d652c5c339b {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}}
[ 1058.478135] env[61839]: DEBUG nova.compute.manager [req-5a518cc0-f98a-4e27-b58e-a97c43f9b9eb req-28228f2b-f120-406d-a7a6-6551bdde8d68 service nova] [instance: dbd34858-9806-4d3f-b829-948651056da2] Refreshing instance network info cache due to event network-changed-7ee0f326-ead2-4849-823d-9d652c5c339b. {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}}
[ 1058.478371] env[61839]: DEBUG oslo_concurrency.lockutils [req-5a518cc0-f98a-4e27-b58e-a97c43f9b9eb req-28228f2b-f120-406d-a7a6-6551bdde8d68 service nova] Acquiring lock "refresh_cache-dbd34858-9806-4d3f-b829-948651056da2" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1058.478548] env[61839]: DEBUG oslo_concurrency.lockutils [req-5a518cc0-f98a-4e27-b58e-a97c43f9b9eb req-28228f2b-f120-406d-a7a6-6551bdde8d68 service nova] Acquired lock "refresh_cache-dbd34858-9806-4d3f-b829-948651056da2" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1058.478676] env[61839]: DEBUG nova.network.neutron [req-5a518cc0-f98a-4e27-b58e-a97c43f9b9eb req-28228f2b-f120-406d-a7a6-6551bdde8d68 service nova] [instance: dbd34858-9806-4d3f-b829-948651056da2] Refreshing network info cache for port 7ee0f326-ead2-4849-823d-9d652c5c339b {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}}
[ 1058.596827] env[61839]: DEBUG nova.network.neutron [-] [instance: 6b193149-68a3-43fc-a331-1b49e0cab484] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1058.706770] env[61839]: DEBUG nova.network.neutron [req-b1888e77-15dd-4b72-a2e3-d54093d7283e req-0ce59e1d-00a7-45dd-8aec-66c70d3ba442 service nova] [instance: 506ea987-ea38-444b-81b7-f5343de14e4f] Updated VIF entry in instance network info cache for port 8ae9c8f9-030e-48f6-9368-4348d164e3b9. {{(pid=61839) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}}
[ 1058.707474] env[61839]: DEBUG nova.network.neutron [req-b1888e77-15dd-4b72-a2e3-d54093d7283e req-0ce59e1d-00a7-45dd-8aec-66c70d3ba442 service nova] [instance: 506ea987-ea38-444b-81b7-f5343de14e4f] Updating instance_info_cache with network_info: [{"id": "8ae9c8f9-030e-48f6-9368-4348d164e3b9", "address": "fa:16:3e:3f:6d:02", "network": {"id": "47b25b31-1262-485a-a847-2918f7fce488", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-914746239-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "25686a503d044467a1d641f14e14c65c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8c58d99d-ec12-4fc3-ab39-042b3f8cbb89", "external-id": "nsx-vlan-transportzone-44", "segmentation_id": 44, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8ae9c8f9-03", "ovs_interfaceid": "8ae9c8f9-030e-48f6-9368-4348d164e3b9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1058.717427] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1315021, 'name': CreateVM_Task, 'duration_secs': 0.633139} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1058.717591] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 506ea987-ea38-444b-81b7-f5343de14e4f] Created VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}}
[ 1058.718947] env[61839]: DEBUG oslo_concurrency.lockutils [None req-015e0c14-d6b9-4af5-b9ac-26ebdfffc776 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1058.719161] env[61839]: DEBUG oslo_concurrency.lockutils [None req-015e0c14-d6b9-4af5-b9ac-26ebdfffc776 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1058.719562] env[61839]: DEBUG oslo_concurrency.lockutils [None req-015e0c14-d6b9-4af5-b9ac-26ebdfffc776 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 1058.720272] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-14b7c758-3efb-4ecb-b90c-ee05b05976bc {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1058.725242] env[61839]: DEBUG oslo_vmware.api [None req-015e0c14-d6b9-4af5-b9ac-26ebdfffc776 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Waiting for the task: (returnval){
[ 1058.725242] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]526cdbdb-3cac-268c-9898-38a5924c6d4b"
[ 1058.725242] env[61839]: _type = "Task"
[ 1058.725242] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1058.733693] env[61839]: DEBUG oslo_vmware.api [None req-015e0c14-d6b9-4af5-b9ac-26ebdfffc776 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]526cdbdb-3cac-268c-9898-38a5924c6d4b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1058.769070] env[61839]: DEBUG oslo_concurrency.lockutils [None req-8dcbeef0-eeb4-4806-a2a7-432285ffc50a tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Lock "625a8fc1-23fc-4035-855f-3d3a963cdcea" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.007s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1059.099981] env[61839]: INFO nova.compute.manager [-] [instance: 6b193149-68a3-43fc-a331-1b49e0cab484] Took 1.25 seconds to deallocate network for instance.
[ 1059.198789] env[61839]: DEBUG nova.network.neutron [req-5a518cc0-f98a-4e27-b58e-a97c43f9b9eb req-28228f2b-f120-406d-a7a6-6551bdde8d68 service nova] [instance: dbd34858-9806-4d3f-b829-948651056da2] Updated VIF entry in instance network info cache for port 7ee0f326-ead2-4849-823d-9d652c5c339b. {{(pid=61839) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}}
[ 1059.199211] env[61839]: DEBUG nova.network.neutron [req-5a518cc0-f98a-4e27-b58e-a97c43f9b9eb req-28228f2b-f120-406d-a7a6-6551bdde8d68 service nova] [instance: dbd34858-9806-4d3f-b829-948651056da2] Updating instance_info_cache with network_info: [{"id": "7ee0f326-ead2-4849-823d-9d652c5c339b", "address": "fa:16:3e:d1:53:f9", "network": {"id": "41c98894-de91-45eb-a390-6217e0f9dca5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1040806357-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7e03467b7fba46a9aac1562a1cb8368e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "572b7281-aad3-45fa-9cb2-fc1c70569948", "external-id": "nsx-vlan-transportzone-722", "segmentation_id": 722, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7ee0f326-ea", "ovs_interfaceid": "7ee0f326-ead2-4849-823d-9d652c5c339b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1059.209714] env[61839]: DEBUG oslo_concurrency.lockutils [req-b1888e77-15dd-4b72-a2e3-d54093d7283e req-0ce59e1d-00a7-45dd-8aec-66c70d3ba442 service nova] Releasing lock "refresh_cache-506ea987-ea38-444b-81b7-f5343de14e4f" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1059.236534] env[61839]: DEBUG oslo_vmware.api [None req-015e0c14-d6b9-4af5-b9ac-26ebdfffc776 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]526cdbdb-3cac-268c-9898-38a5924c6d4b, 'name': SearchDatastore_Task, 'duration_secs': 0.00956} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1059.236848] env[61839]: DEBUG oslo_concurrency.lockutils [None req-015e0c14-d6b9-4af5-b9ac-26ebdfffc776 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1059.237104] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-015e0c14-d6b9-4af5-b9ac-26ebdfffc776 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: 506ea987-ea38-444b-81b7-f5343de14e4f] Processing image e497cc62-282a-4a70-9770-22d80d8a1013 {{(pid=61839) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}}
[ 1059.237345] env[61839]: DEBUG oslo_concurrency.lockutils [None req-015e0c14-d6b9-4af5-b9ac-26ebdfffc776 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1059.237496] env[61839]: DEBUG oslo_concurrency.lockutils [None req-015e0c14-d6b9-4af5-b9ac-26ebdfffc776 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1059.237676] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-015e0c14-d6b9-4af5-b9ac-26ebdfffc776 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 1059.237938] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6c5bf3f3-3f0d-4e19-9465-c103a54e4126 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1059.245756] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-015e0c14-d6b9-4af5-b9ac-26ebdfffc776 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 1059.245948] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-015e0c14-d6b9-4af5-b9ac-26ebdfffc776 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61839) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}}
[ 1059.246668] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-05457e1f-f084-4e81-9568-2742a035d991 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1059.252805] env[61839]: DEBUG oslo_vmware.api [None req-015e0c14-d6b9-4af5-b9ac-26ebdfffc776 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Waiting for the task: (returnval){
[ 1059.252805] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52ef9620-3d4c-e15a-275e-64a5045513c5"
[ 1059.252805] env[61839]: _type = "Task"
[ 1059.252805] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1059.260076] env[61839]: DEBUG oslo_vmware.api [None req-015e0c14-d6b9-4af5-b9ac-26ebdfffc776 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52ef9620-3d4c-e15a-275e-64a5045513c5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1059.606343] env[61839]: DEBUG oslo_concurrency.lockutils [None req-ca1e7456-2e12-42da-a9d7-4e88dcda0280 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1059.606617] env[61839]: DEBUG oslo_concurrency.lockutils [None req-ca1e7456-2e12-42da-a9d7-4e88dcda0280 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1059.606932] env[61839]: DEBUG nova.objects.instance [None req-ca1e7456-2e12-42da-a9d7-4e88dcda0280 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Lazy-loading 'resources' on Instance uuid 6b193149-68a3-43fc-a331-1b49e0cab484 {{(pid=61839) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}}
[ 1059.701985] env[61839]: DEBUG oslo_concurrency.lockutils [req-5a518cc0-f98a-4e27-b58e-a97c43f9b9eb req-28228f2b-f120-406d-a7a6-6551bdde8d68 service nova] Releasing lock "refresh_cache-dbd34858-9806-4d3f-b829-948651056da2" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1059.763937] env[61839]: DEBUG oslo_vmware.api [None req-015e0c14-d6b9-4af5-b9ac-26ebdfffc776 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52ef9620-3d4c-e15a-275e-64a5045513c5, 'name': SearchDatastore_Task, 'duration_secs': 0.008888} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1059.764770] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4dd3aa9e-5a8a-4deb-b7c3-3c74f2ba5912 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1059.770170] env[61839]: DEBUG oslo_vmware.api [None req-015e0c14-d6b9-4af5-b9ac-26ebdfffc776 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Waiting for the task: (returnval){
[ 1059.770170] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]5286e322-19a5-4f83-154a-b27031ce48b0"
[ 1059.770170] env[61839]: _type = "Task"
[ 1059.770170] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1059.777619] env[61839]: DEBUG oslo_vmware.api [None req-015e0c14-d6b9-4af5-b9ac-26ebdfffc776 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]5286e322-19a5-4f83-154a-b27031ce48b0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1059.842522] env[61839]: DEBUG oslo_concurrency.lockutils [None req-8dcbeef0-eeb4-4806-a2a7-432285ffc50a tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Acquiring lock "625a8fc1-23fc-4035-855f-3d3a963cdcea" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1059.842733] env[61839]: DEBUG oslo_concurrency.lockutils [None req-8dcbeef0-eeb4-4806-a2a7-432285ffc50a tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Lock "625a8fc1-23fc-4035-855f-3d3a963cdcea" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1059.843010] env[61839]: INFO nova.compute.manager [None req-8dcbeef0-eeb4-4806-a2a7-432285ffc50a tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 625a8fc1-23fc-4035-855f-3d3a963cdcea] Attaching volume d4d58d24-9376-47ab-ae70-ee11dcb0e43c to /dev/sdb
[ 1059.871749] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-502dba93-9fe1-42e6-a72e-b8cbc08b2a21 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1059.879069] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c216008d-6067-4eb0-95bc-03cc633870ed {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1059.892135] env[61839]: DEBUG nova.virt.block_device [None req-8dcbeef0-eeb4-4806-a2a7-432285ffc50a tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 625a8fc1-23fc-4035-855f-3d3a963cdcea] Updating existing volume attachment record: 4eb434b1-a0bd-483e-b7a5-87c852a6de13 {{(pid=61839) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}}
[ 1060.022374] env[61839]: DEBUG nova.compute.manager [req-2b9c9942-5af5-4c1d-a799-c21638f058e8 req-6f85d415-3335-4c39-bcec-cb3785f694b7 service nova] [instance: 6b193149-68a3-43fc-a331-1b49e0cab484] Received event network-vif-deleted-942b0af0-855f-4721-b554-cc5ecb32794e {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}}
[ 1060.238809] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d6f9dd4-bd1b-4265-ac33-cbb5de95edd8 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1060.246070] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b97cc92-769c-4806-b7e2-33fd98e0ee88 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1060.279844] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22e680f1-78cf-4065-964a-8008dc29b70f {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1060.289473] env[61839]: DEBUG oslo_vmware.api [None req-015e0c14-d6b9-4af5-b9ac-26ebdfffc776 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]5286e322-19a5-4f83-154a-b27031ce48b0, 'name': SearchDatastore_Task, 'duration_secs': 0.00948} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1060.289801] env[61839]: DEBUG oslo_concurrency.lockutils [None req-015e0c14-d6b9-4af5-b9ac-26ebdfffc776 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1060.290105] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-015e0c14-d6b9-4af5-b9ac-26ebdfffc776 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk to [datastore1] 506ea987-ea38-444b-81b7-f5343de14e4f/506ea987-ea38-444b-81b7-f5343de14e4f.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}}
[ 1060.291285] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f7e7d9f-8c63-4662-b976-808a2ee74f7b {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1060.294856] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ebce6cb1-b7bf-4cf8-ba1c-e5d6c73dd3a9 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1060.300995] env[61839]: DEBUG oslo_vmware.api [None req-015e0c14-d6b9-4af5-b9ac-26ebdfffc776 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Waiting for the task: (returnval){
[ 1060.300995] env[61839]: value = "task-1315025"
[ 1060.300995] env[61839]: _type = "Task"
[ 1060.300995] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1060.308569] env[61839]: DEBUG nova.compute.provider_tree [None req-ca1e7456-2e12-42da-a9d7-4e88dcda0280 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1060.317203] env[61839]: DEBUG oslo_vmware.api [None req-015e0c14-d6b9-4af5-b9ac-26ebdfffc776 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': task-1315025, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1060.511341] env[61839]: DEBUG nova.compute.manager [req-83dc05d4-78dc-4346-8a36-9c3e0d5ae23a req-cb02f904-5f2f-480b-9f9e-d8e825166cdc service nova] [instance: 6b12ef55-b566-4a74-a794-b4e4c41debe1] Received event network-changed-df30d6b1-3fc6-465f-9b51-353f874ccd30 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}}
[ 1060.511552] env[61839]: DEBUG nova.compute.manager [req-83dc05d4-78dc-4346-8a36-9c3e0d5ae23a req-cb02f904-5f2f-480b-9f9e-d8e825166cdc service nova] [instance: 6b12ef55-b566-4a74-a794-b4e4c41debe1] Refreshing instance network info cache due to event network-changed-df30d6b1-3fc6-465f-9b51-353f874ccd30. {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}}
[ 1060.511800] env[61839]: DEBUG oslo_concurrency.lockutils [req-83dc05d4-78dc-4346-8a36-9c3e0d5ae23a req-cb02f904-5f2f-480b-9f9e-d8e825166cdc service nova] Acquiring lock "refresh_cache-6b12ef55-b566-4a74-a794-b4e4c41debe1" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1060.511996] env[61839]: DEBUG oslo_concurrency.lockutils [req-83dc05d4-78dc-4346-8a36-9c3e0d5ae23a req-cb02f904-5f2f-480b-9f9e-d8e825166cdc service nova] Acquired lock "refresh_cache-6b12ef55-b566-4a74-a794-b4e4c41debe1" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1060.512214] env[61839]: DEBUG nova.network.neutron [req-83dc05d4-78dc-4346-8a36-9c3e0d5ae23a req-cb02f904-5f2f-480b-9f9e-d8e825166cdc service nova] [instance: 6b12ef55-b566-4a74-a794-b4e4c41debe1] Refreshing network info cache for port df30d6b1-3fc6-465f-9b51-353f874ccd30 {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}}
[ 1060.816034] env[61839]: DEBUG nova.scheduler.client.report [None req-ca1e7456-2e12-42da-a9d7-4e88dcda0280 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 1060.823906] env[61839]: DEBUG oslo_vmware.api [None req-015e0c14-d6b9-4af5-b9ac-26ebdfffc776 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': task-1315025, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.507949} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1060.824406] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-015e0c14-d6b9-4af5-b9ac-26ebdfffc776 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk to [datastore1] 506ea987-ea38-444b-81b7-f5343de14e4f/506ea987-ea38-444b-81b7-f5343de14e4f.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}}
[ 1060.824965] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-015e0c14-d6b9-4af5-b9ac-26ebdfffc776 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: 506ea987-ea38-444b-81b7-f5343de14e4f] Extending root virtual disk to 1048576 {{(pid=61839) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}}
[ 1060.825407] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-76637e96-80f5-4677-908d-e5e2535c8f27 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1060.834033] env[61839]: DEBUG oslo_vmware.api [None req-015e0c14-d6b9-4af5-b9ac-26ebdfffc776 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Waiting for the task: (returnval){
[ 1060.834033] env[61839]: value = "task-1315026"
[ 1060.834033] env[61839]: _type = "Task"
[ 1060.834033] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1060.843736] env[61839]: DEBUG oslo_vmware.api [None req-015e0c14-d6b9-4af5-b9ac-26ebdfffc776 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': task-1315026, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1061.207473] env[61839]: DEBUG nova.network.neutron [req-83dc05d4-78dc-4346-8a36-9c3e0d5ae23a req-cb02f904-5f2f-480b-9f9e-d8e825166cdc service nova] [instance: 6b12ef55-b566-4a74-a794-b4e4c41debe1] Updated VIF entry in instance network info cache for port df30d6b1-3fc6-465f-9b51-353f874ccd30. {{(pid=61839) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}}
[ 1061.207853] env[61839]: DEBUG nova.network.neutron [req-83dc05d4-78dc-4346-8a36-9c3e0d5ae23a req-cb02f904-5f2f-480b-9f9e-d8e825166cdc service nova] [instance: 6b12ef55-b566-4a74-a794-b4e4c41debe1] Updating instance_info_cache with network_info: [{"id": "df30d6b1-3fc6-465f-9b51-353f874ccd30", "address": "fa:16:3e:0c:09:8e", "network": {"id": "41c98894-de91-45eb-a390-6217e0f9dca5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1040806357-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7e03467b7fba46a9aac1562a1cb8368e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "572b7281-aad3-45fa-9cb2-fc1c70569948", "external-id": "nsx-vlan-transportzone-722", "segmentation_id": 722, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdf30d6b1-3f", "ovs_interfaceid": "df30d6b1-3fc6-465f-9b51-353f874ccd30", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1061.319852] env[61839]: DEBUG oslo_concurrency.lockutils [None req-ca1e7456-2e12-42da-a9d7-4e88dcda0280 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.713s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1061.339009] env[61839]: INFO nova.scheduler.client.report [None req-ca1e7456-2e12-42da-a9d7-4e88dcda0280 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Deleted allocations for instance 6b193149-68a3-43fc-a331-1b49e0cab484
[ 1061.345283] env[61839]: DEBUG oslo_vmware.api [None req-015e0c14-d6b9-4af5-b9ac-26ebdfffc776 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': task-1315026, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.060523} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1061.345746] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-015e0c14-d6b9-4af5-b9ac-26ebdfffc776 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: 506ea987-ea38-444b-81b7-f5343de14e4f] Extended root virtual disk {{(pid=61839) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}}
[ 1061.346560] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4886256-1b44-41a2-8f58-4e7313fe5d90 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1061.368568] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-015e0c14-d6b9-4af5-b9ac-26ebdfffc776 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: 506ea987-ea38-444b-81b7-f5343de14e4f] Reconfiguring VM instance instance-0000006c to attach disk [datastore1] 506ea987-ea38-444b-81b7-f5343de14e4f/506ea987-ea38-444b-81b7-f5343de14e4f.vmdk or device None with type sparse {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}}
[ 1061.368832] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c8f6aa35-2423-4ca1-8418-d15cffb012cc {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1061.388409] env[61839]: DEBUG oslo_vmware.api [None req-015e0c14-d6b9-4af5-b9ac-26ebdfffc776 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Waiting for the task: (returnval){
[ 1061.388409] env[61839]: value = "task-1315027"
[ 1061.388409] env[61839]: _type = "Task"
[ 1061.388409] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1061.396333] env[61839]: DEBUG oslo_vmware.api [None req-015e0c14-d6b9-4af5-b9ac-26ebdfffc776 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': task-1315027, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1061.710339] env[61839]: DEBUG oslo_concurrency.lockutils [req-83dc05d4-78dc-4346-8a36-9c3e0d5ae23a req-cb02f904-5f2f-480b-9f9e-d8e825166cdc service nova] Releasing lock "refresh_cache-6b12ef55-b566-4a74-a794-b4e4c41debe1" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1061.710588] env[61839]: DEBUG nova.compute.manager [req-83dc05d4-78dc-4346-8a36-9c3e0d5ae23a req-cb02f904-5f2f-480b-9f9e-d8e825166cdc service nova] [instance: 6b12ef55-b566-4a74-a794-b4e4c41debe1] Received event network-changed-df30d6b1-3fc6-465f-9b51-353f874ccd30 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}}
[ 1061.710866] env[61839]: DEBUG nova.compute.manager [req-83dc05d4-78dc-4346-8a36-9c3e0d5ae23a req-cb02f904-5f2f-480b-9f9e-d8e825166cdc service nova] [instance: 6b12ef55-b566-4a74-a794-b4e4c41debe1] Refreshing instance network info cache due to event network-changed-df30d6b1-3fc6-465f-9b51-353f874ccd30. {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}}
[ 1061.711172] env[61839]: DEBUG oslo_concurrency.lockutils [req-83dc05d4-78dc-4346-8a36-9c3e0d5ae23a req-cb02f904-5f2f-480b-9f9e-d8e825166cdc service nova] Acquiring lock "refresh_cache-6b12ef55-b566-4a74-a794-b4e4c41debe1" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1061.711364] env[61839]: DEBUG oslo_concurrency.lockutils [req-83dc05d4-78dc-4346-8a36-9c3e0d5ae23a req-cb02f904-5f2f-480b-9f9e-d8e825166cdc service nova] Acquired lock "refresh_cache-6b12ef55-b566-4a74-a794-b4e4c41debe1" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1061.711568] env[61839]: DEBUG nova.network.neutron [req-83dc05d4-78dc-4346-8a36-9c3e0d5ae23a req-cb02f904-5f2f-480b-9f9e-d8e825166cdc service nova] [instance: 6b12ef55-b566-4a74-a794-b4e4c41debe1] Refreshing network info cache for port df30d6b1-3fc6-465f-9b51-353f874ccd30 {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}}
[ 1061.849054] env[61839]: DEBUG oslo_concurrency.lockutils [None req-ca1e7456-2e12-42da-a9d7-4e88dcda0280 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Lock "6b193149-68a3-43fc-a331-1b49e0cab484" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 4.610s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1061.898375] env[61839]: DEBUG oslo_vmware.api [None req-015e0c14-d6b9-4af5-b9ac-26ebdfffc776 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': task-1315027, 'name': ReconfigVM_Task, 'duration_secs': 0.249675} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1061.898662] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-015e0c14-d6b9-4af5-b9ac-26ebdfffc776 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: 506ea987-ea38-444b-81b7-f5343de14e4f] Reconfigured VM instance instance-0000006c to attach disk [datastore1] 506ea987-ea38-444b-81b7-f5343de14e4f/506ea987-ea38-444b-81b7-f5343de14e4f.vmdk or device None with type sparse {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}}
[ 1061.899303] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f29fcbef-5e6f-4b72-ac27-7ee3af4821f1 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1061.905597] env[61839]: DEBUG oslo_vmware.api [None req-015e0c14-d6b9-4af5-b9ac-26ebdfffc776 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Waiting for the task: (returnval){
[ 1061.905597] env[61839]: value = "task-1315028"
[ 1061.905597] env[61839]: _type = "Task"
[ 1061.905597] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1061.912877] env[61839]: DEBUG oslo_vmware.api [None req-015e0c14-d6b9-4af5-b9ac-26ebdfffc776 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': task-1315028, 'name': Rename_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1062.415774] env[61839]: DEBUG oslo_vmware.api [None req-015e0c14-d6b9-4af5-b9ac-26ebdfffc776 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': task-1315028, 'name': Rename_Task, 'duration_secs': 0.132027} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1062.415774] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-015e0c14-d6b9-4af5-b9ac-26ebdfffc776 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: 506ea987-ea38-444b-81b7-f5343de14e4f] Powering on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}}
[ 1062.416154] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-18fce7ca-7131-4008-b20f-bb9f429d896a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1062.422736] env[61839]: DEBUG oslo_vmware.api [None req-015e0c14-d6b9-4af5-b9ac-26ebdfffc776 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Waiting for the task: (returnval){
[ 1062.422736] env[61839]: value = "task-1315030"
[ 1062.422736] env[61839]: _type = "Task"
[ 1062.422736] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1062.426079] env[61839]: DEBUG nova.network.neutron [req-83dc05d4-78dc-4346-8a36-9c3e0d5ae23a req-cb02f904-5f2f-480b-9f9e-d8e825166cdc service nova] [instance: 6b12ef55-b566-4a74-a794-b4e4c41debe1] Updated VIF entry in instance network info cache for port df30d6b1-3fc6-465f-9b51-353f874ccd30. {{(pid=61839) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}}
[ 1062.426487] env[61839]: DEBUG nova.network.neutron [req-83dc05d4-78dc-4346-8a36-9c3e0d5ae23a req-cb02f904-5f2f-480b-9f9e-d8e825166cdc service nova] [instance: 6b12ef55-b566-4a74-a794-b4e4c41debe1] Updating instance_info_cache with network_info: [{"id": "df30d6b1-3fc6-465f-9b51-353f874ccd30", "address": "fa:16:3e:0c:09:8e", "network": {"id": "41c98894-de91-45eb-a390-6217e0f9dca5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1040806357-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7e03467b7fba46a9aac1562a1cb8368e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "572b7281-aad3-45fa-9cb2-fc1c70569948", "external-id": "nsx-vlan-transportzone-722", "segmentation_id": 722, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdf30d6b1-3f", "ovs_interfaceid": "df30d6b1-3fc6-465f-9b51-353f874ccd30", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1062.432916] env[61839]: DEBUG oslo_vmware.api [None req-015e0c14-d6b9-4af5-b9ac-26ebdfffc776 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': task-1315030, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1062.933229] env[61839]: DEBUG oslo_concurrency.lockutils [req-83dc05d4-78dc-4346-8a36-9c3e0d5ae23a req-cb02f904-5f2f-480b-9f9e-d8e825166cdc service nova] Releasing lock "refresh_cache-6b12ef55-b566-4a74-a794-b4e4c41debe1" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1062.933492] env[61839]: DEBUG nova.compute.manager [req-83dc05d4-78dc-4346-8a36-9c3e0d5ae23a req-cb02f904-5f2f-480b-9f9e-d8e825166cdc service nova] [instance: dbd34858-9806-4d3f-b829-948651056da2] Received event network-changed-7ee0f326-ead2-4849-823d-9d652c5c339b {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}}
[ 1062.933675] env[61839]: DEBUG nova.compute.manager [req-83dc05d4-78dc-4346-8a36-9c3e0d5ae23a req-cb02f904-5f2f-480b-9f9e-d8e825166cdc service nova] [instance: dbd34858-9806-4d3f-b829-948651056da2] Refreshing instance network info cache due to event network-changed-7ee0f326-ead2-4849-823d-9d652c5c339b.
{{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1062.933892] env[61839]: DEBUG oslo_concurrency.lockutils [req-83dc05d4-78dc-4346-8a36-9c3e0d5ae23a req-cb02f904-5f2f-480b-9f9e-d8e825166cdc service nova] Acquiring lock "refresh_cache-dbd34858-9806-4d3f-b829-948651056da2" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1062.934055] env[61839]: DEBUG oslo_concurrency.lockutils [req-83dc05d4-78dc-4346-8a36-9c3e0d5ae23a req-cb02f904-5f2f-480b-9f9e-d8e825166cdc service nova] Acquired lock "refresh_cache-dbd34858-9806-4d3f-b829-948651056da2" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1062.934227] env[61839]: DEBUG nova.network.neutron [req-83dc05d4-78dc-4346-8a36-9c3e0d5ae23a req-cb02f904-5f2f-480b-9f9e-d8e825166cdc service nova] [instance: dbd34858-9806-4d3f-b829-948651056da2] Refreshing network info cache for port 7ee0f326-ead2-4849-823d-9d652c5c339b {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1062.935402] env[61839]: DEBUG oslo_vmware.api [None req-015e0c14-d6b9-4af5-b9ac-26ebdfffc776 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': task-1315030, 'name': PowerOnVM_Task, 'duration_secs': 0.494562} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1062.935649] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-015e0c14-d6b9-4af5-b9ac-26ebdfffc776 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: 506ea987-ea38-444b-81b7-f5343de14e4f] Powered on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1062.935951] env[61839]: INFO nova.compute.manager [None req-015e0c14-d6b9-4af5-b9ac-26ebdfffc776 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: 506ea987-ea38-444b-81b7-f5343de14e4f] Took 7.19 seconds to spawn the instance on the hypervisor. 
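Every vCenter mutation in this trace (ReconfigVM_Task, Rename_Task, PowerOnVM_Task above) follows the same shape: the SOAP invocation returns a Task moref immediately, wait_for_task blocks on it, and _poll_task re-reads the task's info property until vCenter reports success or error, logging "progress is N%" between round-trips. A minimal sketch of that polling loop, with a hypothetical get_task_info callable standing in for the RetrievePropertiesEx read that oslo_vmware/api.py actually performs:

    import time

    class TaskFailed(Exception):
        """Raised when vCenter reports the task ended in error."""

    def wait_for_task(get_task_info, task_ref, poll_interval=0.5):
        # get_task_info(task_ref) is a hypothetical stand-in for one
        # RetrievePropertiesEx round-trip fetching the Task's 'info'.
        while True:
            info = get_task_info(task_ref)
            if info.state == 'success':
                return info.result
            if info.state == 'error':
                raise TaskFailed(info.error.localizedMessage)
            # 'queued' or 'running': report progress and poll again,
            # which is what produces the "progress is N%." records.
            print("Task: {'id': %s} progress is %s%%." % (task_ref, info.progress or 0))
            time.sleep(poll_interval)

The 0.5s interval is an assumption for the sketch; the gap between the first poll of task-1315027 (1061.396) and its completion poll (1061.898) suggests the real loop operates on roughly that order of magnitude.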
[ 1062.936068] env[61839]: DEBUG nova.compute.manager [None req-015e0c14-d6b9-4af5-b9ac-26ebdfffc776 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: 506ea987-ea38-444b-81b7-f5343de14e4f] Checking state {{(pid=61839) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1062.936877] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ba327c1-2615-445a-99a7-870ed878d8be {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.068430] env[61839]: DEBUG oslo_concurrency.lockutils [None req-da276f41-7023-490c-aee3-8c375ee803c8 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Acquiring lock "6f43abec-51e2-40e4-8a0f-5a8617a9a9f8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1063.068692] env[61839]: DEBUG oslo_concurrency.lockutils [None req-da276f41-7023-490c-aee3-8c375ee803c8 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Lock "6f43abec-51e2-40e4-8a0f-5a8617a9a9f8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1063.068939] env[61839]: DEBUG oslo_concurrency.lockutils [None req-da276f41-7023-490c-aee3-8c375ee803c8 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Acquiring lock "6f43abec-51e2-40e4-8a0f-5a8617a9a9f8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1063.069188] env[61839]: DEBUG oslo_concurrency.lockutils [None req-da276f41-7023-490c-aee3-8c375ee803c8 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Lock "6f43abec-51e2-40e4-8a0f-5a8617a9a9f8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1063.069392] env[61839]: DEBUG oslo_concurrency.lockutils [None req-da276f41-7023-490c-aee3-8c375ee803c8 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Lock "6f43abec-51e2-40e4-8a0f-5a8617a9a9f8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1063.071920] env[61839]: INFO nova.compute.manager [None req-da276f41-7023-490c-aee3-8c375ee803c8 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 6f43abec-51e2-40e4-8a0f-5a8617a9a9f8] Terminating instance [ 1063.076117] env[61839]: DEBUG nova.compute.manager [None req-da276f41-7023-490c-aee3-8c375ee803c8 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 6f43abec-51e2-40e4-8a0f-5a8617a9a9f8] Start destroying the instance on the hypervisor. 
{{(pid=61839) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1063.076117] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-da276f41-7023-490c-aee3-8c375ee803c8 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 6f43abec-51e2-40e4-8a0f-5a8617a9a9f8] Destroying instance {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1063.076117] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22ad1757-5abe-4279-b41e-c5fd981671f9 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.086574] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-da276f41-7023-490c-aee3-8c375ee803c8 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 6f43abec-51e2-40e4-8a0f-5a8617a9a9f8] Powering off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1063.086574] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4d13a43a-496e-45fe-b79b-82b885daa964 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.093641] env[61839]: DEBUG oslo_vmware.api [None req-da276f41-7023-490c-aee3-8c375ee803c8 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Waiting for the task: (returnval){ [ 1063.093641] env[61839]: value = "task-1315031" [ 1063.093641] env[61839]: _type = "Task" [ 1063.093641] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1063.111018] env[61839]: DEBUG oslo_vmware.api [None req-da276f41-7023-490c-aee3-8c375ee803c8 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': task-1315031, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1063.453474] env[61839]: INFO nova.compute.manager [None req-015e0c14-d6b9-4af5-b9ac-26ebdfffc776 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: 506ea987-ea38-444b-81b7-f5343de14e4f] Took 11.94 seconds to build instance. [ 1063.606994] env[61839]: DEBUG oslo_vmware.api [None req-da276f41-7023-490c-aee3-8c375ee803c8 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': task-1315031, 'name': PowerOffVM_Task, 'duration_secs': 0.27672} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1063.607451] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-da276f41-7023-490c-aee3-8c375ee803c8 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 6f43abec-51e2-40e4-8a0f-5a8617a9a9f8] Powered off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1063.607701] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-da276f41-7023-490c-aee3-8c375ee803c8 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 6f43abec-51e2-40e4-8a0f-5a8617a9a9f8] Unregistering the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1063.608020] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f2dd2796-b2e4-48f2-b4dc-67727fa4fb98 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.684399] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-da276f41-7023-490c-aee3-8c375ee803c8 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 6f43abec-51e2-40e4-8a0f-5a8617a9a9f8] Unregistered the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1063.684624] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-da276f41-7023-490c-aee3-8c375ee803c8 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 6f43abec-51e2-40e4-8a0f-5a8617a9a9f8] Deleting contents of the VM from datastore datastore2 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1063.684830] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-da276f41-7023-490c-aee3-8c375ee803c8 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Deleting the datastore file [datastore2] 6f43abec-51e2-40e4-8a0f-5a8617a9a9f8 {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1063.685112] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e16f4be2-64b6-4c7c-82ce-f4c5341c0450 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.690906] env[61839]: DEBUG oslo_vmware.api [None req-da276f41-7023-490c-aee3-8c375ee803c8 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Waiting for the task: (returnval){ [ 1063.690906] env[61839]: value = "task-1315033" [ 1063.690906] env[61839]: _type = "Task" [ 1063.690906] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1063.694531] env[61839]: DEBUG nova.network.neutron [req-83dc05d4-78dc-4346-8a36-9c3e0d5ae23a req-cb02f904-5f2f-480b-9f9e-d8e825166cdc service nova] [instance: dbd34858-9806-4d3f-b829-948651056da2] Updated VIF entry in instance network info cache for port 7ee0f326-ead2-4849-823d-9d652c5c339b. 
{{(pid=61839) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1063.694873] env[61839]: DEBUG nova.network.neutron [req-83dc05d4-78dc-4346-8a36-9c3e0d5ae23a req-cb02f904-5f2f-480b-9f9e-d8e825166cdc service nova] [instance: dbd34858-9806-4d3f-b829-948651056da2] Updating instance_info_cache with network_info: [{"id": "7ee0f326-ead2-4849-823d-9d652c5c339b", "address": "fa:16:3e:d1:53:f9", "network": {"id": "41c98894-de91-45eb-a390-6217e0f9dca5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1040806357-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.170", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7e03467b7fba46a9aac1562a1cb8368e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "572b7281-aad3-45fa-9cb2-fc1c70569948", "external-id": "nsx-vlan-transportzone-722", "segmentation_id": 722, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7ee0f326-ea", "ovs_interfaceid": "7ee0f326-ead2-4849-823d-9d652c5c339b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1063.700625] env[61839]: DEBUG oslo_vmware.api [None req-da276f41-7023-490c-aee3-8c375ee803c8 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': task-1315033, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1063.956671] env[61839]: DEBUG oslo_concurrency.lockutils [None req-015e0c14-d6b9-4af5-b9ac-26ebdfffc776 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Lock "506ea987-ea38-444b-81b7-f5343de14e4f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.451s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1064.197664] env[61839]: DEBUG oslo_concurrency.lockutils [req-83dc05d4-78dc-4346-8a36-9c3e0d5ae23a req-cb02f904-5f2f-480b-9f9e-d8e825166cdc service nova] Releasing lock "refresh_cache-dbd34858-9806-4d3f-b829-948651056da2" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1064.201304] env[61839]: DEBUG oslo_vmware.api [None req-da276f41-7023-490c-aee3-8c375ee803c8 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Task: {'id': task-1315033, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.288159} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1064.201574] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-da276f41-7023-490c-aee3-8c375ee803c8 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Deleted the datastore file {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}}
[ 1064.201777] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-da276f41-7023-490c-aee3-8c375ee803c8 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 6f43abec-51e2-40e4-8a0f-5a8617a9a9f8] Deleted contents of the VM from datastore datastore2 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}}
[ 1064.201965] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-da276f41-7023-490c-aee3-8c375ee803c8 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 6f43abec-51e2-40e4-8a0f-5a8617a9a9f8] Instance destroyed {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}}
[ 1064.202176] env[61839]: INFO nova.compute.manager [None req-da276f41-7023-490c-aee3-8c375ee803c8 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] [instance: 6f43abec-51e2-40e4-8a0f-5a8617a9a9f8] Took 1.13 seconds to destroy the instance on the hypervisor.
[ 1064.202443] env[61839]: DEBUG oslo.service.loopingcall [None req-da276f41-7023-490c-aee3-8c375ee803c8 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 1064.202643] env[61839]: DEBUG nova.compute.manager [-] [instance: 6f43abec-51e2-40e4-8a0f-5a8617a9a9f8] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}}
[ 1064.202739] env[61839]: DEBUG nova.network.neutron [-] [instance: 6f43abec-51e2-40e4-8a0f-5a8617a9a9f8] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}}
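For post-processing a capture like this one, records can be recovered from the flat text by anchoring on the "[ <uptime>] env[<n>]: <LEVEL> <logger>" prefix; everything up to the next prefix (including any "{{(pid=...) func /path:line}}" source tail) belongs to the current record. A minimal sketch, assuming only the layout visible above:

    import re

    # Matches the start of a record; continuation lines such as
    # '[ 1064.516522] env[61839]: value = "task-1315034"' lack a level
    # token and therefore stay attached to the preceding record.
    RECORD = re.compile(
        r"\[\s*(?P<uptime>\d+\.\d+)\] env\[(?P<env>\d+)\]: "
        r"(?P<level>DEBUG|INFO|WARNING|ERROR) (?P<logger>\S+)")

    def iter_records(text):
        """Yield (uptime, level, logger, body) tuples from a flat capture."""
        starts = list(RECORD.finditer(text))
        for match, nxt in zip(starts, starts[1:] + [None]):
            end = nxt.start() if nxt else len(text)
            yield (float(match.group("uptime")), match.group("level"),
                   match.group("logger"), text[match.end():end].strip())

    sample = ("[ 1064.202176] env[61839]: INFO nova.compute.manager "
              "[None req-da276f41-7023-490c-aee3-8c375ee803c8 ...] "
              "Took 1.13 seconds to destroy the instance on the hypervisor.")
    for uptime, level, logger, body in iter_records(sample):
        print(uptime, level, logger)  # 1064.202176 INFO nova.compute.manager

Filtering the body on the req- and "[instance: ...]" tokens is then enough to pull a single request's lifecycle (for example req-da276f41 above) out of the interleaved stream.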
[ 1064.441521] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-8dcbeef0-eeb4-4806-a2a7-432285ffc50a tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 625a8fc1-23fc-4035-855f-3d3a963cdcea] Volume attach. Driver type: vmdk {{(pid=61839) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}}
[ 1064.444616] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-8dcbeef0-eeb4-4806-a2a7-432285ffc50a tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 625a8fc1-23fc-4035-855f-3d3a963cdcea] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-281450', 'volume_id': 'd4d58d24-9376-47ab-ae70-ee11dcb0e43c', 'name': 'volume-d4d58d24-9376-47ab-ae70-ee11dcb0e43c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '625a8fc1-23fc-4035-855f-3d3a963cdcea', 'attached_at': '', 'detached_at': '', 'volume_id': 'd4d58d24-9376-47ab-ae70-ee11dcb0e43c', 'serial': 'd4d58d24-9376-47ab-ae70-ee11dcb0e43c'} {{(pid=61839) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}}
[ 1064.444616] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47beb72c-4730-4b63-80be-fc329f6f0428 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1064.459811] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-972e322f-40b0-4dcd-8389-e116a3e8eb97 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1064.489462] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-8dcbeef0-eeb4-4806-a2a7-432285ffc50a tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 625a8fc1-23fc-4035-855f-3d3a963cdcea] Reconfiguring VM instance instance-00000064 to attach disk [datastore1] volume-d4d58d24-9376-47ab-ae70-ee11dcb0e43c/volume-d4d58d24-9376-47ab-ae70-ee11dcb0e43c.vmdk or device None with type thin {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}}
[ 1064.493036] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0f3ffc8c-5d0b-43f2-8373-765902ee9db8 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1064.506490] env[61839]: DEBUG nova.compute.manager [None req-daf7eee7-c614-4c76-9727-c9d37b28607d tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: 506ea987-ea38-444b-81b7-f5343de14e4f] Stashing vm_state: active {{(pid=61839) _prep_resize /opt/stack/nova/nova/compute/manager.py:5624}}
[ 1064.516522] env[61839]: DEBUG oslo_vmware.api [None req-8dcbeef0-eeb4-4806-a2a7-432285ffc50a tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Waiting for the task: (returnval){
[ 1064.516522] env[61839]: value = "task-1315034"
[ 1064.516522] env[61839]: _type = "Task"
[ 1064.516522] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1064.525562] env[61839]: DEBUG oslo_vmware.api [None req-8dcbeef0-eeb4-4806-a2a7-432285ffc50a tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1315034, 'name': ReconfigVM_Task} progress is 6%.
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1064.529455] env[61839]: DEBUG nova.compute.manager [req-4a24e283-ea02-40fb-9555-3780d893b9ef req-795bbc2b-cfaf-46d3-a59b-4f2771a4ccaf service nova] [instance: 6f43abec-51e2-40e4-8a0f-5a8617a9a9f8] Received event network-vif-deleted-a65466f1-75e9-41dd-8045-71a68e9b31d2 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1064.529552] env[61839]: INFO nova.compute.manager [req-4a24e283-ea02-40fb-9555-3780d893b9ef req-795bbc2b-cfaf-46d3-a59b-4f2771a4ccaf service nova] [instance: 6f43abec-51e2-40e4-8a0f-5a8617a9a9f8] Neutron deleted interface a65466f1-75e9-41dd-8045-71a68e9b31d2; detaching it from the instance and deleting it from the info cache [ 1064.529675] env[61839]: DEBUG nova.network.neutron [req-4a24e283-ea02-40fb-9555-3780d893b9ef req-795bbc2b-cfaf-46d3-a59b-4f2771a4ccaf service nova] [instance: 6f43abec-51e2-40e4-8a0f-5a8617a9a9f8] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1065.007391] env[61839]: DEBUG nova.network.neutron [-] [instance: 6f43abec-51e2-40e4-8a0f-5a8617a9a9f8] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1065.026806] env[61839]: DEBUG oslo_vmware.api [None req-8dcbeef0-eeb4-4806-a2a7-432285ffc50a tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1315034, 'name': ReconfigVM_Task, 'duration_secs': 0.346437} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1065.027097] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-8dcbeef0-eeb4-4806-a2a7-432285ffc50a tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 625a8fc1-23fc-4035-855f-3d3a963cdcea] Reconfigured VM instance instance-00000064 to attach disk [datastore1] volume-d4d58d24-9376-47ab-ae70-ee11dcb0e43c/volume-d4d58d24-9376-47ab-ae70-ee11dcb0e43c.vmdk or device None with type thin {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1065.032434] env[61839]: DEBUG oslo_concurrency.lockutils [None req-daf7eee7-c614-4c76-9727-c9d37b28607d tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1065.032681] env[61839]: DEBUG oslo_concurrency.lockutils [None req-daf7eee7-c614-4c76-9727-c9d37b28607d tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1065.034458] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8bb23e71-3a1f-47fc-ba84-37b982bca4e6 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.046367] env[61839]: INFO nova.compute.claims [None req-daf7eee7-c614-4c76-9727-c9d37b28607d 
tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: 506ea987-ea38-444b-81b7-f5343de14e4f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1065.049431] env[61839]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2c95a03f-1bc0-4288-8ccb-5e875387c7ec {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.059302] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-280859f4-70e5-4809-ae65-dd338b7a9ed8 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.071403] env[61839]: DEBUG oslo_vmware.api [None req-8dcbeef0-eeb4-4806-a2a7-432285ffc50a tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Waiting for the task: (returnval){ [ 1065.071403] env[61839]: value = "task-1315035" [ 1065.071403] env[61839]: _type = "Task" [ 1065.071403] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1065.080224] env[61839]: DEBUG oslo_vmware.api [None req-8dcbeef0-eeb4-4806-a2a7-432285ffc50a tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1315035, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1065.091157] env[61839]: DEBUG nova.compute.manager [req-4a24e283-ea02-40fb-9555-3780d893b9ef req-795bbc2b-cfaf-46d3-a59b-4f2771a4ccaf service nova] [instance: 6f43abec-51e2-40e4-8a0f-5a8617a9a9f8] Detach interface failed, port_id=a65466f1-75e9-41dd-8045-71a68e9b31d2, reason: Instance 6f43abec-51e2-40e4-8a0f-5a8617a9a9f8 could not be found. 
{{(pid=61839) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1065.129307] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0b9fdc0e-074c-47a8-bcba-feb8c13e1519 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Acquiring lock "a1defab7-8433-411d-b7e2-c31f6a34b8e0" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1065.129572] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0b9fdc0e-074c-47a8-bcba-feb8c13e1519 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Lock "a1defab7-8433-411d-b7e2-c31f6a34b8e0" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1065.129762] env[61839]: INFO nova.compute.manager [None req-0b9fdc0e-074c-47a8-bcba-feb8c13e1519 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: a1defab7-8433-411d-b7e2-c31f6a34b8e0] Shelving [ 1065.489663] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1065.510491] env[61839]: INFO nova.compute.manager [-] [instance: 6f43abec-51e2-40e4-8a0f-5a8617a9a9f8] Took 1.31 seconds to deallocate network for instance. [ 1065.554020] env[61839]: INFO nova.compute.resource_tracker [None req-daf7eee7-c614-4c76-9727-c9d37b28607d tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: 506ea987-ea38-444b-81b7-f5343de14e4f] Updating resource usage from migration e23f1f34-5da9-4652-8613-b9cdd7e73734 [ 1065.582565] env[61839]: DEBUG oslo_vmware.api [None req-8dcbeef0-eeb4-4806-a2a7-432285ffc50a tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1315035, 'name': ReconfigVM_Task} progress is 99%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1065.637075] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b9fdc0e-074c-47a8-bcba-feb8c13e1519 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: a1defab7-8433-411d-b7e2-c31f6a34b8e0] Powering off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1065.637366] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4c6f405b-5e83-473f-8059-84d3391521db {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.644683] env[61839]: DEBUG oslo_vmware.api [None req-0b9fdc0e-074c-47a8-bcba-feb8c13e1519 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Waiting for the task: (returnval){ [ 1065.644683] env[61839]: value = "task-1315036" [ 1065.644683] env[61839]: _type = "Task" [ 1065.644683] env[61839]: } to complete. 
{{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1065.652736] env[61839]: DEBUG oslo_vmware.api [None req-0b9fdc0e-074c-47a8-bcba-feb8c13e1519 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Task: {'id': task-1315036, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1065.681619] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd1ed716-422b-4558-9d40-80ae356e1f92 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.692972] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8891beb0-7a65-40fc-902e-be598977131c {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.741534] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-204ac5c9-71fb-4aa0-bd1d-7684362ca548 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.752831] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2776488-130a-438f-ac54-af7af33b40b4 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.776900] env[61839]: DEBUG nova.compute.provider_tree [None req-daf7eee7-c614-4c76-9727-c9d37b28607d tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1066.017284] env[61839]: DEBUG oslo_concurrency.lockutils [None req-da276f41-7023-490c-aee3-8c375ee803c8 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1066.084715] env[61839]: DEBUG oslo_vmware.api [None req-8dcbeef0-eeb4-4806-a2a7-432285ffc50a tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1315035, 'name': ReconfigVM_Task} progress is 99%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1066.155433] env[61839]: DEBUG oslo_vmware.api [None req-0b9fdc0e-074c-47a8-bcba-feb8c13e1519 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Task: {'id': task-1315036, 'name': PowerOffVM_Task, 'duration_secs': 0.318472} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1066.155765] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b9fdc0e-074c-47a8-bcba-feb8c13e1519 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: a1defab7-8433-411d-b7e2-c31f6a34b8e0] Powered off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1066.156596] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fee15927-a094-4856-9111-d4350c2554b8 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.176017] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-370356b0-67c8-46e2-adbb-65dd82e95d55 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.280441] env[61839]: DEBUG nova.scheduler.client.report [None req-daf7eee7-c614-4c76-9727-c9d37b28607d tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1066.584608] env[61839]: DEBUG oslo_vmware.api [None req-8dcbeef0-eeb4-4806-a2a7-432285ffc50a tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1315035, 'name': ReconfigVM_Task, 'duration_secs': 1.131658} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1066.584965] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-8dcbeef0-eeb4-4806-a2a7-432285ffc50a tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 625a8fc1-23fc-4035-855f-3d3a963cdcea] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-281450', 'volume_id': 'd4d58d24-9376-47ab-ae70-ee11dcb0e43c', 'name': 'volume-d4d58d24-9376-47ab-ae70-ee11dcb0e43c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '625a8fc1-23fc-4035-855f-3d3a963cdcea', 'attached_at': '', 'detached_at': '', 'volume_id': 'd4d58d24-9376-47ab-ae70-ee11dcb0e43c', 'serial': 'd4d58d24-9376-47ab-ae70-ee11dcb0e43c'} {{(pid=61839) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}}
[ 1066.690394] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-0b9fdc0e-074c-47a8-bcba-feb8c13e1519 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: a1defab7-8433-411d-b7e2-c31f6a34b8e0] Creating Snapshot of the VM instance {{(pid=61839) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}}
[ 1066.690744] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-8c6ecf18-eda2-459a-b215-0edbd5028ebe {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1066.698744] env[61839]: DEBUG oslo_vmware.api [None req-0b9fdc0e-074c-47a8-bcba-feb8c13e1519 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Waiting for the task: (returnval){
[ 1066.698744] env[61839]: value = "task-1315037"
[ 1066.698744] env[61839]: _type = "Task"
[ 1066.698744] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
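The _attach_volume_vmdk: and Attached VMDK: records above bracket the volume attach and log the same connection_info payload: 'volume' ('vm-281450') is the moref of the shadow VM backing the Cinder volume, 'volume_id' ties the attach back to Cinder, and 'access_mode' gates read-only attaches. A minimal sketch of extracting those fields, assuming only the keys visible in the logged dict (this is not a full schema):

    def parse_vmdk_connection_info(connection_info):
        """Return (backing moref, volume_id, access_mode) from a vmdk attach."""
        if connection_info.get("driver_volume_type") != "vmdk":
            raise ValueError("not a vmdk connection")
        data = connection_info["data"]
        # Keys taken from the records above; anything else is optional here.
        return data["volume"], data["volume_id"], data.get("access_mode", "rw")

    info = {"driver_volume_type": "vmdk",
            "data": {"volume": "vm-281450",
                     "volume_id": "d4d58d24-9376-47ab-ae70-ee11dcb0e43c",
                     "name": "volume-d4d58d24-9376-47ab-ae70-ee11dcb0e43c",
                     "access_mode": "rw", "encrypted": False}}
    print(parse_vmdk_connection_info(info))
    # ('vm-281450', 'd4d58d24-9376-47ab-ae70-ee11dcb0e43c', 'rw')

The ReconfigVM_Task issued between those two records (task-1315034, then task-1315035 for the follow-up reconfigure) is what actually wires the backing VMDK into the instance's virtual disk controller.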
[ 1066.707146] env[61839]: DEBUG oslo_vmware.api [None req-0b9fdc0e-074c-47a8-bcba-feb8c13e1519 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Task: {'id': task-1315037, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1066.788716] env[61839]: DEBUG oslo_concurrency.lockutils [None req-daf7eee7-c614-4c76-9727-c9d37b28607d tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 1.754s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1066.788716] env[61839]: INFO nova.compute.manager [None req-daf7eee7-c614-4c76-9727-c9d37b28607d tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: 506ea987-ea38-444b-81b7-f5343de14e4f] Migrating
[ 1066.793154] env[61839]: DEBUG oslo_concurrency.lockutils [None req-da276f41-7023-490c-aee3-8c375ee803c8 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.776s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1066.793446] env[61839]: DEBUG nova.objects.instance [None req-da276f41-7023-490c-aee3-8c375ee803c8 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Lazy-loading 'resources' on Instance uuid 6f43abec-51e2-40e4-8a0f-5a8617a9a9f8 {{(pid=61839) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}}
[ 1066.917951] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5926f90c-000c-4038-82f4-2f8b4dd4b3d7 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1066.926821] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47bfa441-a805-48c4-aacc-4886fc13d75a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1066.959510] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-daa72902-4d5b-443c-b6d1-a0cc49b71ea7 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1066.967928] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a207e3d7-995b-41bb-8079-ddc4877a9f33 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1066.981558] env[61839]: DEBUG nova.compute.provider_tree [None req-da276f41-7023-490c-aee3-8c375ee803c8 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1067.211206] env[61839]: DEBUG oslo_vmware.api [None req-0b9fdc0e-074c-47a8-bcba-feb8c13e1519 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Task: {'id': task-1315037, 'name': CreateSnapshot_Task} progress is 100%.
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1067.304852] env[61839]: DEBUG oslo_concurrency.lockutils [None req-daf7eee7-c614-4c76-9727-c9d37b28607d tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Acquiring lock "refresh_cache-506ea987-ea38-444b-81b7-f5343de14e4f" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1067.305192] env[61839]: DEBUG oslo_concurrency.lockutils [None req-daf7eee7-c614-4c76-9727-c9d37b28607d tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Acquired lock "refresh_cache-506ea987-ea38-444b-81b7-f5343de14e4f" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1067.305496] env[61839]: DEBUG nova.network.neutron [None req-daf7eee7-c614-4c76-9727-c9d37b28607d tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: 506ea987-ea38-444b-81b7-f5343de14e4f] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1067.485454] env[61839]: DEBUG nova.scheduler.client.report [None req-da276f41-7023-490c-aee3-8c375ee803c8 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1067.623228] env[61839]: DEBUG nova.objects.instance [None req-8dcbeef0-eeb4-4806-a2a7-432285ffc50a tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Lazy-loading 'flavor' on Instance uuid 625a8fc1-23fc-4035-855f-3d3a963cdcea {{(pid=61839) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1067.709791] env[61839]: DEBUG oslo_vmware.api [None req-0b9fdc0e-074c-47a8-bcba-feb8c13e1519 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Task: {'id': task-1315037, 'name': CreateSnapshot_Task, 'duration_secs': 0.839531} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1067.710148] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-0b9fdc0e-074c-47a8-bcba-feb8c13e1519 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: a1defab7-8433-411d-b7e2-c31f6a34b8e0] Created Snapshot of the VM instance {{(pid=61839) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1067.710913] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c929450-0940-4fb9-a49f-ccbc62bfd1d6 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.992097] env[61839]: DEBUG oslo_concurrency.lockutils [None req-da276f41-7023-490c-aee3-8c375ee803c8 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.199s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1068.011844] env[61839]: DEBUG nova.network.neutron [None req-daf7eee7-c614-4c76-9727-c9d37b28607d tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: 506ea987-ea38-444b-81b7-f5343de14e4f] Updating instance_info_cache with network_info: [{"id": "8ae9c8f9-030e-48f6-9368-4348d164e3b9", "address": "fa:16:3e:3f:6d:02", "network": {"id": "47b25b31-1262-485a-a847-2918f7fce488", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-914746239-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "25686a503d044467a1d641f14e14c65c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8c58d99d-ec12-4fc3-ab39-042b3f8cbb89", "external-id": "nsx-vlan-transportzone-44", "segmentation_id": 44, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8ae9c8f9-03", "ovs_interfaceid": "8ae9c8f9-030e-48f6-9368-4348d164e3b9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1068.014201] env[61839]: INFO nova.scheduler.client.report [None req-da276f41-7023-490c-aee3-8c375ee803c8 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Deleted allocations for instance 6f43abec-51e2-40e4-8a0f-5a8617a9a9f8 [ 1068.127998] env[61839]: DEBUG oslo_concurrency.lockutils [None req-8dcbeef0-eeb4-4806-a2a7-432285ffc50a tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Lock "625a8fc1-23fc-4035-855f-3d3a963cdcea" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 8.285s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1068.229242] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None 
req-0b9fdc0e-074c-47a8-bcba-feb8c13e1519 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: a1defab7-8433-411d-b7e2-c31f6a34b8e0] Creating linked-clone VM from snapshot {{(pid=61839) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1068.229554] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-aaa203cf-9b61-444b-8475-ab375eea6add {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.238319] env[61839]: DEBUG oslo_vmware.api [None req-0b9fdc0e-074c-47a8-bcba-feb8c13e1519 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Waiting for the task: (returnval){ [ 1068.238319] env[61839]: value = "task-1315038" [ 1068.238319] env[61839]: _type = "Task" [ 1068.238319] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1068.246356] env[61839]: DEBUG oslo_vmware.api [None req-0b9fdc0e-074c-47a8-bcba-feb8c13e1519 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Task: {'id': task-1315038, 'name': CloneVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1068.514266] env[61839]: DEBUG oslo_concurrency.lockutils [None req-daf7eee7-c614-4c76-9727-c9d37b28607d tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Releasing lock "refresh_cache-506ea987-ea38-444b-81b7-f5343de14e4f" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1068.521014] env[61839]: DEBUG oslo_concurrency.lockutils [None req-da276f41-7023-490c-aee3-8c375ee803c8 tempest-ServersTestJSON-2052072083 tempest-ServersTestJSON-2052072083-project-member] Lock "6f43abec-51e2-40e4-8a0f-5a8617a9a9f8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.452s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1068.748740] env[61839]: DEBUG oslo_vmware.api [None req-0b9fdc0e-074c-47a8-bcba-feb8c13e1519 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Task: {'id': task-1315038, 'name': CloneVM_Task} progress is 94%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1068.941085] env[61839]: DEBUG nova.compute.manager [None req-e8325cb9-ae88-479e-8845-fb08b904dac5 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 625a8fc1-23fc-4035-855f-3d3a963cdcea] Stashing vm_state: active {{(pid=61839) _prep_resize /opt/stack/nova/nova/compute/manager.py:5624}} [ 1068.991844] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1068.992123] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Cleaning up deleted instances {{(pid=61839) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11228}} [ 1069.248703] env[61839]: DEBUG oslo_vmware.api [None req-0b9fdc0e-074c-47a8-bcba-feb8c13e1519 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Task: {'id': task-1315038, 'name': CloneVM_Task} progress is 94%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1069.462996] env[61839]: DEBUG oslo_concurrency.lockutils [None req-e8325cb9-ae88-479e-8845-fb08b904dac5 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1069.463309] env[61839]: DEBUG oslo_concurrency.lockutils [None req-e8325cb9-ae88-479e-8845-fb08b904dac5 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1069.505743] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] There are 46 instances to clean {{(pid=61839) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11237}} [ 1069.506174] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] [instance: 6b193149-68a3-43fc-a331-1b49e0cab484] Instance has had 0 of 5 cleanup attempts {{(pid=61839) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1069.748910] env[61839]: DEBUG oslo_vmware.api [None req-0b9fdc0e-074c-47a8-bcba-feb8c13e1519 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Task: {'id': task-1315038, 'name': CloneVM_Task, 'duration_secs': 1.426131} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1069.749410] env[61839]: INFO nova.virt.vmwareapi.vmops [None req-0b9fdc0e-074c-47a8-bcba-feb8c13e1519 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: a1defab7-8433-411d-b7e2-c31f6a34b8e0] Created linked-clone VM from snapshot [ 1069.749938] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2257afc2-4b0e-42e8-8d28-f9f56e4afdea {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.757188] env[61839]: DEBUG nova.virt.vmwareapi.images [None req-0b9fdc0e-074c-47a8-bcba-feb8c13e1519 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: a1defab7-8433-411d-b7e2-c31f6a34b8e0] Uploading image f647401d-2329-4d44-8490-33aad50e15f2 {{(pid=61839) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1069.779976] env[61839]: DEBUG oslo_vmware.rw_handles [None req-0b9fdc0e-074c-47a8-bcba-feb8c13e1519 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1069.779976] env[61839]: value = "vm-281452" [ 1069.779976] env[61839]: _type = "VirtualMachine" [ 1069.779976] env[61839]: }. {{(pid=61839) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1069.780366] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-ef01e2e8-dc35-4beb-8edc-f85a26e969da {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.786778] env[61839]: DEBUG oslo_vmware.rw_handles [None req-0b9fdc0e-074c-47a8-bcba-feb8c13e1519 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Lease: (returnval){ [ 1069.786778] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52edd135-94fc-b5cd-914d-48f8b3a0e152" [ 1069.786778] env[61839]: _type = "HttpNfcLease" [ 1069.786778] env[61839]: } obtained for exporting VM: (result){ [ 1069.786778] env[61839]: value = "vm-281452" [ 1069.786778] env[61839]: _type = "VirtualMachine" [ 1069.786778] env[61839]: }. {{(pid=61839) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1069.787051] env[61839]: DEBUG oslo_vmware.api [None req-0b9fdc0e-074c-47a8-bcba-feb8c13e1519 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Waiting for the lease: (returnval){ [ 1069.787051] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52edd135-94fc-b5cd-914d-48f8b3a0e152" [ 1069.787051] env[61839]: _type = "HttpNfcLease" [ 1069.787051] env[61839]: } to be ready. {{(pid=61839) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1069.793246] env[61839]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1069.793246] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52edd135-94fc-b5cd-914d-48f8b3a0e152" [ 1069.793246] env[61839]: _type = "HttpNfcLease" [ 1069.793246] env[61839]: } is initializing. 
{{(pid=61839) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1069.968417] env[61839]: INFO nova.compute.claims [None req-e8325cb9-ae88-479e-8845-fb08b904dac5 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 625a8fc1-23fc-4035-855f-3d3a963cdcea] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1070.009165] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] [instance: 15794971-10d8-4234-8a72-90c940dae90c] Instance has had 0 of 5 cleanup attempts {{(pid=61839) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1070.032404] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55f24664-6221-4578-a45c-12e10586120a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.051543] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-daf7eee7-c614-4c76-9727-c9d37b28607d tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: 506ea987-ea38-444b-81b7-f5343de14e4f] Updating instance '506ea987-ea38-444b-81b7-f5343de14e4f' progress to 0 {{(pid=61839) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1070.296051] env[61839]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1070.296051] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52edd135-94fc-b5cd-914d-48f8b3a0e152" [ 1070.296051] env[61839]: _type = "HttpNfcLease" [ 1070.296051] env[61839]: } is ready. {{(pid=61839) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1070.296435] env[61839]: DEBUG oslo_vmware.rw_handles [None req-0b9fdc0e-074c-47a8-bcba-feb8c13e1519 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1070.296435] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52edd135-94fc-b5cd-914d-48f8b3a0e152" [ 1070.296435] env[61839]: _type = "HttpNfcLease" [ 1070.296435] env[61839]: }. {{(pid=61839) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1070.297451] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4630898f-8cf3-4b70-b07c-0612578eef4e {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.305254] env[61839]: DEBUG oslo_vmware.rw_handles [None req-0b9fdc0e-074c-47a8-bcba-feb8c13e1519 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5216441c-9c9a-0166-fae7-cdebb0966325/disk-0.vmdk from lease info. {{(pid=61839) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1070.305493] env[61839]: DEBUG oslo_vmware.rw_handles [None req-0b9fdc0e-074c-47a8-bcba-feb8c13e1519 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5216441c-9c9a-0166-fae7-cdebb0966325/disk-0.vmdk for reading. 
{{(pid=61839) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1070.399408] env[61839]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-646579f7-a07e-43aa-92e4-40e2f7d76fae {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.475195] env[61839]: INFO nova.compute.resource_tracker [None req-e8325cb9-ae88-479e-8845-fb08b904dac5 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 625a8fc1-23fc-4035-855f-3d3a963cdcea] Updating resource usage from migration 609e967d-234d-4e69-aef6-f40a329f11cf [ 1070.513186] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] [instance: a3cb1dd7-00ef-41f3-8db5-68ef9eb8d156] Instance has had 0 of 5 cleanup attempts {{(pid=61839) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1070.556768] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-daf7eee7-c614-4c76-9727-c9d37b28607d tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: 506ea987-ea38-444b-81b7-f5343de14e4f] Powering off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1070.560197] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-79024312-d3cb-4479-a6bb-0d01ce9f9126 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.569731] env[61839]: DEBUG oslo_vmware.api [None req-daf7eee7-c614-4c76-9727-c9d37b28607d tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Waiting for the task: (returnval){ [ 1070.569731] env[61839]: value = "task-1315040" [ 1070.569731] env[61839]: _type = "Task" [ 1070.569731] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1070.583867] env[61839]: DEBUG oslo_vmware.api [None req-daf7eee7-c614-4c76-9727-c9d37b28607d tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': task-1315040, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1070.635662] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-334d3a97-93f5-4a9c-8ff0-b4ea6c2a34e7 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.647033] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17cfac67-ba6b-42ce-9d96-97411d6ed6fe {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.682798] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdd6c49e-f9e5-43f1-9b6f-a856841892a1 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.691512] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91a64971-5780-4b3f-964a-d05fb829e9b7 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.706577] env[61839]: DEBUG nova.compute.provider_tree [None req-e8325cb9-ae88-479e-8845-fb08b904dac5 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1071.017670] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] [instance: 8e9bea05-d6d7-40a8-997d-8c952f596f75] Instance has had 0 of 5 cleanup attempts {{(pid=61839) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1071.079971] env[61839]: DEBUG oslo_vmware.api [None req-daf7eee7-c614-4c76-9727-c9d37b28607d tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': task-1315040, 'name': PowerOffVM_Task, 'duration_secs': 0.205953} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1071.080368] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-daf7eee7-c614-4c76-9727-c9d37b28607d tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: 506ea987-ea38-444b-81b7-f5343de14e4f] Powered off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1071.080564] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-daf7eee7-c614-4c76-9727-c9d37b28607d tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: 506ea987-ea38-444b-81b7-f5343de14e4f] Updating instance '506ea987-ea38-444b-81b7-f5343de14e4f' progress to 17 {{(pid=61839) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1071.211058] env[61839]: DEBUG nova.scheduler.client.report [None req-e8325cb9-ae88-479e-8845-fb08b904dac5 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1071.521523] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] [instance: a87f3a17-0a97-4b47-bc95-eee5975f8203] Instance has had 0 of 5 cleanup attempts {{(pid=61839) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1071.587905] env[61839]: DEBUG nova.virt.hardware [None req-daf7eee7-c614-4c76-9727-c9d37b28607d tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-18T16:51:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=e497cc62-282a-4a70-9770-22d80d8a1013,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1071.588286] env[61839]: DEBUG nova.virt.hardware [None req-daf7eee7-c614-4c76-9727-c9d37b28607d tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Flavor limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1071.588543] env[61839]: DEBUG nova.virt.hardware [None req-daf7eee7-c614-4c76-9727-c9d37b28607d tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Image limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1071.588851] env[61839]: DEBUG nova.virt.hardware [None req-daf7eee7-c614-4c76-9727-c9d37b28607d 
tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Flavor pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1071.589031] env[61839]: DEBUG nova.virt.hardware [None req-daf7eee7-c614-4c76-9727-c9d37b28607d tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Image pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1071.589196] env[61839]: DEBUG nova.virt.hardware [None req-daf7eee7-c614-4c76-9727-c9d37b28607d tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1071.589412] env[61839]: DEBUG nova.virt.hardware [None req-daf7eee7-c614-4c76-9727-c9d37b28607d tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1071.589581] env[61839]: DEBUG nova.virt.hardware [None req-daf7eee7-c614-4c76-9727-c9d37b28607d tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1071.589752] env[61839]: DEBUG nova.virt.hardware [None req-daf7eee7-c614-4c76-9727-c9d37b28607d tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Got 1 possible topologies {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1071.589923] env[61839]: DEBUG nova.virt.hardware [None req-daf7eee7-c614-4c76-9727-c9d37b28607d tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1071.590173] env[61839]: DEBUG nova.virt.hardware [None req-daf7eee7-c614-4c76-9727-c9d37b28607d tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1071.595494] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a8b8bc5c-3b7e-4175-80c6-aa0b7649859f {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.612253] env[61839]: DEBUG oslo_vmware.api [None req-daf7eee7-c614-4c76-9727-c9d37b28607d tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Waiting for the task: (returnval){ [ 1071.612253] env[61839]: value = "task-1315041" [ 1071.612253] env[61839]: _type = "Task" [ 1071.612253] env[61839]: } to complete. 
{{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1071.620739] env[61839]: DEBUG oslo_vmware.api [None req-daf7eee7-c614-4c76-9727-c9d37b28607d tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': task-1315041, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1071.716373] env[61839]: DEBUG oslo_concurrency.lockutils [None req-e8325cb9-ae88-479e-8845-fb08b904dac5 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.253s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1071.716599] env[61839]: INFO nova.compute.manager [None req-e8325cb9-ae88-479e-8845-fb08b904dac5 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 625a8fc1-23fc-4035-855f-3d3a963cdcea] Migrating [ 1072.024789] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] [instance: 3c832102-cacc-4dd8-a336-2aa1d8bd8116] Instance has had 0 of 5 cleanup attempts {{(pid=61839) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1072.123120] env[61839]: DEBUG oslo_vmware.api [None req-daf7eee7-c614-4c76-9727-c9d37b28607d tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': task-1315041, 'name': ReconfigVM_Task, 'duration_secs': 0.197493} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1072.123412] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-daf7eee7-c614-4c76-9727-c9d37b28607d tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: 506ea987-ea38-444b-81b7-f5343de14e4f] Updating instance '506ea987-ea38-444b-81b7-f5343de14e4f' progress to 33 {{(pid=61839) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1072.234448] env[61839]: DEBUG oslo_concurrency.lockutils [None req-e8325cb9-ae88-479e-8845-fb08b904dac5 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Acquiring lock "refresh_cache-625a8fc1-23fc-4035-855f-3d3a963cdcea" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1072.234694] env[61839]: DEBUG oslo_concurrency.lockutils [None req-e8325cb9-ae88-479e-8845-fb08b904dac5 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Acquired lock "refresh_cache-625a8fc1-23fc-4035-855f-3d3a963cdcea" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1072.234941] env[61839]: DEBUG nova.network.neutron [None req-e8325cb9-ae88-479e-8845-fb08b904dac5 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 625a8fc1-23fc-4035-855f-3d3a963cdcea] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1072.527741] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] [instance: 73b83239-bbc8-41d1-aec3-2b4519c320af] Instance has had 0 
of 5 cleanup attempts {{(pid=61839) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1072.630601] env[61839]: DEBUG nova.virt.hardware [None req-daf7eee7-c614-4c76-9727-c9d37b28607d tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-18T16:51:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=e497cc62-282a-4a70-9770-22d80d8a1013,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1072.631118] env[61839]: DEBUG nova.virt.hardware [None req-daf7eee7-c614-4c76-9727-c9d37b28607d tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Flavor limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1072.631281] env[61839]: DEBUG nova.virt.hardware [None req-daf7eee7-c614-4c76-9727-c9d37b28607d tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Image limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1072.631590] env[61839]: DEBUG nova.virt.hardware [None req-daf7eee7-c614-4c76-9727-c9d37b28607d tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Flavor pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1072.632813] env[61839]: DEBUG nova.virt.hardware [None req-daf7eee7-c614-4c76-9727-c9d37b28607d tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Image pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1072.633172] env[61839]: DEBUG nova.virt.hardware [None req-daf7eee7-c614-4c76-9727-c9d37b28607d tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1072.633348] env[61839]: DEBUG nova.virt.hardware [None req-daf7eee7-c614-4c76-9727-c9d37b28607d tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1072.633645] env[61839]: DEBUG nova.virt.hardware [None req-daf7eee7-c614-4c76-9727-c9d37b28607d tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1072.633916] env[61839]: DEBUG nova.virt.hardware [None req-daf7eee7-c614-4c76-9727-c9d37b28607d tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Got 1 possible 
topologies {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1072.634191] env[61839]: DEBUG nova.virt.hardware [None req-daf7eee7-c614-4c76-9727-c9d37b28607d tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1072.634473] env[61839]: DEBUG nova.virt.hardware [None req-daf7eee7-c614-4c76-9727-c9d37b28607d tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1072.640360] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-daf7eee7-c614-4c76-9727-c9d37b28607d tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: 506ea987-ea38-444b-81b7-f5343de14e4f] Reconfiguring VM instance instance-0000006c to detach disk 2000 {{(pid=61839) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1072.640692] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d4ca7ace-e1f6-49b9-a518-9b7eda9d0ec5 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.659971] env[61839]: DEBUG oslo_vmware.api [None req-daf7eee7-c614-4c76-9727-c9d37b28607d tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Waiting for the task: (returnval){ [ 1072.659971] env[61839]: value = "task-1315042" [ 1072.659971] env[61839]: _type = "Task" [ 1072.659971] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1072.668894] env[61839]: DEBUG oslo_vmware.api [None req-daf7eee7-c614-4c76-9727-c9d37b28607d tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': task-1315042, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1072.961040] env[61839]: DEBUG nova.network.neutron [None req-e8325cb9-ae88-479e-8845-fb08b904dac5 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 625a8fc1-23fc-4035-855f-3d3a963cdcea] Updating instance_info_cache with network_info: [{"id": "da11baa5-354e-440b-a384-10cd83ff2715", "address": "fa:16:3e:1c:dc:5a", "network": {"id": "54f10e74-ee7b-499c-a6b6-a27d337719cd", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-79581761-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.143", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5f789f3900a347b59c491e9d141fb9e7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe20ef0e-0991-44d7-887d-08dddac0b56b", "external-id": "nsx-vlan-transportzone-991", "segmentation_id": 991, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapda11baa5-35", "ovs_interfaceid": "da11baa5-354e-440b-a384-10cd83ff2715", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1073.030627] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] [instance: a262845a-0ae2-4e0e-9040-01f0ed37c95c] Instance has had 0 of 5 cleanup attempts {{(pid=61839) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1073.170649] env[61839]: DEBUG oslo_vmware.api [None req-daf7eee7-c614-4c76-9727-c9d37b28607d tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': task-1315042, 'name': ReconfigVM_Task} progress is 99%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1073.465131] env[61839]: DEBUG oslo_concurrency.lockutils [None req-e8325cb9-ae88-479e-8845-fb08b904dac5 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Releasing lock "refresh_cache-625a8fc1-23fc-4035-855f-3d3a963cdcea" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1073.533917] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] [instance: 75344275-bdf2-4526-a101-e62ec270dd72] Instance has had 0 of 5 cleanup attempts {{(pid=61839) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1073.670645] env[61839]: DEBUG oslo_vmware.api [None req-daf7eee7-c614-4c76-9727-c9d37b28607d tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': task-1315042, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1074.037586] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] [instance: 3e27bc4a-a4f3-4929-931a-0c3ecaf10e65] Instance has had 0 of 5 cleanup attempts {{(pid=61839) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1074.171030] env[61839]: DEBUG oslo_vmware.api [None req-daf7eee7-c614-4c76-9727-c9d37b28607d tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': task-1315042, 'name': ReconfigVM_Task, 'duration_secs': 1.164274} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1074.171343] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-daf7eee7-c614-4c76-9727-c9d37b28607d tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: 506ea987-ea38-444b-81b7-f5343de14e4f] Reconfigured VM instance instance-0000006c to detach disk 2000 {{(pid=61839) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1074.172137] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9521f932-50aa-468b-9209-6fd6f9b5a784 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.202592] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-daf7eee7-c614-4c76-9727-c9d37b28607d tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: 506ea987-ea38-444b-81b7-f5343de14e4f] Reconfiguring VM instance instance-0000006c to attach disk [datastore1] 506ea987-ea38-444b-81b7-f5343de14e4f/506ea987-ea38-444b-81b7-f5343de14e4f.vmdk or device None with type thin {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1074.203506] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-db6a78cd-bdac-45c3-acf2-1c275d539667 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.225178] env[61839]: DEBUG oslo_vmware.api [None req-daf7eee7-c614-4c76-9727-c9d37b28607d tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Waiting for the task: (returnval){ [ 1074.225178] env[61839]: value = "task-1315043" [ 1074.225178] env[61839]: _type = "Task" [ 1074.225178] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1074.233192] env[61839]: DEBUG oslo_vmware.api [None req-daf7eee7-c614-4c76-9727-c9d37b28607d tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': task-1315043, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1074.541503] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] [instance: 23ee24d5-bccd-497d-a53f-b9723fd9c707] Instance has had 0 of 5 cleanup attempts {{(pid=61839) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1074.735104] env[61839]: DEBUG oslo_vmware.api [None req-daf7eee7-c614-4c76-9727-c9d37b28607d tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': task-1315043, 'name': ReconfigVM_Task, 'duration_secs': 0.479851} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1074.735764] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-daf7eee7-c614-4c76-9727-c9d37b28607d tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: 506ea987-ea38-444b-81b7-f5343de14e4f] Reconfigured VM instance instance-0000006c to attach disk [datastore1] 506ea987-ea38-444b-81b7-f5343de14e4f/506ea987-ea38-444b-81b7-f5343de14e4f.vmdk or device None with type thin {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1074.735764] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-daf7eee7-c614-4c76-9727-c9d37b28607d tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: 506ea987-ea38-444b-81b7-f5343de14e4f] Updating instance '506ea987-ea38-444b-81b7-f5343de14e4f' progress to 50 {{(pid=61839) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1074.980578] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2c587c7-18f5-4b9b-8059-fb766715a0b4 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.002596] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-e8325cb9-ae88-479e-8845-fb08b904dac5 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 625a8fc1-23fc-4035-855f-3d3a963cdcea] Updating instance '625a8fc1-23fc-4035-855f-3d3a963cdcea' progress to 0 {{(pid=61839) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1075.044947] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] [instance: 21e1c5b2-9865-457b-87c8-ce56c3c7b8f9] Instance has had 0 of 5 cleanup attempts {{(pid=61839) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1075.243088] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af48e91b-d3d3-4b93-8054-695757de6375 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.262559] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ae2658f-eac4-4c75-a1bd-62b53743eb41 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.280384] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-daf7eee7-c614-4c76-9727-c9d37b28607d tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: 506ea987-ea38-444b-81b7-f5343de14e4f] Updating instance '506ea987-ea38-444b-81b7-f5343de14e4f' 
progress to 67 {{(pid=61839) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1075.511130] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8325cb9-ae88-479e-8845-fb08b904dac5 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 625a8fc1-23fc-4035-855f-3d3a963cdcea] Powering off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1075.511436] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b6985132-be89-47bb-a415-7342e304d455 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.522112] env[61839]: DEBUG oslo_vmware.api [None req-e8325cb9-ae88-479e-8845-fb08b904dac5 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Waiting for the task: (returnval){ [ 1075.522112] env[61839]: value = "task-1315044" [ 1075.522112] env[61839]: _type = "Task" [ 1075.522112] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1075.530645] env[61839]: DEBUG oslo_vmware.api [None req-e8325cb9-ae88-479e-8845-fb08b904dac5 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1315044, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1075.548606] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] [instance: d4a8c153-7585-4c78-8aa4-56077e0a7af6] Instance has had 0 of 5 cleanup attempts {{(pid=61839) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1075.821857] env[61839]: DEBUG nova.network.neutron [None req-daf7eee7-c614-4c76-9727-c9d37b28607d tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: 506ea987-ea38-444b-81b7-f5343de14e4f] Port 8ae9c8f9-030e-48f6-9368-4348d164e3b9 binding to destination host cpu-1 is already ACTIVE {{(pid=61839) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1076.033858] env[61839]: DEBUG oslo_vmware.api [None req-e8325cb9-ae88-479e-8845-fb08b904dac5 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1315044, 'name': PowerOffVM_Task, 'duration_secs': 0.174491} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1076.034297] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8325cb9-ae88-479e-8845-fb08b904dac5 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 625a8fc1-23fc-4035-855f-3d3a963cdcea] Powered off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1076.034584] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-e8325cb9-ae88-479e-8845-fb08b904dac5 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 625a8fc1-23fc-4035-855f-3d3a963cdcea] Updating instance '625a8fc1-23fc-4035-855f-3d3a963cdcea' progress to 17 {{(pid=61839) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1076.052246] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] [instance: d718d866-dd6c-4332-b63a-be6850a5a785] Instance has had 0 of 5 cleanup attempts {{(pid=61839) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1076.543058] env[61839]: DEBUG nova.virt.hardware [None req-e8325cb9-ae88-479e-8845-fb08b904dac5 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-18T16:51:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=e497cc62-282a-4a70-9770-22d80d8a1013,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1076.543324] env[61839]: DEBUG nova.virt.hardware [None req-e8325cb9-ae88-479e-8845-fb08b904dac5 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Flavor limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1076.543418] env[61839]: DEBUG nova.virt.hardware [None req-e8325cb9-ae88-479e-8845-fb08b904dac5 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Image limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1076.543631] env[61839]: DEBUG nova.virt.hardware [None req-e8325cb9-ae88-479e-8845-fb08b904dac5 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Flavor pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1076.543796] env[61839]: DEBUG nova.virt.hardware [None req-e8325cb9-ae88-479e-8845-fb08b904dac5 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Image pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1076.544018] env[61839]: DEBUG nova.virt.hardware [None req-e8325cb9-ae88-479e-8845-fb08b904dac5 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, 
cores=65536, threads=65536 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1076.544264] env[61839]: DEBUG nova.virt.hardware [None req-e8325cb9-ae88-479e-8845-fb08b904dac5 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1076.544433] env[61839]: DEBUG nova.virt.hardware [None req-e8325cb9-ae88-479e-8845-fb08b904dac5 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1076.544624] env[61839]: DEBUG nova.virt.hardware [None req-e8325cb9-ae88-479e-8845-fb08b904dac5 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Got 1 possible topologies {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1076.544809] env[61839]: DEBUG nova.virt.hardware [None req-e8325cb9-ae88-479e-8845-fb08b904dac5 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1076.544995] env[61839]: DEBUG nova.virt.hardware [None req-e8325cb9-ae88-479e-8845-fb08b904dac5 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1076.550544] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-93d17016-cc9d-4267-9284-03173e33d02b {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.561158] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] [instance: 12087baa-e700-4977-b2df-3aa2c56cc2f6] Instance has had 0 of 5 cleanup attempts {{(pid=61839) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1076.568799] env[61839]: DEBUG oslo_vmware.api [None req-e8325cb9-ae88-479e-8845-fb08b904dac5 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Waiting for the task: (returnval){ [ 1076.568799] env[61839]: value = "task-1315045" [ 1076.568799] env[61839]: _type = "Task" [ 1076.568799] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1076.578257] env[61839]: DEBUG oslo_vmware.api [None req-e8325cb9-ae88-479e-8845-fb08b904dac5 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1315045, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1076.844159] env[61839]: DEBUG oslo_concurrency.lockutils [None req-daf7eee7-c614-4c76-9727-c9d37b28607d tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Acquiring lock "506ea987-ea38-444b-81b7-f5343de14e4f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1076.844459] env[61839]: DEBUG oslo_concurrency.lockutils [None req-daf7eee7-c614-4c76-9727-c9d37b28607d tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Lock "506ea987-ea38-444b-81b7-f5343de14e4f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1076.844667] env[61839]: DEBUG oslo_concurrency.lockutils [None req-daf7eee7-c614-4c76-9727-c9d37b28607d tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Lock "506ea987-ea38-444b-81b7-f5343de14e4f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1077.065101] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] [instance: bac4c882-a23d-412f-ae98-f4f21d86681a] Instance has had 0 of 5 cleanup attempts {{(pid=61839) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1077.079288] env[61839]: DEBUG oslo_vmware.api [None req-e8325cb9-ae88-479e-8845-fb08b904dac5 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1315045, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1077.455663] env[61839]: DEBUG oslo_concurrency.lockutils [None req-ebefa2c6-0a2f-4c23-a794-83aa597b3b8d tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Acquiring lock "interface-dbd34858-9806-4d3f-b829-948651056da2-da45fc64-4aa9-4af8-b820-45a5fe4da356" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1077.456076] env[61839]: DEBUG oslo_concurrency.lockutils [None req-ebefa2c6-0a2f-4c23-a794-83aa597b3b8d tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Lock "interface-dbd34858-9806-4d3f-b829-948651056da2-da45fc64-4aa9-4af8-b820-45a5fe4da356" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1077.456556] env[61839]: DEBUG nova.objects.instance [None req-ebefa2c6-0a2f-4c23-a794-83aa597b3b8d tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Lazy-loading 'flavor' on Instance uuid dbd34858-9806-4d3f-b829-948651056da2 {{(pid=61839) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1077.568048] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] [instance: 0d42326a-9958-463a-90ae-34fb55e99c5b] Instance has had 0 of 5 cleanup attempts {{(pid=61839) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1077.580635] env[61839]: DEBUG oslo_vmware.api [None req-e8325cb9-ae88-479e-8845-fb08b904dac5 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1315045, 'name': ReconfigVM_Task} progress is 99%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1077.832072] env[61839]: DEBUG oslo_vmware.rw_handles [None req-0b9fdc0e-074c-47a8-bcba-feb8c13e1519 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5216441c-9c9a-0166-fae7-cdebb0966325/disk-0.vmdk. {{(pid=61839) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1077.832981] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c6d23ee-0090-4512-848c-fe895b4f57bb {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.839296] env[61839]: DEBUG oslo_vmware.rw_handles [None req-0b9fdc0e-074c-47a8-bcba-feb8c13e1519 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5216441c-9c9a-0166-fae7-cdebb0966325/disk-0.vmdk is in state: ready. 
{{(pid=61839) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1077.839463] env[61839]: ERROR oslo_vmware.rw_handles [None req-0b9fdc0e-074c-47a8-bcba-feb8c13e1519 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5216441c-9c9a-0166-fae7-cdebb0966325/disk-0.vmdk due to incomplete transfer. [ 1077.839695] env[61839]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-137b3fec-9214-48a7-8240-5107e7225e13 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.846782] env[61839]: DEBUG oslo_vmware.rw_handles [None req-0b9fdc0e-074c-47a8-bcba-feb8c13e1519 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5216441c-9c9a-0166-fae7-cdebb0966325/disk-0.vmdk. {{(pid=61839) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1077.846998] env[61839]: DEBUG nova.virt.vmwareapi.images [None req-0b9fdc0e-074c-47a8-bcba-feb8c13e1519 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: a1defab7-8433-411d-b7e2-c31f6a34b8e0] Uploaded image f647401d-2329-4d44-8490-33aad50e15f2 to the Glance image server {{(pid=61839) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1077.849313] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b9fdc0e-074c-47a8-bcba-feb8c13e1519 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: a1defab7-8433-411d-b7e2-c31f6a34b8e0] Destroying the VM {{(pid=61839) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1077.851839] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-f384a8e4-a117-4d77-8ad4-676386a750e1 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.858242] env[61839]: DEBUG oslo_vmware.api [None req-0b9fdc0e-074c-47a8-bcba-feb8c13e1519 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Waiting for the task: (returnval){ [ 1077.858242] env[61839]: value = "task-1315046" [ 1077.858242] env[61839]: _type = "Task" [ 1077.858242] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1077.866106] env[61839]: DEBUG oslo_vmware.api [None req-0b9fdc0e-074c-47a8-bcba-feb8c13e1519 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Task: {'id': task-1315046, 'name': Destroy_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1077.879377] env[61839]: DEBUG oslo_concurrency.lockutils [None req-daf7eee7-c614-4c76-9727-c9d37b28607d tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Acquiring lock "refresh_cache-506ea987-ea38-444b-81b7-f5343de14e4f" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1077.879560] env[61839]: DEBUG oslo_concurrency.lockutils [None req-daf7eee7-c614-4c76-9727-c9d37b28607d tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Acquired lock "refresh_cache-506ea987-ea38-444b-81b7-f5343de14e4f" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1077.879743] env[61839]: DEBUG nova.network.neutron [None req-daf7eee7-c614-4c76-9727-c9d37b28607d tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: 506ea987-ea38-444b-81b7-f5343de14e4f] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1078.019410] env[61839]: DEBUG oslo_concurrency.lockutils [None req-279fa87f-e9be-4e4a-b5f3-6b0903e84a6f tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Acquiring lock "25c574c4-e39b-4009-a562-a4a5bf74a40c" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1078.019654] env[61839]: DEBUG oslo_concurrency.lockutils [None req-279fa87f-e9be-4e4a-b5f3-6b0903e84a6f tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Lock "25c574c4-e39b-4009-a562-a4a5bf74a40c" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1078.066218] env[61839]: DEBUG nova.objects.instance [None req-ebefa2c6-0a2f-4c23-a794-83aa597b3b8d tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Lazy-loading 'pci_requests' on Instance uuid dbd34858-9806-4d3f-b829-948651056da2 {{(pid=61839) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1078.075092] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] [instance: 65f34f9e-353a-4f94-8f79-9bda89451885] Instance has had 0 of 5 cleanup attempts {{(pid=61839) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1078.082035] env[61839]: DEBUG oslo_vmware.api [None req-e8325cb9-ae88-479e-8845-fb08b904dac5 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1315045, 'name': ReconfigVM_Task, 'duration_secs': 1.241278} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1078.082341] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-e8325cb9-ae88-479e-8845-fb08b904dac5 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 625a8fc1-23fc-4035-855f-3d3a963cdcea] Updating instance '625a8fc1-23fc-4035-855f-3d3a963cdcea' progress to 33 {{(pid=61839) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1078.367753] env[61839]: DEBUG oslo_vmware.api [None req-0b9fdc0e-074c-47a8-bcba-feb8c13e1519 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Task: {'id': task-1315046, 'name': Destroy_Task, 'duration_secs': 0.326141} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1078.368050] env[61839]: INFO nova.virt.vmwareapi.vm_util [None req-0b9fdc0e-074c-47a8-bcba-feb8c13e1519 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: a1defab7-8433-411d-b7e2-c31f6a34b8e0] Destroyed the VM [ 1078.368272] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-0b9fdc0e-074c-47a8-bcba-feb8c13e1519 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: a1defab7-8433-411d-b7e2-c31f6a34b8e0] Deleting Snapshot of the VM instance {{(pid=61839) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1078.368517] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-64d515a6-1599-4d3f-bf8d-6b8040a03751 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.375316] env[61839]: DEBUG oslo_vmware.api [None req-0b9fdc0e-074c-47a8-bcba-feb8c13e1519 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Waiting for the task: (returnval){ [ 1078.375316] env[61839]: value = "task-1315047" [ 1078.375316] env[61839]: _type = "Task" [ 1078.375316] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1078.384132] env[61839]: DEBUG oslo_vmware.api [None req-0b9fdc0e-074c-47a8-bcba-feb8c13e1519 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Task: {'id': task-1315047, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1078.522726] env[61839]: DEBUG nova.compute.utils [None req-279fa87f-e9be-4e4a-b5f3-6b0903e84a6f tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Using /dev/sd instead of None {{(pid=61839) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1078.568756] env[61839]: DEBUG nova.objects.base [None req-ebefa2c6-0a2f-4c23-a794-83aa597b3b8d tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Object Instance lazy-loaded attributes: flavor,pci_requests {{(pid=61839) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 1078.568977] env[61839]: DEBUG nova.network.neutron [None req-ebefa2c6-0a2f-4c23-a794-83aa597b3b8d tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: dbd34858-9806-4d3f-b829-948651056da2] allocate_for_instance() {{(pid=61839) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1078.571992] env[61839]: DEBUG nova.network.neutron [None req-daf7eee7-c614-4c76-9727-c9d37b28607d tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: 506ea987-ea38-444b-81b7-f5343de14e4f] Updating instance_info_cache with network_info: [{"id": "8ae9c8f9-030e-48f6-9368-4348d164e3b9", "address": "fa:16:3e:3f:6d:02", "network": {"id": "47b25b31-1262-485a-a847-2918f7fce488", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-914746239-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "25686a503d044467a1d641f14e14c65c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8c58d99d-ec12-4fc3-ab39-042b3f8cbb89", "external-id": "nsx-vlan-transportzone-44", "segmentation_id": 44, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8ae9c8f9-03", "ovs_interfaceid": "8ae9c8f9-030e-48f6-9368-4348d164e3b9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1078.577674] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] [instance: 042183e2-d203-4d07-a668-df24ba50e242] Instance has had 0 of 5 cleanup attempts {{(pid=61839) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1078.588260] env[61839]: DEBUG nova.virt.hardware [None req-e8325cb9-ae88-479e-8845-fb08b904dac5 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-18T16:51:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum=<?>,container_format='bare',created_at=<?>,direct_url=<?>,disk_format='vmdk',id=e497cc62-282a-4a70-9770-22d80d8a1013,min_disk=1,min_ram=0,name=<?>,owner=<?>,properties=ImageMetaProps,protected=<?>,size=<?>,status=<?>,tags=<?>,updated_at=<?>,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1078.588492] env[61839]: DEBUG nova.virt.hardware [None req-e8325cb9-ae88-479e-8845-fb08b904dac5 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Flavor limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1078.588661] env[61839]: DEBUG nova.virt.hardware [None req-e8325cb9-ae88-479e-8845-fb08b904dac5 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Image limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1078.588848] env[61839]: DEBUG nova.virt.hardware [None req-e8325cb9-ae88-479e-8845-fb08b904dac5 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Flavor pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1078.589008] env[61839]: DEBUG nova.virt.hardware [None req-e8325cb9-ae88-479e-8845-fb08b904dac5 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Image pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1078.589171] env[61839]: DEBUG nova.virt.hardware [None req-e8325cb9-ae88-479e-8845-fb08b904dac5 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1078.589379] env[61839]: DEBUG nova.virt.hardware [None req-e8325cb9-ae88-479e-8845-fb08b904dac5 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1078.589542] env[61839]: DEBUG nova.virt.hardware [None req-e8325cb9-ae88-479e-8845-fb08b904dac5 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1078.589713] env[61839]: DEBUG nova.virt.hardware [None req-e8325cb9-ae88-479e-8845-fb08b904dac5 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Got 1 possible topologies {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1078.589881] env[61839]: DEBUG nova.virt.hardware [None req-e8325cb9-ae88-479e-8845-fb08b904dac5 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1078.590079] env[61839]: DEBUG nova.virt.hardware [None req-e8325cb9-ae88-479e-8845-fb08b904dac5 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Sorted 
desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1078.595501] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-e8325cb9-ae88-479e-8845-fb08b904dac5 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 625a8fc1-23fc-4035-855f-3d3a963cdcea] Reconfiguring VM instance instance-00000064 to detach disk 2000 {{(pid=61839) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1078.596066] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9c460c5f-1978-42b7-9f51-f75c2ab9f551 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.618241] env[61839]: DEBUG oslo_vmware.api [None req-e8325cb9-ae88-479e-8845-fb08b904dac5 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Waiting for the task: (returnval){ [ 1078.618241] env[61839]: value = "task-1315048" [ 1078.618241] env[61839]: _type = "Task" [ 1078.618241] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1078.626198] env[61839]: DEBUG oslo_vmware.api [None req-e8325cb9-ae88-479e-8845-fb08b904dac5 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1315048, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1078.647901] env[61839]: DEBUG nova.policy [None req-ebefa2c6-0a2f-4c23-a794-83aa597b3b8d tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '76a29e808031416ab8895e89c337be6e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7e03467b7fba46a9aac1562a1cb8368e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61839) authorize /opt/stack/nova/nova/policy.py:201}} [ 1078.884766] env[61839]: DEBUG oslo_vmware.api [None req-0b9fdc0e-074c-47a8-bcba-feb8c13e1519 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Task: {'id': task-1315047, 'name': RemoveSnapshot_Task, 'duration_secs': 0.357828} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1078.885060] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-0b9fdc0e-074c-47a8-bcba-feb8c13e1519 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: a1defab7-8433-411d-b7e2-c31f6a34b8e0] Deleted Snapshot of the VM instance {{(pid=61839) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1078.885354] env[61839]: DEBUG nova.compute.manager [None req-0b9fdc0e-074c-47a8-bcba-feb8c13e1519 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: a1defab7-8433-411d-b7e2-c31f6a34b8e0] Checking state {{(pid=61839) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1078.886136] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-755db2d6-4653-481a-a3a2-e10ea2a59c30 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.025726] env[61839]: DEBUG oslo_concurrency.lockutils [None req-279fa87f-e9be-4e4a-b5f3-6b0903e84a6f tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Lock "25c574c4-e39b-4009-a562-a4a5bf74a40c" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name.<locals>.do_reserve" :: held 1.006s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1079.074795] env[61839]: DEBUG oslo_concurrency.lockutils [None req-daf7eee7-c614-4c76-9727-c9d37b28607d tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Releasing lock "refresh_cache-506ea987-ea38-444b-81b7-f5343de14e4f" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1079.079908] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] [instance: d187e75f-39a9-467b-b5ef-e2772d9b71af] Instance has had 0 of 5 cleanup attempts {{(pid=61839) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1079.128497] env[61839]: DEBUG oslo_vmware.api [None req-e8325cb9-ae88-479e-8845-fb08b904dac5 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1315048, 'name': ReconfigVM_Task, 'duration_secs': 0.186963} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1079.128779] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-e8325cb9-ae88-479e-8845-fb08b904dac5 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 625a8fc1-23fc-4035-855f-3d3a963cdcea] Reconfigured VM instance instance-00000064 to detach disk 2000 {{(pid=61839) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1079.129562] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72fead69-99f4-4da8-9438-a881c2c37195 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.154045] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-e8325cb9-ae88-479e-8845-fb08b904dac5 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 625a8fc1-23fc-4035-855f-3d3a963cdcea] Reconfiguring VM instance instance-00000064 to attach disk [datastore1] 625a8fc1-23fc-4035-855f-3d3a963cdcea/625a8fc1-23fc-4035-855f-3d3a963cdcea.vmdk or device None with type thin {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1079.154250] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f1acb7b6-ac87-49fb-abec-ff814ad35c43 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.171276] env[61839]: DEBUG oslo_vmware.api [None req-e8325cb9-ae88-479e-8845-fb08b904dac5 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Waiting for the task: (returnval){ [ 1079.171276] env[61839]: value = "task-1315049" [ 1079.171276] env[61839]: _type = "Task" [ 1079.171276] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1079.178275] env[61839]: DEBUG oslo_vmware.api [None req-e8325cb9-ae88-479e-8845-fb08b904dac5 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1315049, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1079.398512] env[61839]: INFO nova.compute.manager [None req-0b9fdc0e-074c-47a8-bcba-feb8c13e1519 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: a1defab7-8433-411d-b7e2-c31f6a34b8e0] Shelve offloading [ 1079.400182] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b9fdc0e-074c-47a8-bcba-feb8c13e1519 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: a1defab7-8433-411d-b7e2-c31f6a34b8e0] Powering off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1079.400431] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c4f22107-74e9-4f6a-bf2a-fe780455c7db {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.407840] env[61839]: DEBUG oslo_vmware.api [None req-0b9fdc0e-074c-47a8-bcba-feb8c13e1519 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Waiting for the task: (returnval){ [ 1079.407840] env[61839]: value = "task-1315050" [ 1079.407840] env[61839]: _type = "Task" [ 1079.407840] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1079.417628] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b9fdc0e-074c-47a8-bcba-feb8c13e1519 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: a1defab7-8433-411d-b7e2-c31f6a34b8e0] VM already powered off {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1079.417826] env[61839]: DEBUG nova.compute.manager [None req-0b9fdc0e-074c-47a8-bcba-feb8c13e1519 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: a1defab7-8433-411d-b7e2-c31f6a34b8e0] Checking state {{(pid=61839) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1079.418592] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f83c8abd-4a4d-46ef-b99d-b74f2dea5566 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.423823] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0b9fdc0e-074c-47a8-bcba-feb8c13e1519 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Acquiring lock "refresh_cache-a1defab7-8433-411d-b7e2-c31f6a34b8e0" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1079.424021] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0b9fdc0e-074c-47a8-bcba-feb8c13e1519 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Acquired lock "refresh_cache-a1defab7-8433-411d-b7e2-c31f6a34b8e0" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1079.424200] env[61839]: DEBUG nova.network.neutron [None req-0b9fdc0e-074c-47a8-bcba-feb8c13e1519 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: a1defab7-8433-411d-b7e2-c31f6a34b8e0] Building network info cache for 
instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1079.582729] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] [instance: fa8a2265-291e-4424-bea1-72574e495a72] Instance has had 0 of 5 cleanup attempts {{(pid=61839) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1079.677267] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70a45c0b-9bf5-48a5-9b39-e22cd066d8e9 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.697993] env[61839]: DEBUG oslo_vmware.api [None req-e8325cb9-ae88-479e-8845-fb08b904dac5 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1315049, 'name': ReconfigVM_Task, 'duration_secs': 0.290116} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1079.698693] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b3c354f-2b30-4286-8a0d-86763bb23999 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.701255] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-e8325cb9-ae88-479e-8845-fb08b904dac5 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 625a8fc1-23fc-4035-855f-3d3a963cdcea] Reconfigured VM instance instance-00000064 to attach disk [datastore1] 625a8fc1-23fc-4035-855f-3d3a963cdcea/625a8fc1-23fc-4035-855f-3d3a963cdcea.vmdk or device None with type thin {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1079.701558] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-e8325cb9-ae88-479e-8845-fb08b904dac5 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 625a8fc1-23fc-4035-855f-3d3a963cdcea] Updating instance '625a8fc1-23fc-4035-855f-3d3a963cdcea' progress to 50 {{(pid=61839) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1079.709134] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-daf7eee7-c614-4c76-9727-c9d37b28607d tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: 506ea987-ea38-444b-81b7-f5343de14e4f] Updating instance '506ea987-ea38-444b-81b7-f5343de14e4f' progress to 83 {{(pid=61839) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1080.048289] env[61839]: DEBUG nova.compute.manager [req-e72673af-2728-4be3-9e37-6ad4e789dacb req-32d5a358-3f29-4af6-b68c-c2a5a3eac43c service nova] [instance: dbd34858-9806-4d3f-b829-948651056da2] Received event network-vif-plugged-da45fc64-4aa9-4af8-b820-45a5fe4da356 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1080.048478] env[61839]: DEBUG oslo_concurrency.lockutils [req-e72673af-2728-4be3-9e37-6ad4e789dacb req-32d5a358-3f29-4af6-b68c-c2a5a3eac43c service nova] Acquiring lock "dbd34858-9806-4d3f-b829-948651056da2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1080.048700] env[61839]: DEBUG oslo_concurrency.lockutils [req-e72673af-2728-4be3-9e37-6ad4e789dacb 
req-32d5a358-3f29-4af6-b68c-c2a5a3eac43c service nova] Lock "dbd34858-9806-4d3f-b829-948651056da2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1080.048878] env[61839]: DEBUG oslo_concurrency.lockutils [req-e72673af-2728-4be3-9e37-6ad4e789dacb req-32d5a358-3f29-4af6-b68c-c2a5a3eac43c service nova] Lock "dbd34858-9806-4d3f-b829-948651056da2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1080.049103] env[61839]: DEBUG nova.compute.manager [req-e72673af-2728-4be3-9e37-6ad4e789dacb req-32d5a358-3f29-4af6-b68c-c2a5a3eac43c service nova] [instance: dbd34858-9806-4d3f-b829-948651056da2] No waiting events found dispatching network-vif-plugged-da45fc64-4aa9-4af8-b820-45a5fe4da356 {{(pid=61839) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1080.049297] env[61839]: WARNING nova.compute.manager [req-e72673af-2728-4be3-9e37-6ad4e789dacb req-32d5a358-3f29-4af6-b68c-c2a5a3eac43c service nova] [instance: dbd34858-9806-4d3f-b829-948651056da2] Received unexpected event network-vif-plugged-da45fc64-4aa9-4af8-b820-45a5fe4da356 for instance with vm_state active and task_state None. [ 1080.089994] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] [instance: a4463efc-ffca-4552-a072-cbf5fe062533] Instance has had 0 of 5 cleanup attempts {{(pid=61839) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1080.092480] env[61839]: DEBUG oslo_concurrency.lockutils [None req-279fa87f-e9be-4e4a-b5f3-6b0903e84a6f tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Acquiring lock "25c574c4-e39b-4009-a562-a4a5bf74a40c" by "nova.compute.manager.ComputeManager.attach_volume.<locals>.do_attach_volume" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1080.092715] env[61839]: DEBUG oslo_concurrency.lockutils [None req-279fa87f-e9be-4e4a-b5f3-6b0903e84a6f tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Lock "25c574c4-e39b-4009-a562-a4a5bf74a40c" acquired by "nova.compute.manager.ComputeManager.attach_volume.<locals>.do_attach_volume" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1080.092938] env[61839]: INFO nova.compute.manager [None req-279fa87f-e9be-4e4a-b5f3-6b0903e84a6f tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 25c574c4-e39b-4009-a562-a4a5bf74a40c] Attaching volume bf363321-5f33-48dc-9ed5-40c5f2cf0d88 to /dev/sdb [ 1080.126739] env[61839]: DEBUG nova.network.neutron [None req-ebefa2c6-0a2f-4c23-a794-83aa597b3b8d tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: dbd34858-9806-4d3f-b829-948651056da2] Successfully updated port: da45fc64-4aa9-4af8-b820-45a5fe4da356 {{(pid=61839) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1080.136366] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97418603-ca01-4bfc-be94-03a56dba3485 {{(pid=61839) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.140045] env[61839]: DEBUG nova.network.neutron [None req-0b9fdc0e-074c-47a8-bcba-feb8c13e1519 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: a1defab7-8433-411d-b7e2-c31f6a34b8e0] Updating instance_info_cache with network_info: [{"id": "8b7a9c37-1009-4d15-b75d-68cbeb5d2fc7", "address": "fa:16:3e:a5:ad:80", "network": {"id": "e012ca9f-236a-42fd-a444-759508fbba7b", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-567894073-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.132", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b73ee7e490914f54925597f38c8cc05b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9f3a2eb5-353f-45c5-a73b-869626f4bb13", "external-id": "nsx-vlan-transportzone-411", "segmentation_id": 411, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8b7a9c37-10", "ovs_interfaceid": "8b7a9c37-1009-4d15-b75d-68cbeb5d2fc7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1080.146882] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5918f308-8f06-420f-bd22-41c30dd7e4b4 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.160795] env[61839]: DEBUG nova.virt.block_device [None req-279fa87f-e9be-4e4a-b5f3-6b0903e84a6f tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 25c574c4-e39b-4009-a562-a4a5bf74a40c] Updating existing volume attachment record: d69a45c2-571a-4b8e-b23a-affe72cee8b6 {{(pid=61839) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1080.207800] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb26a8a5-6e27-44f8-9603-eb44dd6edd71 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.214549] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-daf7eee7-c614-4c76-9727-c9d37b28607d tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: 506ea987-ea38-444b-81b7-f5343de14e4f] Powering on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1080.229520] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-455cee23-2285-4742-9b97-43746dc9a281 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.231953] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e86987b8-0097-4a26-963b-1cb9e4a8fd64 {{(pid=61839) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.253226] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-e8325cb9-ae88-479e-8845-fb08b904dac5 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 625a8fc1-23fc-4035-855f-3d3a963cdcea] Updating instance '625a8fc1-23fc-4035-855f-3d3a963cdcea' progress to 67 {{(pid=61839) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1080.257492] env[61839]: DEBUG oslo_vmware.api [None req-daf7eee7-c614-4c76-9727-c9d37b28607d tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Waiting for the task: (returnval){ [ 1080.257492] env[61839]: value = "task-1315051" [ 1080.257492] env[61839]: _type = "Task" [ 1080.257492] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1080.265505] env[61839]: DEBUG oslo_vmware.api [None req-daf7eee7-c614-4c76-9727-c9d37b28607d tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': task-1315051, 'name': PowerOnVM_Task} progress is 33%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1080.595428] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] [instance: e47c08c6-5de3-48b0-8327-57ddb273555f] Instance has had 0 of 5 cleanup attempts {{(pid=61839) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1080.629952] env[61839]: DEBUG oslo_concurrency.lockutils [None req-ebefa2c6-0a2f-4c23-a794-83aa597b3b8d tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Acquiring lock "refresh_cache-dbd34858-9806-4d3f-b829-948651056da2" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1080.630184] env[61839]: DEBUG oslo_concurrency.lockutils [None req-ebefa2c6-0a2f-4c23-a794-83aa597b3b8d tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Acquired lock "refresh_cache-dbd34858-9806-4d3f-b829-948651056da2" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1080.630373] env[61839]: DEBUG nova.network.neutron [None req-ebefa2c6-0a2f-4c23-a794-83aa597b3b8d tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: dbd34858-9806-4d3f-b829-948651056da2] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1080.643194] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0b9fdc0e-074c-47a8-bcba-feb8c13e1519 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Releasing lock "refresh_cache-a1defab7-8433-411d-b7e2-c31f6a34b8e0" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1080.774023] env[61839]: DEBUG oslo_vmware.api [None req-daf7eee7-c614-4c76-9727-c9d37b28607d tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': task-1315051, 'name': PowerOnVM_Task, 'duration_secs': 0.471156} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1080.774487] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-daf7eee7-c614-4c76-9727-c9d37b28607d tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: 506ea987-ea38-444b-81b7-f5343de14e4f] Powered on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1080.774911] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-daf7eee7-c614-4c76-9727-c9d37b28607d tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: 506ea987-ea38-444b-81b7-f5343de14e4f] Updating instance '506ea987-ea38-444b-81b7-f5343de14e4f' progress to 100 {{(pid=61839) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1080.928042] env[61839]: DEBUG nova.compute.manager [req-b0124ad4-9faa-4799-a827-7b74b5076c92 req-ac84bad6-fb4c-497b-bb40-ac4f0b26274c service nova] [instance: a1defab7-8433-411d-b7e2-c31f6a34b8e0] Received event network-vif-unplugged-8b7a9c37-1009-4d15-b75d-68cbeb5d2fc7 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1080.928302] env[61839]: DEBUG oslo_concurrency.lockutils [req-b0124ad4-9faa-4799-a827-7b74b5076c92 req-ac84bad6-fb4c-497b-bb40-ac4f0b26274c service nova] Acquiring lock "a1defab7-8433-411d-b7e2-c31f6a34b8e0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1080.928531] env[61839]: DEBUG oslo_concurrency.lockutils [req-b0124ad4-9faa-4799-a827-7b74b5076c92 req-ac84bad6-fb4c-497b-bb40-ac4f0b26274c service nova] Lock "a1defab7-8433-411d-b7e2-c31f6a34b8e0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1080.928708] env[61839]: DEBUG oslo_concurrency.lockutils [req-b0124ad4-9faa-4799-a827-7b74b5076c92 req-ac84bad6-fb4c-497b-bb40-ac4f0b26274c service nova] Lock "a1defab7-8433-411d-b7e2-c31f6a34b8e0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1080.928879] env[61839]: DEBUG nova.compute.manager [req-b0124ad4-9faa-4799-a827-7b74b5076c92 req-ac84bad6-fb4c-497b-bb40-ac4f0b26274c service nova] [instance: a1defab7-8433-411d-b7e2-c31f6a34b8e0] No waiting events found dispatching network-vif-unplugged-8b7a9c37-1009-4d15-b75d-68cbeb5d2fc7 {{(pid=61839) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1080.929180] env[61839]: WARNING nova.compute.manager [req-b0124ad4-9faa-4799-a827-7b74b5076c92 req-ac84bad6-fb4c-497b-bb40-ac4f0b26274c service nova] [instance: a1defab7-8433-411d-b7e2-c31f6a34b8e0] Received unexpected event network-vif-unplugged-8b7a9c37-1009-4d15-b75d-68cbeb5d2fc7 for instance with vm_state shelved and task_state shelving_offloading. 
[ 1081.008652] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-0b9fdc0e-074c-47a8-bcba-feb8c13e1519 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: a1defab7-8433-411d-b7e2-c31f6a34b8e0] Destroying instance {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1081.009650] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6afb67be-00e0-45f1-b889-e43374416383 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.017064] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-0b9fdc0e-074c-47a8-bcba-feb8c13e1519 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: a1defab7-8433-411d-b7e2-c31f6a34b8e0] Unregistering the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1081.017314] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7d97f8cd-ce17-4660-9dcf-f6a7f66639ab {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.081177] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-0b9fdc0e-074c-47a8-bcba-feb8c13e1519 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: a1defab7-8433-411d-b7e2-c31f6a34b8e0] Unregistered the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1081.081519] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-0b9fdc0e-074c-47a8-bcba-feb8c13e1519 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: a1defab7-8433-411d-b7e2-c31f6a34b8e0] Deleting contents of the VM from datastore datastore2 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1081.081839] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-0b9fdc0e-074c-47a8-bcba-feb8c13e1519 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Deleting the datastore file [datastore2] a1defab7-8433-411d-b7e2-c31f6a34b8e0 {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1081.082331] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-85e7a0b1-b36b-4e84-8ed9-cfa28d00f6bf {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.089317] env[61839]: DEBUG oslo_vmware.api [None req-0b9fdc0e-074c-47a8-bcba-feb8c13e1519 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Waiting for the task: (returnval){ [ 1081.089317] env[61839]: value = "task-1315056" [ 1081.089317] env[61839]: _type = "Task" [ 1081.089317] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1081.096592] env[61839]: DEBUG oslo_vmware.api [None req-0b9fdc0e-074c-47a8-bcba-feb8c13e1519 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Task: {'id': task-1315056, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1081.099048] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] [instance: 56369316-a445-4a2a-a0a6-967074104e19] Instance has had 0 of 5 cleanup attempts {{(pid=61839) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1081.166333] env[61839]: WARNING nova.network.neutron [None req-ebefa2c6-0a2f-4c23-a794-83aa597b3b8d tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: dbd34858-9806-4d3f-b829-948651056da2] 41c98894-de91-45eb-a390-6217e0f9dca5 already exists in list: networks containing: ['41c98894-de91-45eb-a390-6217e0f9dca5']. ignoring it [ 1081.438103] env[61839]: DEBUG nova.network.neutron [None req-ebefa2c6-0a2f-4c23-a794-83aa597b3b8d tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: dbd34858-9806-4d3f-b829-948651056da2] Updating instance_info_cache with network_info: [{"id": "7ee0f326-ead2-4849-823d-9d652c5c339b", "address": "fa:16:3e:d1:53:f9", "network": {"id": "41c98894-de91-45eb-a390-6217e0f9dca5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1040806357-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.170", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7e03467b7fba46a9aac1562a1cb8368e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "572b7281-aad3-45fa-9cb2-fc1c70569948", "external-id": "nsx-vlan-transportzone-722", "segmentation_id": 722, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7ee0f326-ea", "ovs_interfaceid": "7ee0f326-ead2-4849-823d-9d652c5c339b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "da45fc64-4aa9-4af8-b820-45a5fe4da356", "address": "fa:16:3e:df:3c:4e", "network": {"id": "41c98894-de91-45eb-a390-6217e0f9dca5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1040806357-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7e03467b7fba46a9aac1562a1cb8368e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "572b7281-aad3-45fa-9cb2-fc1c70569948", "external-id": "nsx-vlan-transportzone-722", "segmentation_id": 722, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapda45fc64-4a", "ovs_interfaceid": "da45fc64-4aa9-4af8-b820-45a5fe4da356", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=61839) 
update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1081.599018] env[61839]: DEBUG oslo_vmware.api [None req-0b9fdc0e-074c-47a8-bcba-feb8c13e1519 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Task: {'id': task-1315056, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.153222} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1081.599234] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-0b9fdc0e-074c-47a8-bcba-feb8c13e1519 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Deleted the datastore file {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1081.599431] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-0b9fdc0e-074c-47a8-bcba-feb8c13e1519 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: a1defab7-8433-411d-b7e2-c31f6a34b8e0] Deleted contents of the VM from datastore datastore2 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1081.599616] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-0b9fdc0e-074c-47a8-bcba-feb8c13e1519 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: a1defab7-8433-411d-b7e2-c31f6a34b8e0] Instance destroyed {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1081.601785] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] [instance: ef49a6f5-27c3-4595-af65-d6a5aa47d4e4] Instance has had 0 of 5 cleanup attempts {{(pid=61839) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1081.623115] env[61839]: INFO nova.scheduler.client.report [None req-0b9fdc0e-074c-47a8-bcba-feb8c13e1519 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Deleted allocations for instance a1defab7-8433-411d-b7e2-c31f6a34b8e0 [ 1081.906539] env[61839]: DEBUG nova.network.neutron [None req-e8325cb9-ae88-479e-8845-fb08b904dac5 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 625a8fc1-23fc-4035-855f-3d3a963cdcea] Port da11baa5-354e-440b-a384-10cd83ff2715 binding to destination host cpu-1 is already ACTIVE {{(pid=61839) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1081.941439] env[61839]: DEBUG oslo_concurrency.lockutils [None req-ebefa2c6-0a2f-4c23-a794-83aa597b3b8d tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Releasing lock "refresh_cache-dbd34858-9806-4d3f-b829-948651056da2" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1081.942202] env[61839]: DEBUG oslo_concurrency.lockutils [None req-ebefa2c6-0a2f-4c23-a794-83aa597b3b8d tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Acquiring lock "dbd34858-9806-4d3f-b829-948651056da2" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1081.942373] env[61839]: DEBUG oslo_concurrency.lockutils [None req-ebefa2c6-0a2f-4c23-a794-83aa597b3b8d tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Acquired lock 
"dbd34858-9806-4d3f-b829-948651056da2" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1081.943238] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f701dae2-7503-4832-837d-2b25b51952e2 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.961507] env[61839]: DEBUG nova.virt.hardware [None req-ebefa2c6-0a2f-4c23-a794-83aa597b3b8d tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-18T16:51:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1081.961764] env[61839]: DEBUG nova.virt.hardware [None req-ebefa2c6-0a2f-4c23-a794-83aa597b3b8d tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Flavor limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1081.961928] env[61839]: DEBUG nova.virt.hardware [None req-ebefa2c6-0a2f-4c23-a794-83aa597b3b8d tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Image limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1081.962132] env[61839]: DEBUG nova.virt.hardware [None req-ebefa2c6-0a2f-4c23-a794-83aa597b3b8d tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Flavor pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1081.962304] env[61839]: DEBUG nova.virt.hardware [None req-ebefa2c6-0a2f-4c23-a794-83aa597b3b8d tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Image pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1081.962442] env[61839]: DEBUG nova.virt.hardware [None req-ebefa2c6-0a2f-4c23-a794-83aa597b3b8d tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1081.962653] env[61839]: DEBUG nova.virt.hardware [None req-ebefa2c6-0a2f-4c23-a794-83aa597b3b8d tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1081.962884] env[61839]: DEBUG nova.virt.hardware [None req-ebefa2c6-0a2f-4c23-a794-83aa597b3b8d tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Build topologies for 1 
vcpu(s) 1:1:1 {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1081.963036] env[61839]: DEBUG nova.virt.hardware [None req-ebefa2c6-0a2f-4c23-a794-83aa597b3b8d tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Got 1 possible topologies {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1081.963173] env[61839]: DEBUG nova.virt.hardware [None req-ebefa2c6-0a2f-4c23-a794-83aa597b3b8d tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1081.963355] env[61839]: DEBUG nova.virt.hardware [None req-ebefa2c6-0a2f-4c23-a794-83aa597b3b8d tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1081.969882] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-ebefa2c6-0a2f-4c23-a794-83aa597b3b8d tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: dbd34858-9806-4d3f-b829-948651056da2] Reconfiguring VM to attach interface {{(pid=61839) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1081.970239] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-11e9ce20-47e9-49bc-9188-91191535fb21 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.988517] env[61839]: DEBUG oslo_vmware.api [None req-ebefa2c6-0a2f-4c23-a794-83aa597b3b8d tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Waiting for the task: (returnval){ [ 1081.988517] env[61839]: value = "task-1315057" [ 1081.988517] env[61839]: _type = "Task" [ 1081.988517] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1081.997021] env[61839]: DEBUG oslo_vmware.api [None req-ebefa2c6-0a2f-4c23-a794-83aa597b3b8d tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': task-1315057, 'name': ReconfigVM_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1082.105194] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] [instance: c996d7db-4b73-4445-9989-4efb2cd852e8] Instance has had 0 of 5 cleanup attempts {{(pid=61839) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1082.127651] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0b9fdc0e-074c-47a8-bcba-feb8c13e1519 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1082.127976] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0b9fdc0e-074c-47a8-bcba-feb8c13e1519 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1082.128311] env[61839]: DEBUG nova.objects.instance [None req-0b9fdc0e-074c-47a8-bcba-feb8c13e1519 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Lazy-loading 'resources' on Instance uuid a1defab7-8433-411d-b7e2-c31f6a34b8e0 {{(pid=61839) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1082.499083] env[61839]: DEBUG oslo_vmware.api [None req-ebefa2c6-0a2f-4c23-a794-83aa597b3b8d tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': task-1315057, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1082.608645] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] [instance: c180cc04-79da-4529-a905-1985a85b7cf5] Instance has had 0 of 5 cleanup attempts {{(pid=61839) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1082.631300] env[61839]: DEBUG nova.objects.instance [None req-0b9fdc0e-074c-47a8-bcba-feb8c13e1519 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Lazy-loading 'numa_topology' on Instance uuid a1defab7-8433-411d-b7e2-c31f6a34b8e0 {{(pid=61839) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1082.721598] env[61839]: DEBUG oslo_concurrency.lockutils [None req-028c0fc7-ebb8-4b2f-8cd7-3d66b69227d0 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Acquiring lock "506ea987-ea38-444b-81b7-f5343de14e4f" by "nova.compute.manager.ComputeManager.confirm_resize.<locals>.do_confirm_resize" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1082.721938] env[61839]: DEBUG oslo_concurrency.lockutils [None req-028c0fc7-ebb8-4b2f-8cd7-3d66b69227d0 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Lock "506ea987-ea38-444b-81b7-f5343de14e4f" acquired by "nova.compute.manager.ComputeManager.confirm_resize.<locals>.do_confirm_resize" :: waited 0.001s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1082.722198] env[61839]: DEBUG nova.compute.manager [None req-028c0fc7-ebb8-4b2f-8cd7-3d66b69227d0 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: 506ea987-ea38-444b-81b7-f5343de14e4f] Going to confirm migration 4 {{(pid=61839) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:4783}} [ 1082.928569] env[61839]: DEBUG oslo_concurrency.lockutils [None req-e8325cb9-ae88-479e-8845-fb08b904dac5 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Acquiring lock "625a8fc1-23fc-4035-855f-3d3a963cdcea-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1082.928835] env[61839]: DEBUG oslo_concurrency.lockutils [None req-e8325cb9-ae88-479e-8845-fb08b904dac5 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Lock "625a8fc1-23fc-4035-855f-3d3a963cdcea-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1082.929048] env[61839]: DEBUG oslo_concurrency.lockutils [None req-e8325cb9-ae88-479e-8845-fb08b904dac5 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Lock "625a8fc1-23fc-4035-855f-3d3a963cdcea-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1082.999433] env[61839]: DEBUG oslo_vmware.api [None req-ebefa2c6-0a2f-4c23-a794-83aa597b3b8d tempest-AttachInterfacesTestJSON-1805184072 
tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': task-1315057, 'name': ReconfigVM_Task, 'duration_secs': 0.651952} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1082.999936] env[61839]: DEBUG oslo_concurrency.lockutils [None req-ebefa2c6-0a2f-4c23-a794-83aa597b3b8d tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Releasing lock "dbd34858-9806-4d3f-b829-948651056da2" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1083.000270] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-ebefa2c6-0a2f-4c23-a794-83aa597b3b8d tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: dbd34858-9806-4d3f-b829-948651056da2] Reconfigured VM to attach interface {{(pid=61839) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1083.112272] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] [instance: ce59c937-fc0b-464f-baaa-461c6f6c2d57] Instance has had 0 of 5 cleanup attempts {{(pid=61839) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1083.133962] env[61839]: DEBUG nova.objects.base [None req-0b9fdc0e-074c-47a8-bcba-feb8c13e1519 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Object Instance<a1defab7-8433-411d-b7e2-c31f6a34b8e0> lazy-loaded attributes: resources,numa_topology {{(pid=61839) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 1083.237017] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58c573e0-c895-4be2-a346-9f5d73a4d5a6 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.244577] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ea8c9d7-651d-476c-b614-f8cffaae8596 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.273433] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f23aca1b-a713-43aa-becd-249685bf36ea {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.280593] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b58efb6-0da0-4f7f-a9f9-070b67847f65 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.295829] env[61839]: DEBUG nova.compute.provider_tree [None req-0b9fdc0e-074c-47a8-bcba-feb8c13e1519 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1083.297988] env[61839]: DEBUG oslo_concurrency.lockutils [None req-028c0fc7-ebb8-4b2f-8cd7-3d66b69227d0 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Acquiring lock "refresh_cache-506ea987-ea38-444b-81b7-f5343de14e4f" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1083.298180] env[61839]: DEBUG oslo_concurrency.lockutils
[None req-028c0fc7-ebb8-4b2f-8cd7-3d66b69227d0 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Acquired lock "refresh_cache-506ea987-ea38-444b-81b7-f5343de14e4f" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1083.298354] env[61839]: DEBUG nova.network.neutron [None req-028c0fc7-ebb8-4b2f-8cd7-3d66b69227d0 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: 506ea987-ea38-444b-81b7-f5343de14e4f] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1083.298543] env[61839]: DEBUG nova.objects.instance [None req-028c0fc7-ebb8-4b2f-8cd7-3d66b69227d0 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Lazy-loading 'info_cache' on Instance uuid 506ea987-ea38-444b-81b7-f5343de14e4f {{(pid=61839) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1083.505143] env[61839]: DEBUG oslo_concurrency.lockutils [None req-ebefa2c6-0a2f-4c23-a794-83aa597b3b8d tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Lock "interface-dbd34858-9806-4d3f-b829-948651056da2-da45fc64-4aa9-4af8-b820-45a5fe4da356" "released" by "nova.compute.manager.ComputeManager.attach_interface.<locals>.do_attach_interface" :: held 6.049s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1083.615713] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] [instance: 2cb53e37-8b0b-48b7-a973-061b91df46df] Instance has had 0 of 5 cleanup attempts {{(pid=61839) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1083.801595] env[61839]: DEBUG nova.scheduler.client.report [None req-0b9fdc0e-074c-47a8-bcba-feb8c13e1519 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1083.964982] env[61839]: DEBUG oslo_concurrency.lockutils [None req-e8325cb9-ae88-479e-8845-fb08b904dac5 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Acquiring lock "refresh_cache-625a8fc1-23fc-4035-855f-3d3a963cdcea" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1083.965201] env[61839]: DEBUG oslo_concurrency.lockutils [None req-e8325cb9-ae88-479e-8845-fb08b904dac5 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Acquired lock "refresh_cache-625a8fc1-23fc-4035-855f-3d3a963cdcea" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1083.965382] env[61839]: DEBUG nova.network.neutron [None req-e8325cb9-ae88-479e-8845-fb08b904dac5 tempest-ServerActionsTestOtherB-75710309
tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 625a8fc1-23fc-4035-855f-3d3a963cdcea] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1084.119166] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] [instance: 3f86a0d5-30fd-42cc-bd40-14bce9d0e56f] Instance has had 0 of 5 cleanup attempts {{(pid=61839) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1084.307790] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0b9fdc0e-074c-47a8-bcba-feb8c13e1519 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.180s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1084.503943] env[61839]: DEBUG nova.network.neutron [None req-028c0fc7-ebb8-4b2f-8cd7-3d66b69227d0 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: 506ea987-ea38-444b-81b7-f5343de14e4f] Updating instance_info_cache with network_info: [{"id": "8ae9c8f9-030e-48f6-9368-4348d164e3b9", "address": "fa:16:3e:3f:6d:02", "network": {"id": "47b25b31-1262-485a-a847-2918f7fce488", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-914746239-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "25686a503d044467a1d641f14e14c65c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8c58d99d-ec12-4fc3-ab39-042b3f8cbb89", "external-id": "nsx-vlan-transportzone-44", "segmentation_id": 44, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8ae9c8f9-03", "ovs_interfaceid": "8ae9c8f9-030e-48f6-9368-4348d164e3b9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1084.593637] env[61839]: DEBUG nova.compute.manager [req-7c64fba1-c94c-4e2c-928c-af98d164405d req-4a35b7e3-ec8a-4f8f-af12-8d916bb058bb service nova] [instance: dbd34858-9806-4d3f-b829-948651056da2] Received event network-changed-da45fc64-4aa9-4af8-b820-45a5fe4da356 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1084.593908] env[61839]: DEBUG nova.compute.manager [req-7c64fba1-c94c-4e2c-928c-af98d164405d req-4a35b7e3-ec8a-4f8f-af12-8d916bb058bb service nova] [instance: dbd34858-9806-4d3f-b829-948651056da2] Refreshing instance network info cache due to event network-changed-da45fc64-4aa9-4af8-b820-45a5fe4da356. 
{{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1084.594192] env[61839]: DEBUG oslo_concurrency.lockutils [req-7c64fba1-c94c-4e2c-928c-af98d164405d req-4a35b7e3-ec8a-4f8f-af12-8d916bb058bb service nova] Acquiring lock "refresh_cache-dbd34858-9806-4d3f-b829-948651056da2" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1084.594398] env[61839]: DEBUG oslo_concurrency.lockutils [req-7c64fba1-c94c-4e2c-928c-af98d164405d req-4a35b7e3-ec8a-4f8f-af12-8d916bb058bb service nova] Acquired lock "refresh_cache-dbd34858-9806-4d3f-b829-948651056da2" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1084.594611] env[61839]: DEBUG nova.network.neutron [req-7c64fba1-c94c-4e2c-928c-af98d164405d req-4a35b7e3-ec8a-4f8f-af12-8d916bb058bb service nova] [instance: dbd34858-9806-4d3f-b829-948651056da2] Refreshing network info cache for port da45fc64-4aa9-4af8-b820-45a5fe4da356 {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1084.622433] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] [instance: 40c54d84-8e50-483a-b4e0-5f1cc72b0880] Instance has had 0 of 5 cleanup attempts {{(pid=61839) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1084.705141] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-279fa87f-e9be-4e4a-b5f3-6b0903e84a6f tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 25c574c4-e39b-4009-a562-a4a5bf74a40c] Volume attach. Driver type: vmdk {{(pid=61839) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1084.705405] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-279fa87f-e9be-4e4a-b5f3-6b0903e84a6f tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 25c574c4-e39b-4009-a562-a4a5bf74a40c] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-281454', 'volume_id': 'bf363321-5f33-48dc-9ed5-40c5f2cf0d88', 'name': 'volume-bf363321-5f33-48dc-9ed5-40c5f2cf0d88', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '25c574c4-e39b-4009-a562-a4a5bf74a40c', 'attached_at': '', 'detached_at': '', 'volume_id': 'bf363321-5f33-48dc-9ed5-40c5f2cf0d88', 'serial': 'bf363321-5f33-48dc-9ed5-40c5f2cf0d88'} {{(pid=61839) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1084.706385] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a686ecd8-5ac7-4276-bd0b-8bfc19f0b3a1 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.723205] env[61839]: DEBUG nova.network.neutron [None req-e8325cb9-ae88-479e-8845-fb08b904dac5 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 625a8fc1-23fc-4035-855f-3d3a963cdcea] Updating instance_info_cache with network_info: [{"id": "da11baa5-354e-440b-a384-10cd83ff2715", "address": "fa:16:3e:1c:dc:5a", "network": {"id": "54f10e74-ee7b-499c-a6b6-a27d337719cd", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-79581761-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": 
[{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.143", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5f789f3900a347b59c491e9d141fb9e7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe20ef0e-0991-44d7-887d-08dddac0b56b", "external-id": "nsx-vlan-transportzone-991", "segmentation_id": 991, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapda11baa5-35", "ovs_interfaceid": "da11baa5-354e-440b-a384-10cd83ff2715", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1084.724778] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7aa0a58f-94ac-45fa-bea5-5ee16edb59f4 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.750673] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-279fa87f-e9be-4e4a-b5f3-6b0903e84a6f tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 25c574c4-e39b-4009-a562-a4a5bf74a40c] Reconfiguring VM instance instance-00000067 to attach disk [datastore2] volume-bf363321-5f33-48dc-9ed5-40c5f2cf0d88/volume-bf363321-5f33-48dc-9ed5-40c5f2cf0d88.vmdk or device None with type thin {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1084.751517] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7bdce4c0-5e5e-4d43-bf9c-d17a72bf2f95 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.770097] env[61839]: DEBUG oslo_vmware.api [None req-279fa87f-e9be-4e4a-b5f3-6b0903e84a6f tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Waiting for the task: (returnval){ [ 1084.770097] env[61839]: value = "task-1315059" [ 1084.770097] env[61839]: _type = "Task" [ 1084.770097] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1084.779739] env[61839]: DEBUG oslo_vmware.api [None req-279fa87f-e9be-4e4a-b5f3-6b0903e84a6f tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1315059, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.817706] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0b9fdc0e-074c-47a8-bcba-feb8c13e1519 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Lock "a1defab7-8433-411d-b7e2-c31f6a34b8e0" "released" by "nova.compute.manager.ComputeManager.shelve_instance.<locals>.do_shelve_instance" :: held 19.688s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1085.007019] env[61839]: DEBUG oslo_concurrency.lockutils [None req-028c0fc7-ebb8-4b2f-8cd7-3d66b69227d0 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Releasing lock "refresh_cache-506ea987-ea38-444b-81b7-f5343de14e4f" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1085.007128] env[61839]: DEBUG nova.objects.instance [None req-028c0fc7-ebb8-4b2f-8cd7-3d66b69227d0 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Lazy-loading 'migration_context' on Instance uuid 506ea987-ea38-444b-81b7-f5343de14e4f {{(pid=61839) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1085.128477] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] [instance: 62959833-5834-4c0a-bf4e-3ac1157b3b0c] Instance has had 0 of 5 cleanup attempts {{(pid=61839) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1085.228092] env[61839]: DEBUG oslo_concurrency.lockutils [None req-e8325cb9-ae88-479e-8845-fb08b904dac5 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Releasing lock "refresh_cache-625a8fc1-23fc-4035-855f-3d3a963cdcea" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1085.280574] env[61839]: DEBUG oslo_vmware.api [None req-279fa87f-e9be-4e4a-b5f3-6b0903e84a6f tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1315059, 'name': ReconfigVM_Task, 'duration_secs': 0.351788} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1085.280859] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-279fa87f-e9be-4e4a-b5f3-6b0903e84a6f tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 25c574c4-e39b-4009-a562-a4a5bf74a40c] Reconfigured VM instance instance-00000067 to attach disk [datastore2] volume-bf363321-5f33-48dc-9ed5-40c5f2cf0d88/volume-bf363321-5f33-48dc-9ed5-40c5f2cf0d88.vmdk or device None with type thin {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1085.285798] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-203b5cdf-29da-4831-a336-56b2fee1c72f {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.301237] env[61839]: DEBUG oslo_vmware.api [None req-279fa87f-e9be-4e4a-b5f3-6b0903e84a6f tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Waiting for the task: (returnval){ [ 1085.301237] env[61839]: value = "task-1315060" [ 1085.301237] env[61839]: _type = "Task" [ 1085.301237] env[61839]: } to complete.
{{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1085.309143] env[61839]: DEBUG oslo_vmware.api [None req-279fa87f-e9be-4e4a-b5f3-6b0903e84a6f tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1315060, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1085.309951] env[61839]: DEBUG nova.network.neutron [req-7c64fba1-c94c-4e2c-928c-af98d164405d req-4a35b7e3-ec8a-4f8f-af12-8d916bb058bb service nova] [instance: dbd34858-9806-4d3f-b829-948651056da2] Updated VIF entry in instance network info cache for port da45fc64-4aa9-4af8-b820-45a5fe4da356. {{(pid=61839) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1085.310372] env[61839]: DEBUG nova.network.neutron [req-7c64fba1-c94c-4e2c-928c-af98d164405d req-4a35b7e3-ec8a-4f8f-af12-8d916bb058bb service nova] [instance: dbd34858-9806-4d3f-b829-948651056da2] Updating instance_info_cache with network_info: [{"id": "7ee0f326-ead2-4849-823d-9d652c5c339b", "address": "fa:16:3e:d1:53:f9", "network": {"id": "41c98894-de91-45eb-a390-6217e0f9dca5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1040806357-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.170", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7e03467b7fba46a9aac1562a1cb8368e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "572b7281-aad3-45fa-9cb2-fc1c70569948", "external-id": "nsx-vlan-transportzone-722", "segmentation_id": 722, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7ee0f326-ea", "ovs_interfaceid": "7ee0f326-ead2-4849-823d-9d652c5c339b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "da45fc64-4aa9-4af8-b820-45a5fe4da356", "address": "fa:16:3e:df:3c:4e", "network": {"id": "41c98894-de91-45eb-a390-6217e0f9dca5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1040806357-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7e03467b7fba46a9aac1562a1cb8368e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "572b7281-aad3-45fa-9cb2-fc1c70569948", "external-id": "nsx-vlan-transportzone-722", "segmentation_id": 722, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapda45fc64-4a", "ovs_interfaceid": "da45fc64-4aa9-4af8-b820-45a5fe4da356", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=61839) 
update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1085.510236] env[61839]: DEBUG nova.objects.base [None req-028c0fc7-ebb8-4b2f-8cd7-3d66b69227d0 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Object Instance<506ea987-ea38-444b-81b7-f5343de14e4f> lazy-loaded attributes: info_cache,migration_context {{(pid=61839) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 1085.511207] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d3839e5-2ed1-48c2-8a7b-b0e13fde874f {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.530932] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9cd559d0-d4bb-49b6-8487-0fba133a76b3 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.535997] env[61839]: DEBUG oslo_vmware.api [None req-028c0fc7-ebb8-4b2f-8cd7-3d66b69227d0 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Waiting for the task: (returnval){ [ 1085.535997] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52ef1683-fcc8-9934-d341-6dfcfe2926b8" [ 1085.535997] env[61839]: _type = "Task" [ 1085.535997] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1085.543079] env[61839]: DEBUG oslo_vmware.api [None req-028c0fc7-ebb8-4b2f-8cd7-3d66b69227d0 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52ef1683-fcc8-9934-d341-6dfcfe2926b8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1085.630997] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] [instance: 49d4720b-83e3-47d9-b727-5bb255de2e7c] Instance has had 0 of 5 cleanup attempts {{(pid=61839) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1085.738191] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46d2737c-807e-4c0d-a9ee-218e7d86db25 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.745374] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5dbca87f-39fb-47ea-9ae0-025efaab4279 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.810105] env[61839]: DEBUG oslo_vmware.api [None req-279fa87f-e9be-4e4a-b5f3-6b0903e84a6f tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1315060, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1085.812867] env[61839]: DEBUG oslo_concurrency.lockutils [req-7c64fba1-c94c-4e2c-928c-af98d164405d req-4a35b7e3-ec8a-4f8f-af12-8d916bb058bb service nova] Releasing lock "refresh_cache-dbd34858-9806-4d3f-b829-948651056da2" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1086.046413] env[61839]: DEBUG oslo_vmware.api [None req-028c0fc7-ebb8-4b2f-8cd7-3d66b69227d0 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52ef1683-fcc8-9934-d341-6dfcfe2926b8, 'name': SearchDatastore_Task, 'duration_secs': 0.008312} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1086.046552] env[61839]: DEBUG oslo_concurrency.lockutils [None req-028c0fc7-ebb8-4b2f-8cd7-3d66b69227d0 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1086.046802] env[61839]: DEBUG oslo_concurrency.lockutils [None req-028c0fc7-ebb8-4b2f-8cd7-3d66b69227d0 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1086.104542] env[61839]: DEBUG oslo_concurrency.lockutils [None req-8d7ff474-a6f5-476f-9ecc-ca51646b6dcf tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Acquiring lock "interface-dbd34858-9806-4d3f-b829-948651056da2-da45fc64-4aa9-4af8-b820-45a5fe4da356" by "nova.compute.manager.ComputeManager.detach_interface.<locals>.do_detach_interface" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1086.104809] env[61839]: DEBUG oslo_concurrency.lockutils [None req-8d7ff474-a6f5-476f-9ecc-ca51646b6dcf tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Lock "interface-dbd34858-9806-4d3f-b829-948651056da2-da45fc64-4aa9-4af8-b820-45a5fe4da356" acquired by "nova.compute.manager.ComputeManager.detach_interface.<locals>.do_detach_interface" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1086.133784] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] [instance: 39728872-2d30-48eb-90da-412f1e45971c] Instance has had 0 of 5 cleanup attempts {{(pid=61839) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1086.311288] env[61839]: DEBUG oslo_vmware.api [None req-279fa87f-e9be-4e4a-b5f3-6b0903e84a6f tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1315060, 'name': ReconfigVM_Task, 'duration_secs': 0.90024} completed successfully.
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1086.311611] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-279fa87f-e9be-4e4a-b5f3-6b0903e84a6f tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 25c574c4-e39b-4009-a562-a4a5bf74a40c] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-281454', 'volume_id': 'bf363321-5f33-48dc-9ed5-40c5f2cf0d88', 'name': 'volume-bf363321-5f33-48dc-9ed5-40c5f2cf0d88', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '25c574c4-e39b-4009-a562-a4a5bf74a40c', 'attached_at': '', 'detached_at': '', 'volume_id': 'bf363321-5f33-48dc-9ed5-40c5f2cf0d88', 'serial': 'bf363321-5f33-48dc-9ed5-40c5f2cf0d88'} {{(pid=61839) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1086.606943] env[61839]: DEBUG oslo_concurrency.lockutils [None req-8d7ff474-a6f5-476f-9ecc-ca51646b6dcf tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Acquiring lock "dbd34858-9806-4d3f-b829-948651056da2" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1086.607175] env[61839]: DEBUG oslo_concurrency.lockutils [None req-8d7ff474-a6f5-476f-9ecc-ca51646b6dcf tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Acquired lock "dbd34858-9806-4d3f-b829-948651056da2" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1086.608216] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6465f351-ac4a-4a8a-8021-fe0a8f0aba3a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.628996] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38f79dd5-fc68-49d0-b33f-36f672749e4e {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.648705] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] [instance: 694a5d4b-3673-406b-a24a-d37fad33e549] Instance has had 0 of 5 cleanup attempts {{(pid=61839) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1086.656258] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-8d7ff474-a6f5-476f-9ecc-ca51646b6dcf tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: dbd34858-9806-4d3f-b829-948651056da2] Reconfiguring VM to detach interface {{(pid=61839) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1086.658465] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7d3aeec3-ec59-478d-9690-5c1afa950837 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.677644] env[61839]: DEBUG oslo_vmware.api [None req-8d7ff474-a6f5-476f-9ecc-ca51646b6dcf tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Waiting for the task: (returnval){ [ 1086.677644] env[61839]: value = "task-1315061" [ 1086.677644] env[61839]: _type = "Task" [ 1086.677644] env[61839]: } to complete. 
{{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1086.687602] env[61839]: DEBUG oslo_vmware.api [None req-8d7ff474-a6f5-476f-9ecc-ca51646b6dcf tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': task-1315061, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1086.705139] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac651fc0-a37f-4eaa-bfa3-092098b3561f {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.712033] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f33b9461-fd7f-4b3e-83fb-95906a18f50a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.741905] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abb91196-7cac-493e-954c-c3bcf59181d7 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.749380] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8573116-19b5-466d-8237-d3424b1d930b {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.762297] env[61839]: DEBUG nova.compute.provider_tree [None req-028c0fc7-ebb8-4b2f-8cd7-3d66b69227d0 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1086.767434] env[61839]: DEBUG oslo_concurrency.lockutils [None req-8fcfcbf5-3f0a-491e-bfdb-c64d3d7aaef6 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Acquiring lock "a1defab7-8433-411d-b7e2-c31f6a34b8e0" by "nova.compute.manager.ComputeManager.unshelve_instance.<locals>.do_unshelve_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1086.767682] env[61839]: DEBUG oslo_concurrency.lockutils [None req-8fcfcbf5-3f0a-491e-bfdb-c64d3d7aaef6 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Lock "a1defab7-8433-411d-b7e2-c31f6a34b8e0" acquired by "nova.compute.manager.ComputeManager.unshelve_instance.<locals>.do_unshelve_instance" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1086.767869] env[61839]: INFO nova.compute.manager [None req-8fcfcbf5-3f0a-491e-bfdb-c64d3d7aaef6 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: a1defab7-8433-411d-b7e2-c31f6a34b8e0] Unshelving [ 1086.840274] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c6c8ae1-513a-4c92-9df3-913235fbc323 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.863170] env[61839]: DEBUG nova.objects.instance [None req-279fa87f-e9be-4e4a-b5f3-6b0903e84a6f
tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Lazy-loading 'flavor' on Instance uuid 25c574c4-e39b-4009-a562-a4a5bf74a40c {{(pid=61839) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1086.864807] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-525fcb24-5a55-445e-bd39-745098ac2ed7 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.872344] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-e8325cb9-ae88-479e-8845-fb08b904dac5 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 625a8fc1-23fc-4035-855f-3d3a963cdcea] Updating instance '625a8fc1-23fc-4035-855f-3d3a963cdcea' progress to 83 {{(pid=61839) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1087.160081] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] [instance: 406da948-71c7-4c28-9ee3-10af64b1ab51] Instance has had 0 of 5 cleanup attempts {{(pid=61839) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1087.191139] env[61839]: DEBUG oslo_vmware.api [None req-8d7ff474-a6f5-476f-9ecc-ca51646b6dcf tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': task-1315061, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1087.267616] env[61839]: DEBUG nova.scheduler.client.report [None req-028c0fc7-ebb8-4b2f-8cd7-3d66b69227d0 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1087.371078] env[61839]: DEBUG oslo_concurrency.lockutils [None req-279fa87f-e9be-4e4a-b5f3-6b0903e84a6f tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Lock "25c574c4-e39b-4009-a562-a4a5bf74a40c" "released" by "nova.compute.manager.ComputeManager.attach_volume.<locals>.do_attach_volume" :: held 7.278s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1087.378943] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8325cb9-ae88-479e-8845-fb08b904dac5 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 625a8fc1-23fc-4035-855f-3d3a963cdcea] Powering on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1087.379125] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-094064ee-451e-429b-95a6-51ab9f1e9c18 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.386740] env[61839]: DEBUG oslo_vmware.api [None req-e8325cb9-ae88-479e-8845-fb08b904dac5
tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Waiting for the task: (returnval){ [ 1087.386740] env[61839]: value = "task-1315062" [ 1087.386740] env[61839]: _type = "Task" [ 1087.386740] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1087.394173] env[61839]: DEBUG oslo_vmware.api [None req-e8325cb9-ae88-479e-8845-fb08b904dac5 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1315062, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1087.663432] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] [instance: 047080fa-8781-47b1-89d8-2e4c8031b164] Instance has had 0 of 5 cleanup attempts {{(pid=61839) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1087.689742] env[61839]: DEBUG oslo_vmware.api [None req-8d7ff474-a6f5-476f-9ecc-ca51646b6dcf tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': task-1315061, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1087.790935] env[61839]: DEBUG oslo_concurrency.lockutils [None req-8fcfcbf5-3f0a-491e-bfdb-c64d3d7aaef6 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1087.897730] env[61839]: DEBUG oslo_vmware.api [None req-e8325cb9-ae88-479e-8845-fb08b904dac5 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1315062, 'name': PowerOnVM_Task, 'duration_secs': 0.432842} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1087.898155] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8325cb9-ae88-479e-8845-fb08b904dac5 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 625a8fc1-23fc-4035-855f-3d3a963cdcea] Powered on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1087.898297] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-e8325cb9-ae88-479e-8845-fb08b904dac5 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 625a8fc1-23fc-4035-855f-3d3a963cdcea] Updating instance '625a8fc1-23fc-4035-855f-3d3a963cdcea' progress to 100 {{(pid=61839) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1088.167227] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] [instance: 6f43abec-51e2-40e4-8a0f-5a8617a9a9f8] Instance has had 0 of 5 cleanup attempts {{(pid=61839) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1088.192974] env[61839]: DEBUG oslo_vmware.api [None req-8d7ff474-a6f5-476f-9ecc-ca51646b6dcf tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': task-1315061, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1088.279418] env[61839]: DEBUG oslo_concurrency.lockutils [None req-028c0fc7-ebb8-4b2f-8cd7-3d66b69227d0 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.232s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1088.282435] env[61839]: DEBUG oslo_concurrency.lockutils [None req-8fcfcbf5-3f0a-491e-bfdb-c64d3d7aaef6 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.492s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1088.282671] env[61839]: DEBUG nova.objects.instance [None req-8fcfcbf5-3f0a-491e-bfdb-c64d3d7aaef6 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Lazy-loading 'pci_requests' on Instance uuid a1defab7-8433-411d-b7e2-c31f6a34b8e0 {{(pid=61839) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1088.670592] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] [instance: fc42bc8d-012e-4a5e-89fc-9b8da1c58fd4] Instance has had 0 of 5 cleanup attempts {{(pid=61839) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1088.690244] env[61839]: DEBUG oslo_vmware.api [None req-8d7ff474-a6f5-476f-9ecc-ca51646b6dcf tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': task-1315061, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1088.789047] env[61839]: DEBUG nova.objects.instance [None req-8fcfcbf5-3f0a-491e-bfdb-c64d3d7aaef6 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Lazy-loading 'numa_topology' on Instance uuid a1defab7-8433-411d-b7e2-c31f6a34b8e0 {{(pid=61839) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1088.837455] env[61839]: INFO nova.scheduler.client.report [None req-028c0fc7-ebb8-4b2f-8cd7-3d66b69227d0 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Deleted allocation for migration e23f1f34-5da9-4652-8613-b9cdd7e73734 [ 1088.861634] env[61839]: INFO nova.compute.manager [None req-d4141cf3-51df-4c8e-b676-b032690d2a4b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 25c574c4-e39b-4009-a562-a4a5bf74a40c] Rebuilding instance [ 1088.896933] env[61839]: DEBUG nova.compute.manager [None req-d4141cf3-51df-4c8e-b676-b032690d2a4b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 25c574c4-e39b-4009-a562-a4a5bf74a40c] Checking state {{(pid=61839) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1088.897973] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4493d0e5-fb6c-4423-84cf-395f77bea562 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.173359] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] [instance: a661cc10-5c4e-421b-b70b-189f0a613e8a] Instance has had 0 of 5 cleanup attempts {{(pid=61839) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1089.191401] env[61839]: DEBUG oslo_vmware.api [None req-8d7ff474-a6f5-476f-9ecc-ca51646b6dcf tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': task-1315061, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1089.293011] env[61839]: INFO nova.compute.claims [None req-8fcfcbf5-3f0a-491e-bfdb-c64d3d7aaef6 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: a1defab7-8433-411d-b7e2-c31f6a34b8e0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1089.342254] env[61839]: DEBUG oslo_concurrency.lockutils [None req-028c0fc7-ebb8-4b2f-8cd7-3d66b69227d0 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Lock "506ea987-ea38-444b-81b7-f5343de14e4f" "released" by "nova.compute.manager.ComputeManager.confirm_resize.<locals>.do_confirm_resize" :: held 6.620s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1089.409968] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-d4141cf3-51df-4c8e-b676-b032690d2a4b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 25c574c4-e39b-4009-a562-a4a5bf74a40c] Powering off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1089.410336] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e33a9705-d0df-426e-95b9-981e726a9e2b {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.418629] env[61839]: DEBUG oslo_vmware.api [None req-d4141cf3-51df-4c8e-b676-b032690d2a4b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Waiting for the task: (returnval){ [ 1089.418629] env[61839]: value = "task-1315063" [ 1089.418629] env[61839]: _type = "Task" [ 1089.418629] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1089.426789] env[61839]: DEBUG oslo_vmware.api [None req-d4141cf3-51df-4c8e-b676-b032690d2a4b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1315063, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1089.677138] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] [instance: 86525ea7-af75-4b10-85a1-c0fbab73ea5f] Instance has had 0 of 5 cleanup attempts {{(pid=61839) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1089.691668] env[61839]: DEBUG oslo_vmware.api [None req-8d7ff474-a6f5-476f-9ecc-ca51646b6dcf tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': task-1315061, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1089.927941] env[61839]: DEBUG oslo_vmware.api [None req-d4141cf3-51df-4c8e-b676-b032690d2a4b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1315063, 'name': PowerOffVM_Task, 'duration_secs': 0.248012} completed successfully.
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1089.928328] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-d4141cf3-51df-4c8e-b676-b032690d2a4b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 25c574c4-e39b-4009-a562-a4a5bf74a40c] Powered off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1089.973662] env[61839]: DEBUG nova.network.neutron [None req-3e9c49a4-6e8c-4a13-b5bd-2b12915bc206 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 625a8fc1-23fc-4035-855f-3d3a963cdcea] Port da11baa5-354e-440b-a384-10cd83ff2715 binding to destination host cpu-1 is already ACTIVE {{(pid=61839) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1089.974040] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3e9c49a4-6e8c-4a13-b5bd-2b12915bc206 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Acquiring lock "refresh_cache-625a8fc1-23fc-4035-855f-3d3a963cdcea" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1089.974221] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3e9c49a4-6e8c-4a13-b5bd-2b12915bc206 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Acquired lock "refresh_cache-625a8fc1-23fc-4035-855f-3d3a963cdcea" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1089.974403] env[61839]: DEBUG nova.network.neutron [None req-3e9c49a4-6e8c-4a13-b5bd-2b12915bc206 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 625a8fc1-23fc-4035-855f-3d3a963cdcea] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1089.980835] env[61839]: INFO nova.compute.manager [None req-d4141cf3-51df-4c8e-b676-b032690d2a4b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 25c574c4-e39b-4009-a562-a4a5bf74a40c] Detaching volume bf363321-5f33-48dc-9ed5-40c5f2cf0d88 [ 1090.012329] env[61839]: INFO nova.virt.block_device [None req-d4141cf3-51df-4c8e-b676-b032690d2a4b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 25c574c4-e39b-4009-a562-a4a5bf74a40c] Attempting to driver detach volume bf363321-5f33-48dc-9ed5-40c5f2cf0d88 from mountpoint /dev/sdb [ 1090.012572] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-d4141cf3-51df-4c8e-b676-b032690d2a4b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 25c574c4-e39b-4009-a562-a4a5bf74a40c] Volume detach. 
Driver type: vmdk {{(pid=61839) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1090.012758] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-d4141cf3-51df-4c8e-b676-b032690d2a4b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 25c574c4-e39b-4009-a562-a4a5bf74a40c] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-281454', 'volume_id': 'bf363321-5f33-48dc-9ed5-40c5f2cf0d88', 'name': 'volume-bf363321-5f33-48dc-9ed5-40c5f2cf0d88', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '25c574c4-e39b-4009-a562-a4a5bf74a40c', 'attached_at': '', 'detached_at': '', 'volume_id': 'bf363321-5f33-48dc-9ed5-40c5f2cf0d88', 'serial': 'bf363321-5f33-48dc-9ed5-40c5f2cf0d88'} {{(pid=61839) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1090.013618] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ce50d4f-e468-4309-b3a7-6b941a136260 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.035385] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6491da09-a26b-477c-81ca-1e60d566e88c {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.041669] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b4e5b45-b6bb-4ecd-8cd1-11a43a3496d7 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.060628] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5a2b92d-c178-406a-b48f-db16b1d749f3 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.075732] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-d4141cf3-51df-4c8e-b676-b032690d2a4b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] The volume has not been displaced from its original location: [datastore2] volume-bf363321-5f33-48dc-9ed5-40c5f2cf0d88/volume-bf363321-5f33-48dc-9ed5-40c5f2cf0d88.vmdk. No consolidation needed. 
{{(pid=61839) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1090.080862] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-d4141cf3-51df-4c8e-b676-b032690d2a4b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 25c574c4-e39b-4009-a562-a4a5bf74a40c] Reconfiguring VM instance instance-00000067 to detach disk 2001 {{(pid=61839) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1090.081128] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b15d8a5f-3e66-4cdd-80d9-f368a1b030d1 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.097858] env[61839]: DEBUG oslo_vmware.api [None req-d4141cf3-51df-4c8e-b676-b032690d2a4b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Waiting for the task: (returnval){ [ 1090.097858] env[61839]: value = "task-1315064" [ 1090.097858] env[61839]: _type = "Task" [ 1090.097858] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1090.104880] env[61839]: DEBUG oslo_vmware.api [None req-d4141cf3-51df-4c8e-b676-b032690d2a4b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1315064, 'name': ReconfigVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1090.181157] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] [instance: 603191b6-a4b0-451b-b98b-f3dbfb684300] Instance has had 0 of 5 cleanup attempts {{(pid=61839) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1090.192291] env[61839]: DEBUG oslo_vmware.api [None req-8d7ff474-a6f5-476f-9ecc-ca51646b6dcf tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': task-1315061, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1090.407503] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f97a693-c320-4d4b-9030-d77d019f6e26 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.415019] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d4f5dca-7cda-44f8-8721-08e7c0c66ec9 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.443635] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72c6554f-84d8-434d-bf41-1002fa4871ec {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.450691] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4286d231-834c-498c-b419-8e00fef30018 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.464765] env[61839]: DEBUG nova.compute.provider_tree [None req-8fcfcbf5-3f0a-491e-bfdb-c64d3d7aaef6 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1090.607794] env[61839]: DEBUG oslo_vmware.api [None req-d4141cf3-51df-4c8e-b676-b032690d2a4b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1315064, 'name': ReconfigVM_Task, 'duration_secs': 0.198602} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1090.608087] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-d4141cf3-51df-4c8e-b676-b032690d2a4b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 25c574c4-e39b-4009-a562-a4a5bf74a40c] Reconfigured VM instance instance-00000067 to detach disk 2001 {{(pid=61839) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1090.612865] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-86695f96-9ba5-4db6-a985-d09f7a193e1d {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.627503] env[61839]: DEBUG oslo_vmware.api [None req-d4141cf3-51df-4c8e-b676-b032690d2a4b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Waiting for the task: (returnval){ [ 1090.627503] env[61839]: value = "task-1315065" [ 1090.627503] env[61839]: _type = "Task" [ 1090.627503] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1090.635537] env[61839]: DEBUG oslo_vmware.api [None req-d4141cf3-51df-4c8e-b676-b032690d2a4b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1315065, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1090.687429] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] [instance: 5c29c188-a34b-4751-9f8b-166af7b15088] Instance has had 0 of 5 cleanup attempts {{(pid=61839) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1090.697717] env[61839]: DEBUG oslo_vmware.api [None req-8d7ff474-a6f5-476f-9ecc-ca51646b6dcf tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': task-1315061, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1090.741042] env[61839]: DEBUG oslo_concurrency.lockutils [None req-028c0fc7-ebb8-4b2f-8cd7-3d66b69227d0 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Acquiring lock "506ea987-ea38-444b-81b7-f5343de14e4f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1090.741459] env[61839]: DEBUG oslo_concurrency.lockutils [None req-028c0fc7-ebb8-4b2f-8cd7-3d66b69227d0 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Lock "506ea987-ea38-444b-81b7-f5343de14e4f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1090.741709] env[61839]: DEBUG oslo_concurrency.lockutils [None req-028c0fc7-ebb8-4b2f-8cd7-3d66b69227d0 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Acquiring lock "506ea987-ea38-444b-81b7-f5343de14e4f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1090.741902] env[61839]: DEBUG oslo_concurrency.lockutils [None req-028c0fc7-ebb8-4b2f-8cd7-3d66b69227d0 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Lock "506ea987-ea38-444b-81b7-f5343de14e4f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1090.742089] env[61839]: DEBUG oslo_concurrency.lockutils [None req-028c0fc7-ebb8-4b2f-8cd7-3d66b69227d0 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Lock "506ea987-ea38-444b-81b7-f5343de14e4f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1090.744238] env[61839]: INFO nova.compute.manager [None req-028c0fc7-ebb8-4b2f-8cd7-3d66b69227d0 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: 506ea987-ea38-444b-81b7-f5343de14e4f] Terminating instance [ 1090.746102] env[61839]: DEBUG nova.compute.manager [None req-028c0fc7-ebb8-4b2f-8cd7-3d66b69227d0 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: 
506ea987-ea38-444b-81b7-f5343de14e4f] Start destroying the instance on the hypervisor. {{(pid=61839) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1090.746312] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-028c0fc7-ebb8-4b2f-8cd7-3d66b69227d0 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: 506ea987-ea38-444b-81b7-f5343de14e4f] Destroying instance {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1090.747160] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e6a8403-5911-4f4a-8ebd-6298f16d4ad6 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.755096] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-028c0fc7-ebb8-4b2f-8cd7-3d66b69227d0 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: 506ea987-ea38-444b-81b7-f5343de14e4f] Powering off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1090.755330] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-31ebc30f-dc7a-41c4-a917-45f46dab3e6b {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.762315] env[61839]: DEBUG oslo_vmware.api [None req-028c0fc7-ebb8-4b2f-8cd7-3d66b69227d0 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Waiting for the task: (returnval){ [ 1090.762315] env[61839]: value = "task-1315066" [ 1090.762315] env[61839]: _type = "Task" [ 1090.762315] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1090.765881] env[61839]: DEBUG nova.network.neutron [None req-3e9c49a4-6e8c-4a13-b5bd-2b12915bc206 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 625a8fc1-23fc-4035-855f-3d3a963cdcea] Updating instance_info_cache with network_info: [{"id": "da11baa5-354e-440b-a384-10cd83ff2715", "address": "fa:16:3e:1c:dc:5a", "network": {"id": "54f10e74-ee7b-499c-a6b6-a27d337719cd", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-79581761-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.143", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5f789f3900a347b59c491e9d141fb9e7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe20ef0e-0991-44d7-887d-08dddac0b56b", "external-id": "nsx-vlan-transportzone-991", "segmentation_id": 991, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapda11baa5-35", "ovs_interfaceid": "da11baa5-354e-440b-a384-10cd83ff2715", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1090.771870] env[61839]: 
DEBUG oslo_vmware.api [None req-028c0fc7-ebb8-4b2f-8cd7-3d66b69227d0 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': task-1315066, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1090.968383] env[61839]: DEBUG nova.scheduler.client.report [None req-8fcfcbf5-3f0a-491e-bfdb-c64d3d7aaef6 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1091.137519] env[61839]: DEBUG oslo_vmware.api [None req-d4141cf3-51df-4c8e-b676-b032690d2a4b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1315065, 'name': ReconfigVM_Task, 'duration_secs': 0.208989} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1091.137821] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-d4141cf3-51df-4c8e-b676-b032690d2a4b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 25c574c4-e39b-4009-a562-a4a5bf74a40c] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-281454', 'volume_id': 'bf363321-5f33-48dc-9ed5-40c5f2cf0d88', 'name': 'volume-bf363321-5f33-48dc-9ed5-40c5f2cf0d88', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '25c574c4-e39b-4009-a562-a4a5bf74a40c', 'attached_at': '', 'detached_at': '', 'volume_id': 'bf363321-5f33-48dc-9ed5-40c5f2cf0d88', 'serial': 'bf363321-5f33-48dc-9ed5-40c5f2cf0d88'} {{(pid=61839) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1091.192979] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] [instance: e65da0fd-e877-4b25-a319-e4d65397056a] Instance has had 0 of 5 cleanup attempts {{(pid=61839) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1091.194762] env[61839]: DEBUG oslo_vmware.api [None req-8d7ff474-a6f5-476f-9ecc-ca51646b6dcf tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': task-1315061, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1091.268400] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3e9c49a4-6e8c-4a13-b5bd-2b12915bc206 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Releasing lock "refresh_cache-625a8fc1-23fc-4035-855f-3d3a963cdcea" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1091.273021] env[61839]: DEBUG oslo_vmware.api [None req-028c0fc7-ebb8-4b2f-8cd7-3d66b69227d0 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': task-1315066, 'name': PowerOffVM_Task, 'duration_secs': 0.200764} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1091.273600] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-028c0fc7-ebb8-4b2f-8cd7-3d66b69227d0 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: 506ea987-ea38-444b-81b7-f5343de14e4f] Powered off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1091.273600] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-028c0fc7-ebb8-4b2f-8cd7-3d66b69227d0 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: 506ea987-ea38-444b-81b7-f5343de14e4f] Unregistering the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1091.273822] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-22600ae1-424f-484c-a203-2d7612272fad {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.333763] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-028c0fc7-ebb8-4b2f-8cd7-3d66b69227d0 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: 506ea987-ea38-444b-81b7-f5343de14e4f] Unregistered the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1091.333999] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-028c0fc7-ebb8-4b2f-8cd7-3d66b69227d0 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: 506ea987-ea38-444b-81b7-f5343de14e4f] Deleting contents of the VM from datastore datastore1 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1091.334213] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-028c0fc7-ebb8-4b2f-8cd7-3d66b69227d0 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Deleting the datastore file [datastore1] 506ea987-ea38-444b-81b7-f5343de14e4f {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1091.334489] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-56c1d9b9-f6f5-4bb9-8ad0-66195673fc88 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.341350] env[61839]: DEBUG oslo_vmware.api [None req-028c0fc7-ebb8-4b2f-8cd7-3d66b69227d0 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Waiting for the task: (returnval){ [ 1091.341350] env[61839]: value = "task-1315068" [ 1091.341350] env[61839]: _type = 
"Task" [ 1091.341350] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1091.348978] env[61839]: DEBUG oslo_vmware.api [None req-028c0fc7-ebb8-4b2f-8cd7-3d66b69227d0 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': task-1315068, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1091.473670] env[61839]: DEBUG oslo_concurrency.lockutils [None req-8fcfcbf5-3f0a-491e-bfdb-c64d3d7aaef6 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.191s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1091.504112] env[61839]: INFO nova.network.neutron [None req-8fcfcbf5-3f0a-491e-bfdb-c64d3d7aaef6 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: a1defab7-8433-411d-b7e2-c31f6a34b8e0] Updating port 8b7a9c37-1009-4d15-b75d-68cbeb5d2fc7 with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 1091.696415] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] [instance: 687e6bf8-cf2a-4a9f-ad87-b42c4d0ced0a] Instance has had 0 of 5 cleanup attempts {{(pid=61839) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1091.698516] env[61839]: DEBUG oslo_vmware.api [None req-8d7ff474-a6f5-476f-9ecc-ca51646b6dcf tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': task-1315061, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1091.775596] env[61839]: DEBUG nova.compute.manager [None req-3e9c49a4-6e8c-4a13-b5bd-2b12915bc206 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 625a8fc1-23fc-4035-855f-3d3a963cdcea] Hypervisor driver does not support instance shared storage check, assuming it's not on shared storage {{(pid=61839) _is_instance_storage_shared /opt/stack/nova/nova/compute/manager.py:897}} [ 1091.851466] env[61839]: DEBUG oslo_vmware.api [None req-028c0fc7-ebb8-4b2f-8cd7-3d66b69227d0 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Task: {'id': task-1315068, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.147941} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1091.851708] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-028c0fc7-ebb8-4b2f-8cd7-3d66b69227d0 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Deleted the datastore file {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1091.851983] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-028c0fc7-ebb8-4b2f-8cd7-3d66b69227d0 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: 506ea987-ea38-444b-81b7-f5343de14e4f] Deleted contents of the VM from datastore datastore1 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1091.852185] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-028c0fc7-ebb8-4b2f-8cd7-3d66b69227d0 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: 506ea987-ea38-444b-81b7-f5343de14e4f] Instance destroyed {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1091.852363] env[61839]: INFO nova.compute.manager [None req-028c0fc7-ebb8-4b2f-8cd7-3d66b69227d0 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] [instance: 506ea987-ea38-444b-81b7-f5343de14e4f] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1091.852608] env[61839]: DEBUG oslo.service.loopingcall [None req-028c0fc7-ebb8-4b2f-8cd7-3d66b69227d0 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1091.852797] env[61839]: DEBUG nova.compute.manager [-] [instance: 506ea987-ea38-444b-81b7-f5343de14e4f] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1091.852892] env[61839]: DEBUG nova.network.neutron [-] [instance: 506ea987-ea38-444b-81b7-f5343de14e4f] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1092.117046] env[61839]: DEBUG nova.compute.manager [req-d11eae63-7739-49c3-ba99-d2cbaade16f9 req-091d7d26-fc64-48d1-b0a6-f01b67557ebe service nova] [instance: 506ea987-ea38-444b-81b7-f5343de14e4f] Received event network-vif-deleted-8ae9c8f9-030e-48f6-9368-4348d164e3b9 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1092.117292] env[61839]: INFO nova.compute.manager [req-d11eae63-7739-49c3-ba99-d2cbaade16f9 req-091d7d26-fc64-48d1-b0a6-f01b67557ebe service nova] [instance: 506ea987-ea38-444b-81b7-f5343de14e4f] Neutron deleted interface 8ae9c8f9-030e-48f6-9368-4348d164e3b9; detaching it from the instance and deleting it from the info cache [ 1092.117483] env[61839]: DEBUG nova.network.neutron [req-d11eae63-7739-49c3-ba99-d2cbaade16f9 req-091d7d26-fc64-48d1-b0a6-f01b67557ebe service nova] [instance: 506ea987-ea38-444b-81b7-f5343de14e4f] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1092.192084] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-d4141cf3-51df-4c8e-b676-b032690d2a4b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 25c574c4-e39b-4009-a562-a4a5bf74a40c] Powering off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1092.195544] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-105d4223-bd2c-4782-ad09-9263d2d58808 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.197234] env[61839]: DEBUG oslo_vmware.api [None req-8d7ff474-a6f5-476f-9ecc-ca51646b6dcf tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': task-1315061, 'name': ReconfigVM_Task} progress is 18%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1092.202697] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] [instance: 0bc0eefd-8a56-4cd6-a0b5-818cc437d917] Instance has had 0 of 5 cleanup attempts {{(pid=61839) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1092.204394] env[61839]: DEBUG oslo_vmware.api [None req-d4141cf3-51df-4c8e-b676-b032690d2a4b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Waiting for the task: (returnval){ [ 1092.204394] env[61839]: value = "task-1315069" [ 1092.204394] env[61839]: _type = "Task" [ 1092.204394] env[61839]: } to complete. 
{{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1092.212302] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-d4141cf3-51df-4c8e-b676-b032690d2a4b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 25c574c4-e39b-4009-a562-a4a5bf74a40c] VM already powered off {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1092.212535] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-d4141cf3-51df-4c8e-b676-b032690d2a4b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 25c574c4-e39b-4009-a562-a4a5bf74a40c] Volume detach. Driver type: vmdk {{(pid=61839) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1092.212740] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-d4141cf3-51df-4c8e-b676-b032690d2a4b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 25c574c4-e39b-4009-a562-a4a5bf74a40c] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-281454', 'volume_id': 'bf363321-5f33-48dc-9ed5-40c5f2cf0d88', 'name': 'volume-bf363321-5f33-48dc-9ed5-40c5f2cf0d88', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '25c574c4-e39b-4009-a562-a4a5bf74a40c', 'attached_at': '', 'detached_at': '', 'volume_id': 'bf363321-5f33-48dc-9ed5-40c5f2cf0d88', 'serial': 'bf363321-5f33-48dc-9ed5-40c5f2cf0d88'} {{(pid=61839) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1092.213465] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c32bfb1b-d0da-486c-a0dc-e87589e59980 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.231942] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fc693b4-cbbc-4fd0-af1f-79f056abc9c9 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.239104] env[61839]: WARNING nova.virt.vmwareapi.driver [None req-d4141cf3-51df-4c8e-b676-b032690d2a4b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 25c574c4-e39b-4009-a562-a4a5bf74a40c] The volume None does not exist!: nova.exception.DiskNotFound: Unable to find volume [ 1092.239387] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-d4141cf3-51df-4c8e-b676-b032690d2a4b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 25c574c4-e39b-4009-a562-a4a5bf74a40c] Destroying instance {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1092.240318] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b52155ee-7ec0-43f3-8ffb-fb3c91e97c56 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.246621] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-d4141cf3-51df-4c8e-b676-b032690d2a4b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 25c574c4-e39b-4009-a562-a4a5bf74a40c] Unregistering the VM {{(pid=61839) _destroy_instance 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1092.246843] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3b004147-3df2-4c13-a7b8-da732de73853 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.549594] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-d4141cf3-51df-4c8e-b676-b032690d2a4b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 25c574c4-e39b-4009-a562-a4a5bf74a40c] Unregistered the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1092.552017] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-d4141cf3-51df-4c8e-b676-b032690d2a4b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 25c574c4-e39b-4009-a562-a4a5bf74a40c] Deleting contents of the VM from datastore datastore2 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1092.552017] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-d4141cf3-51df-4c8e-b676-b032690d2a4b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Deleting the datastore file [datastore2] 25c574c4-e39b-4009-a562-a4a5bf74a40c {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1092.552017] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d1abc766-5863-4fb3-8e57-2a78e660113b {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.557244] env[61839]: DEBUG oslo_vmware.api [None req-d4141cf3-51df-4c8e-b676-b032690d2a4b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Waiting for the task: (returnval){ [ 1092.557244] env[61839]: value = "task-1315071" [ 1092.557244] env[61839]: _type = "Task" [ 1092.557244] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1092.566406] env[61839]: DEBUG oslo_vmware.api [None req-d4141cf3-51df-4c8e-b676-b032690d2a4b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1315071, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1092.590126] env[61839]: DEBUG nova.network.neutron [-] [instance: 506ea987-ea38-444b-81b7-f5343de14e4f] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1092.619647] env[61839]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6cd877b1-7361-4491-a1ec-62d2bb5c9659 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.631624] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3a04067-0a03-46d2-b0ae-e34598e42f89 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.658507] env[61839]: DEBUG nova.compute.manager [req-d11eae63-7739-49c3-ba99-d2cbaade16f9 req-091d7d26-fc64-48d1-b0a6-f01b67557ebe service nova] [instance: 506ea987-ea38-444b-81b7-f5343de14e4f] Detach interface failed, port_id=8ae9c8f9-030e-48f6-9368-4348d164e3b9, reason: Instance 506ea987-ea38-444b-81b7-f5343de14e4f could not be found. {{(pid=61839) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1092.695122] env[61839]: DEBUG oslo_vmware.api [None req-8d7ff474-a6f5-476f-9ecc-ca51646b6dcf tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': task-1315061, 'name': ReconfigVM_Task, 'duration_secs': 6.008709} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1092.695401] env[61839]: DEBUG oslo_concurrency.lockutils [None req-8d7ff474-a6f5-476f-9ecc-ca51646b6dcf tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Releasing lock "dbd34858-9806-4d3f-b829-948651056da2" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1092.695669] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-8d7ff474-a6f5-476f-9ecc-ca51646b6dcf tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: dbd34858-9806-4d3f-b829-948651056da2] Reconfigured VM to detach interface {{(pid=61839) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1092.705451] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1092.705673] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Cleaning up deleted instances with incomplete migration {{(pid=61839) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11266}} [ 1092.879031] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3e9c49a4-6e8c-4a13-b5bd-2b12915bc206 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1092.879031] env[61839]: DEBUG oslo_concurrency.lockutils [None 
req-3e9c49a4-6e8c-4a13-b5bd-2b12915bc206 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: waited 0.001s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1093.056154] env[61839]: DEBUG nova.compute.manager [req-44f2d05c-d92e-4534-9446-b1d49734f45f req-a7c22072-316e-4593-b6a7-8a71fbbce120 service nova] [instance: a1defab7-8433-411d-b7e2-c31f6a34b8e0] Received event network-vif-plugged-8b7a9c37-1009-4d15-b75d-68cbeb5d2fc7 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1093.056154] env[61839]: DEBUG oslo_concurrency.lockutils [req-44f2d05c-d92e-4534-9446-b1d49734f45f req-a7c22072-316e-4593-b6a7-8a71fbbce120 service nova] Acquiring lock "a1defab7-8433-411d-b7e2-c31f6a34b8e0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1093.056154] env[61839]: DEBUG oslo_concurrency.lockutils [req-44f2d05c-d92e-4534-9446-b1d49734f45f req-a7c22072-316e-4593-b6a7-8a71fbbce120 service nova] Lock "a1defab7-8433-411d-b7e2-c31f6a34b8e0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1093.056154] env[61839]: DEBUG oslo_concurrency.lockutils [req-44f2d05c-d92e-4534-9446-b1d49734f45f req-a7c22072-316e-4593-b6a7-8a71fbbce120 service nova] Lock "a1defab7-8433-411d-b7e2-c31f6a34b8e0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1093.056614] env[61839]: DEBUG nova.compute.manager [req-44f2d05c-d92e-4534-9446-b1d49734f45f req-a7c22072-316e-4593-b6a7-8a71fbbce120 service nova] [instance: a1defab7-8433-411d-b7e2-c31f6a34b8e0] No waiting events found dispatching network-vif-plugged-8b7a9c37-1009-4d15-b75d-68cbeb5d2fc7 {{(pid=61839) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1093.056940] env[61839]: WARNING nova.compute.manager [req-44f2d05c-d92e-4534-9446-b1d49734f45f req-a7c22072-316e-4593-b6a7-8a71fbbce120 service nova] [instance: a1defab7-8433-411d-b7e2-c31f6a34b8e0] Received unexpected event network-vif-plugged-8b7a9c37-1009-4d15-b75d-68cbeb5d2fc7 for instance with vm_state shelved_offloaded and task_state spawning. [ 1093.068031] env[61839]: DEBUG oslo_vmware.api [None req-d4141cf3-51df-4c8e-b676-b032690d2a4b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1315071, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.151655} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1093.068412] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-d4141cf3-51df-4c8e-b676-b032690d2a4b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Deleted the datastore file {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1093.068749] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-d4141cf3-51df-4c8e-b676-b032690d2a4b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 25c574c4-e39b-4009-a562-a4a5bf74a40c] Deleted contents of the VM from datastore datastore2 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1093.069100] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-d4141cf3-51df-4c8e-b676-b032690d2a4b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 25c574c4-e39b-4009-a562-a4a5bf74a40c] Instance destroyed {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1093.094081] env[61839]: INFO nova.compute.manager [-] [instance: 506ea987-ea38-444b-81b7-f5343de14e4f] Took 1.24 seconds to deallocate network for instance. [ 1093.147919] env[61839]: DEBUG oslo_concurrency.lockutils [None req-8fcfcbf5-3f0a-491e-bfdb-c64d3d7aaef6 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Acquiring lock "refresh_cache-a1defab7-8433-411d-b7e2-c31f6a34b8e0" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1093.147919] env[61839]: DEBUG oslo_concurrency.lockutils [None req-8fcfcbf5-3f0a-491e-bfdb-c64d3d7aaef6 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Acquired lock "refresh_cache-a1defab7-8433-411d-b7e2-c31f6a34b8e0" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1093.147919] env[61839]: DEBUG nova.network.neutron [None req-8fcfcbf5-3f0a-491e-bfdb-c64d3d7aaef6 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: a1defab7-8433-411d-b7e2-c31f6a34b8e0] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1093.381282] env[61839]: DEBUG nova.objects.instance [None req-3e9c49a4-6e8c-4a13-b5bd-2b12915bc206 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Lazy-loading 'migration_context' on Instance uuid 625a8fc1-23fc-4035-855f-3d3a963cdcea {{(pid=61839) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1093.573522] env[61839]: INFO nova.virt.block_device [None req-d4141cf3-51df-4c8e-b676-b032690d2a4b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 25c574c4-e39b-4009-a562-a4a5bf74a40c] Booting with volume bf363321-5f33-48dc-9ed5-40c5f2cf0d88 at /dev/sdb [ 1093.600012] env[61839]: DEBUG oslo_concurrency.lockutils [None req-028c0fc7-ebb8-4b2f-8cd7-3d66b69227d0 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61839) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1093.608429] env[61839]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8bd994ef-c264-46f3-92f1-2f6de1c81f48 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.618015] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70d6d4d2-73ff-4b4a-8405-2d8f020d6491 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.643428] env[61839]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9ff8ee18-fd12-44ac-9375-bf20d4559530 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.650801] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a3e9e80-fe91-4729-8cf5-149e78c20fae {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.678721] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5ba2d26-e0f7-4eaa-8a30-53d6dbde7fc5 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.684676] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35b587e9-8217-440e-a603-553013b51a8b {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.697140] env[61839]: DEBUG nova.virt.block_device [None req-d4141cf3-51df-4c8e-b676-b032690d2a4b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 25c574c4-e39b-4009-a562-a4a5bf74a40c] Updating existing volume attachment record: df9075e0-c7b7-4e11-befc-0362081f6692 {{(pid=61839) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1093.704390] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1093.704583] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1093.895423] env[61839]: DEBUG nova.network.neutron [None req-8fcfcbf5-3f0a-491e-bfdb-c64d3d7aaef6 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: a1defab7-8433-411d-b7e2-c31f6a34b8e0] Updating instance_info_cache with network_info: [{"id": "8b7a9c37-1009-4d15-b75d-68cbeb5d2fc7", "address": "fa:16:3e:a5:ad:80", "network": {"id": "e012ca9f-236a-42fd-a444-759508fbba7b", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-567894073-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.132", "type": "floating", "version": 4, "meta": 
{}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b73ee7e490914f54925597f38c8cc05b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9f3a2eb5-353f-45c5-a73b-869626f4bb13", "external-id": "nsx-vlan-transportzone-411", "segmentation_id": 411, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8b7a9c37-10", "ovs_interfaceid": "8b7a9c37-1009-4d15-b75d-68cbeb5d2fc7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1093.996555] env[61839]: DEBUG oslo_concurrency.lockutils [None req-8d7ff474-a6f5-476f-9ecc-ca51646b6dcf tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Acquiring lock "refresh_cache-dbd34858-9806-4d3f-b829-948651056da2" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1093.996844] env[61839]: DEBUG oslo_concurrency.lockutils [None req-8d7ff474-a6f5-476f-9ecc-ca51646b6dcf tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Acquired lock "refresh_cache-dbd34858-9806-4d3f-b829-948651056da2" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1093.997164] env[61839]: DEBUG nova.network.neutron [None req-8d7ff474-a6f5-476f-9ecc-ca51646b6dcf tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: dbd34858-9806-4d3f-b829-948651056da2] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1093.999100] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3df50988-1b68-4027-93ec-0acc709f2b1e {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.008548] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8932783f-bde9-4f01-977c-49aec205fcb0 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.042137] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3edcb9d8-0453-4994-8f2d-3816704debc7 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.049744] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01097425-50f5-4747-8ba5-526e3a5c129a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.064438] env[61839]: DEBUG nova.compute.provider_tree [None req-3e9c49a4-6e8c-4a13-b5bd-2b12915bc206 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Updating inventory in ProviderTree for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 
65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1094.210579] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1094.210749] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Starting heal instance info cache {{(pid=61839) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 1094.398072] env[61839]: DEBUG oslo_concurrency.lockutils [None req-8fcfcbf5-3f0a-491e-bfdb-c64d3d7aaef6 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Releasing lock "refresh_cache-a1defab7-8433-411d-b7e2-c31f6a34b8e0" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1094.423989] env[61839]: DEBUG nova.virt.hardware [None req-8fcfcbf5-3f0a-491e-bfdb-c64d3d7aaef6 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-18T16:51:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='1097fa3c9ef0f0e43950d2558562ae1e',container_format='bare',created_at=2024-10-18T17:02:04Z,direct_url=,disk_format='vmdk',id=f647401d-2329-4d44-8490-33aad50e15f2,min_disk=1,min_ram=0,name='tempest-AttachVolumeShelveTestJSON-server-719370782-shelved',owner='b73ee7e490914f54925597f38c8cc05b',properties=ImageMetaProps,protected=,size=31668736,status='active',tags=,updated_at=2024-10-18T17:02:17Z,virtual_size=,visibility=), allow threads: False {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1094.424315] env[61839]: DEBUG nova.virt.hardware [None req-8fcfcbf5-3f0a-491e-bfdb-c64d3d7aaef6 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Flavor limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1094.424484] env[61839]: DEBUG nova.virt.hardware [None req-8fcfcbf5-3f0a-491e-bfdb-c64d3d7aaef6 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Image limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1094.424670] env[61839]: DEBUG nova.virt.hardware [None req-8fcfcbf5-3f0a-491e-bfdb-c64d3d7aaef6 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Flavor pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1094.424819] env[61839]: DEBUG nova.virt.hardware [None req-8fcfcbf5-3f0a-491e-bfdb-c64d3d7aaef6 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Image pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1094.424969] 
env[61839]: DEBUG nova.virt.hardware [None req-8fcfcbf5-3f0a-491e-bfdb-c64d3d7aaef6 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1094.425198] env[61839]: DEBUG nova.virt.hardware [None req-8fcfcbf5-3f0a-491e-bfdb-c64d3d7aaef6 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1094.425365] env[61839]: DEBUG nova.virt.hardware [None req-8fcfcbf5-3f0a-491e-bfdb-c64d3d7aaef6 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1094.425535] env[61839]: DEBUG nova.virt.hardware [None req-8fcfcbf5-3f0a-491e-bfdb-c64d3d7aaef6 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Got 1 possible topologies {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1094.425700] env[61839]: DEBUG nova.virt.hardware [None req-8fcfcbf5-3f0a-491e-bfdb-c64d3d7aaef6 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1094.425874] env[61839]: DEBUG nova.virt.hardware [None req-8fcfcbf5-3f0a-491e-bfdb-c64d3d7aaef6 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1094.426848] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edb2aeed-1ac3-420f-abde-7cd6487f72e3 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.435182] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bcc6b35-cdff-467d-9467-1dc1e3426860 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.448129] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-8fcfcbf5-3f0a-491e-bfdb-c64d3d7aaef6 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: a1defab7-8433-411d-b7e2-c31f6a34b8e0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a5:ad:80', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9f3a2eb5-353f-45c5-a73b-869626f4bb13', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8b7a9c37-1009-4d15-b75d-68cbeb5d2fc7', 'vif_model': 'vmxnet3'}] {{(pid=61839) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1094.455404] env[61839]: DEBUG oslo.service.loopingcall [None req-8fcfcbf5-3f0a-491e-bfdb-c64d3d7aaef6 tempest-AttachVolumeShelveTestJSON-1020874626 
tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1094.455637] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a1defab7-8433-411d-b7e2-c31f6a34b8e0] Creating VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1094.455836] env[61839]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2bd8b0fd-725d-4f9d-a138-530f584d0d93 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.473698] env[61839]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1094.473698] env[61839]: value = "task-1315072" [ 1094.473698] env[61839]: _type = "Task" [ 1094.473698] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1094.480852] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1315072, 'name': CreateVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1094.588231] env[61839]: ERROR nova.scheduler.client.report [None req-3e9c49a4-6e8c-4a13-b5bd-2b12915bc206 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [req-d5048b18-bdb6-4422-bd33-ec9f8b5adbcc] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID cef329e6-1ccd-42a8-bbc4-109a06d1c908. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-d5048b18-bdb6-4422-bd33-ec9f8b5adbcc"}]} [ 1094.604554] env[61839]: DEBUG nova.scheduler.client.report [None req-3e9c49a4-6e8c-4a13-b5bd-2b12915bc206 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Refreshing inventories for resource provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1094.619899] env[61839]: DEBUG nova.scheduler.client.report [None req-3e9c49a4-6e8c-4a13-b5bd-2b12915bc206 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Updating ProviderTree inventory for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1094.620157] env[61839]: DEBUG nova.compute.provider_tree [None req-3e9c49a4-6e8c-4a13-b5bd-2b12915bc206 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Updating inventory in ProviderTree for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1094.631845] env[61839]: DEBUG nova.scheduler.client.report [None req-3e9c49a4-6e8c-4a13-b5bd-2b12915bc206 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Refreshing aggregate associations for resource provider cef329e6-1ccd-42a8-bbc4-109a06d1c908, aggregates: None {{(pid=61839) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1094.651318] env[61839]: DEBUG nova.scheduler.client.report [None req-3e9c49a4-6e8c-4a13-b5bd-2b12915bc206 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Refreshing trait associations for resource provider cef329e6-1ccd-42a8-bbc4-109a06d1c908, traits: COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=61839) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1094.771183] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c24642f-2eb5-4208-a434-b7ee6d22073d {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.778176] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-406e8e78-1f8f-4c2e-9a66-c6e2b966dc58 
{{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.820024] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32834869-3c25-4c2d-8ac1-64d47dbec377 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.825601] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5778c75-4f74-45f5-8f66-1b95ff0b943a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.840485] env[61839]: DEBUG nova.compute.provider_tree [None req-3e9c49a4-6e8c-4a13-b5bd-2b12915bc206 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Updating inventory in ProviderTree for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1094.937568] env[61839]: INFO nova.network.neutron [None req-8d7ff474-a6f5-476f-9ecc-ca51646b6dcf tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: dbd34858-9806-4d3f-b829-948651056da2] Port da45fc64-4aa9-4af8-b820-45a5fe4da356 from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. [ 1094.937938] env[61839]: DEBUG nova.network.neutron [None req-8d7ff474-a6f5-476f-9ecc-ca51646b6dcf tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: dbd34858-9806-4d3f-b829-948651056da2] Updating instance_info_cache with network_info: [{"id": "7ee0f326-ead2-4849-823d-9d652c5c339b", "address": "fa:16:3e:d1:53:f9", "network": {"id": "41c98894-de91-45eb-a390-6217e0f9dca5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1040806357-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7e03467b7fba46a9aac1562a1cb8368e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "572b7281-aad3-45fa-9cb2-fc1c70569948", "external-id": "nsx-vlan-transportzone-722", "segmentation_id": 722, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7ee0f326-ea", "ovs_interfaceid": "7ee0f326-ead2-4849-823d-9d652c5c339b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1094.983890] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1315072, 'name': CreateVM_Task} progress is 99%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1095.091534] env[61839]: DEBUG nova.compute.manager [req-f1da4177-cb47-4d31-a2bf-9831393a673e req-0256a42e-cbc3-4add-926b-96175a6daebd service nova] [instance: a1defab7-8433-411d-b7e2-c31f6a34b8e0] Received event network-changed-8b7a9c37-1009-4d15-b75d-68cbeb5d2fc7 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1095.091817] env[61839]: DEBUG nova.compute.manager [req-f1da4177-cb47-4d31-a2bf-9831393a673e req-0256a42e-cbc3-4add-926b-96175a6daebd service nova] [instance: a1defab7-8433-411d-b7e2-c31f6a34b8e0] Refreshing instance network info cache due to event network-changed-8b7a9c37-1009-4d15-b75d-68cbeb5d2fc7. {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1095.092374] env[61839]: DEBUG oslo_concurrency.lockutils [req-f1da4177-cb47-4d31-a2bf-9831393a673e req-0256a42e-cbc3-4add-926b-96175a6daebd service nova] Acquiring lock "refresh_cache-a1defab7-8433-411d-b7e2-c31f6a34b8e0" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1095.092537] env[61839]: DEBUG oslo_concurrency.lockutils [req-f1da4177-cb47-4d31-a2bf-9831393a673e req-0256a42e-cbc3-4add-926b-96175a6daebd service nova] Acquired lock "refresh_cache-a1defab7-8433-411d-b7e2-c31f6a34b8e0" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1095.092711] env[61839]: DEBUG nova.network.neutron [req-f1da4177-cb47-4d31-a2bf-9831393a673e req-0256a42e-cbc3-4add-926b-96175a6daebd service nova] [instance: a1defab7-8433-411d-b7e2-c31f6a34b8e0] Refreshing network info cache for port 8b7a9c37-1009-4d15-b75d-68cbeb5d2fc7 {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1095.352810] env[61839]: DEBUG nova.virt.hardware [None req-d4141cf3-51df-4c8e-b676-b032690d2a4b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-18T16:51:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-18T16:50:57Z,direct_url=,disk_format='vmdk',id=e497cc62-282a-4a70-9770-22d80d8a1013,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a98e034276e44534a9feace637762da7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-18T16:50:58Z,virtual_size=,visibility=), allow threads: False {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1095.353068] env[61839]: DEBUG nova.virt.hardware [None req-d4141cf3-51df-4c8e-b676-b032690d2a4b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Flavor limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1095.353317] env[61839]: DEBUG nova.virt.hardware [None req-d4141cf3-51df-4c8e-b676-b032690d2a4b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Image limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:352}} [ 1095.353700] env[61839]: DEBUG nova.virt.hardware [None req-d4141cf3-51df-4c8e-b676-b032690d2a4b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Flavor pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1095.353888] env[61839]: DEBUG nova.virt.hardware [None req-d4141cf3-51df-4c8e-b676-b032690d2a4b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Image pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1095.354077] env[61839]: DEBUG nova.virt.hardware [None req-d4141cf3-51df-4c8e-b676-b032690d2a4b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1095.354300] env[61839]: DEBUG nova.virt.hardware [None req-d4141cf3-51df-4c8e-b676-b032690d2a4b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1095.354469] env[61839]: DEBUG nova.virt.hardware [None req-d4141cf3-51df-4c8e-b676-b032690d2a4b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1095.354646] env[61839]: DEBUG nova.virt.hardware [None req-d4141cf3-51df-4c8e-b676-b032690d2a4b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Got 1 possible topologies {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1095.354814] env[61839]: DEBUG nova.virt.hardware [None req-d4141cf3-51df-4c8e-b676-b032690d2a4b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1095.354994] env[61839]: DEBUG nova.virt.hardware [None req-d4141cf3-51df-4c8e-b676-b032690d2a4b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1095.356299] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15aba1e8-4471-417a-870a-6b53f612859a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.364794] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd57cc81-ba7b-47f9-b802-7348a6221f33 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.379088] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-d4141cf3-51df-4c8e-b676-b032690d2a4b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] 
[instance: 25c574c4-e39b-4009-a562-a4a5bf74a40c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:34:ef:0a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'eed34ae1-5f7f-4deb-9db8-85eaa1e60c29', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '98c98e8e-abd7-4f81-9969-154f0e05908e', 'vif_model': 'vmxnet3'}] {{(pid=61839) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1095.386761] env[61839]: DEBUG oslo.service.loopingcall [None req-d4141cf3-51df-4c8e-b676-b032690d2a4b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1095.387906] env[61839]: DEBUG nova.scheduler.client.report [None req-3e9c49a4-6e8c-4a13-b5bd-2b12915bc206 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Updated inventory for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 with generation 134 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1095.388162] env[61839]: DEBUG nova.compute.provider_tree [None req-3e9c49a4-6e8c-4a13-b5bd-2b12915bc206 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Updating resource provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 generation from 134 to 135 during operation: update_inventory {{(pid=61839) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1095.388353] env[61839]: DEBUG nova.compute.provider_tree [None req-3e9c49a4-6e8c-4a13-b5bd-2b12915bc206 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Updating inventory in ProviderTree for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1095.393901] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 25c574c4-e39b-4009-a562-a4a5bf74a40c] Creating VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1095.393901] env[61839]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0d1c722c-6d98-427f-8c41-c6a387a587c0 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.411065] env[61839]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1095.411065] env[61839]: value = "task-1315073" [ 1095.411065] env[61839]: _type = "Task" [ 1095.411065] env[61839]: } to complete. 
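The records from [ 1094.588231] through [ 1095.388353] above trace a complete optimistic-concurrency round trip against Placement: the inventory PUT fails with 409 placement.concurrent_update, the report client refreshes inventories, aggregates, and traits to pick up the provider's current generation, and the retried write then succeeds, moving the generation from 134 to 135. A minimal sketch of that retry shape follows; the get_inventory/put_inventory callables are hypothetical stand-ins for the Placement REST calls, and only the conflict-handling logic is the point.

```python
import time

class GenerationConflict(Exception):
    """Stand-in for an HTTP 409 placement.concurrent_update response."""

def set_inventory_with_retry(get_inventory, put_inventory, new_inventories,
                             attempts=3):
    """PUT inventories, re-reading the provider generation after each 409."""
    for attempt in range(attempts):
        provider = get_inventory()  # {'generation': int, 'inventories': {...}}
        try:
            # The write carries the generation we last observed; Placement
            # rejects it if another writer bumped the generation first.
            return put_inventory(generation=provider['generation'],
                                 inventories=new_inventories)
        except GenerationConflict:
            time.sleep(0.1 * (attempt + 1))  # brief backoff before re-reading
    raise GenerationConflict('still conflicting after %d attempts' % attempts)
```

The generation works like a compare-and-swap token: no writer holds a lock, each just has to prove it saw the provider's latest state, which is why the successful retry above logs the generation advancing from 134 to 135.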
{{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1095.419689] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1315073, 'name': CreateVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1095.441551] env[61839]: DEBUG oslo_concurrency.lockutils [None req-8d7ff474-a6f5-476f-9ecc-ca51646b6dcf tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Releasing lock "refresh_cache-dbd34858-9806-4d3f-b829-948651056da2" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1095.484272] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1315072, 'name': CreateVM_Task} progress is 99%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1095.839629] env[61839]: DEBUG nova.network.neutron [req-f1da4177-cb47-4d31-a2bf-9831393a673e req-0256a42e-cbc3-4add-926b-96175a6daebd service nova] [instance: a1defab7-8433-411d-b7e2-c31f6a34b8e0] Updated VIF entry in instance network info cache for port 8b7a9c37-1009-4d15-b75d-68cbeb5d2fc7. {{(pid=61839) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1095.840070] env[61839]: DEBUG nova.network.neutron [req-f1da4177-cb47-4d31-a2bf-9831393a673e req-0256a42e-cbc3-4add-926b-96175a6daebd service nova] [instance: a1defab7-8433-411d-b7e2-c31f6a34b8e0] Updating instance_info_cache with network_info: [{"id": "8b7a9c37-1009-4d15-b75d-68cbeb5d2fc7", "address": "fa:16:3e:a5:ad:80", "network": {"id": "e012ca9f-236a-42fd-a444-759508fbba7b", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-567894073-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.132", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b73ee7e490914f54925597f38c8cc05b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9f3a2eb5-353f-45c5-a73b-869626f4bb13", "external-id": "nsx-vlan-transportzone-411", "segmentation_id": 411, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8b7a9c37-10", "ovs_interfaceid": "8b7a9c37-1009-4d15-b75d-68cbeb5d2fc7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1095.924961] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1315073, 'name': CreateVM_Task} progress is 25%. 
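The get_cpu_topology_constraints / _get_possible_cpu_topologies records at [ 1095.353 ]-[ 1095.355 ] above (and the identical run at [ 1094.424 ] earlier) enumerate valid sockets:cores:threads splits for one vCPU under 65536-wide limits and settle on 1:1:1. The following is an illustrative reconstruction of that enumeration, not Nova's actual implementation; the factorization rule and default limits are assumptions read off the log.

```python
def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                        max_threads=65536):
    """List (sockets, cores, threads) factorizations of vcpus within limits."""
    topologies = []
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        if vcpus % sockets:
            continue
        per_socket = vcpus // sockets
        for cores in range(1, min(per_socket, max_cores) + 1):
            if per_socket % cores:
                continue
            threads = per_socket // cores
            if threads <= max_threads:
                topologies.append((sockets, cores, threads))
    return topologies

# For the m1.nano flavor above: possible_topologies(1) == [(1, 1, 1)],
# matching "Got 1 possible topologies" and the sorted result 1:1:1.
```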
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1095.946021] env[61839]: DEBUG oslo_concurrency.lockutils [None req-8d7ff474-a6f5-476f-9ecc-ca51646b6dcf tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Lock "interface-dbd34858-9806-4d3f-b829-948651056da2-da45fc64-4aa9-4af8-b820-45a5fe4da356" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 9.841s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1095.959182] env[61839]: DEBUG oslo_concurrency.lockutils [None req-15f2500c-8f33-49fb-8999-1e6b16dce139 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Acquiring lock "interface-6b12ef55-b566-4a74-a794-b4e4c41debe1-da45fc64-4aa9-4af8-b820-45a5fe4da356" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1095.959598] env[61839]: DEBUG oslo_concurrency.lockutils [None req-15f2500c-8f33-49fb-8999-1e6b16dce139 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Lock "interface-6b12ef55-b566-4a74-a794-b4e4c41debe1-da45fc64-4aa9-4af8-b820-45a5fe4da356" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.001s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1095.960145] env[61839]: DEBUG nova.objects.instance [None req-15f2500c-8f33-49fb-8999-1e6b16dce139 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Lazy-loading 'flavor' on Instance uuid 6b12ef55-b566-4a74-a794-b4e4c41debe1 {{(pid=61839) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1095.990065] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1315072, 'name': CreateVM_Task, 'duration_secs': 1.32798} completed successfully. 
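Task task-1315072 above shows the driver's task-polling loop end to end: Folder.CreateVM_Task is created, _poll_task reports 0% and then 99%, and the final poll returns "completed successfully" with a duration_secs of 1.32798. A self-contained sketch of that loop follows; it is not oslo.vmware's implementation, and poll_task is a hypothetical callable returning {'state': ..., 'progress': ...} the way the driver reads TaskInfo off the vCenter Task managed object.

```python
import time

def wait_for_task(poll_task, interval=0.5, timeout=300.0):
    """Poll a vCenter-style task until it succeeds, fails, or times out."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = poll_task()
        if info['state'] == 'success':
            return info                       # e.g. carries duration_secs
        if info['state'] == 'error':
            raise RuntimeError(info.get('error', 'task failed'))
        # The "progress is N%" DEBUG lines above are emitted from this branch.
        time.sleep(interval)
    raise TimeoutError('task did not complete within %.0fs' % timeout)
```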
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1095.990248] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a1defab7-8433-411d-b7e2-c31f6a34b8e0] Created VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1095.991225] env[61839]: DEBUG oslo_concurrency.lockutils [None req-8fcfcbf5-3f0a-491e-bfdb-c64d3d7aaef6 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f647401d-2329-4d44-8490-33aad50e15f2" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1095.991496] env[61839]: DEBUG oslo_concurrency.lockutils [None req-8fcfcbf5-3f0a-491e-bfdb-c64d3d7aaef6 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f647401d-2329-4d44-8490-33aad50e15f2" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1095.992055] env[61839]: DEBUG oslo_concurrency.lockutils [None req-8fcfcbf5-3f0a-491e-bfdb-c64d3d7aaef6 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f647401d-2329-4d44-8490-33aad50e15f2" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1095.992522] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-76429d7c-a0fa-479a-a52a-4cf6d79156df {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.999030] env[61839]: DEBUG oslo_vmware.api [None req-8fcfcbf5-3f0a-491e-bfdb-c64d3d7aaef6 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Waiting for the task: (returnval){ [ 1095.999030] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52f55d9d-f96b-1636-692f-4b5824c6e3c6" [ 1095.999030] env[61839]: _type = "Task" [ 1095.999030] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1096.014702] env[61839]: DEBUG oslo_vmware.api [None req-8fcfcbf5-3f0a-491e-bfdb-c64d3d7aaef6 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52f55d9d-f96b-1636-692f-4b5824c6e3c6, 'name': SearchDatastore_Task} progress is 0%. 
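Before touching the image cache, the lockutils records at [ 1095.991 ]-[ 1095.992 ] above take a lock plus an "external semaphore" on the cache path, so concurrent builds of the same image serialize both inside one worker and across processes. A minimal sketch of that pattern, assuming oslo.concurrency is installed; the lock name mirrors the cache path in the records and the lock_path is illustrative.

```python
from oslo_concurrency import lockutils

IMAGE_CACHE = ('[datastore2] devstack-image-cache_base/'
               'f647401d-2329-4d44-8490-33aad50e15f2')

def with_image_cache_lock(work):
    # external=True adds a file-based lock so separate worker processes on
    # the same host serialize too, not just threads inside one service.
    with lockutils.lock(IMAGE_CACHE, external=True, lock_path='/tmp'):
        return work()  # e.g. search the datastore or fetch the image
```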
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1096.343529] env[61839]: DEBUG oslo_concurrency.lockutils [req-f1da4177-cb47-4d31-a2bf-9831393a673e req-0256a42e-cbc3-4add-926b-96175a6daebd service nova] Releasing lock "refresh_cache-a1defab7-8433-411d-b7e2-c31f6a34b8e0" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1096.400471] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3e9c49a4-6e8c-4a13-b5bd-2b12915bc206 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: held 3.521s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1096.405985] env[61839]: DEBUG oslo_concurrency.lockutils [None req-028c0fc7-ebb8-4b2f-8cd7-3d66b69227d0 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.806s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1096.406274] env[61839]: DEBUG oslo_concurrency.lockutils [None req-028c0fc7-ebb8-4b2f-8cd7-3d66b69227d0 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1096.423856] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1315073, 'name': CreateVM_Task, 'duration_secs': 0.623782} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1096.424567] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 25c574c4-e39b-4009-a562-a4a5bf74a40c] Created VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1096.425255] env[61839]: DEBUG oslo_concurrency.lockutils [None req-d4141cf3-51df-4c8e-b676-b032690d2a4b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1096.425428] env[61839]: DEBUG oslo_concurrency.lockutils [None req-d4141cf3-51df-4c8e-b676-b032690d2a4b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1096.425766] env[61839]: DEBUG oslo_concurrency.lockutils [None req-d4141cf3-51df-4c8e-b676-b032690d2a4b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1096.426283] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cb33ee86-c88d-4261-9bf2-6b958902bf09 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.431641] env[61839]: DEBUG oslo_vmware.api [None req-d4141cf3-51df-4c8e-b676-b032690d2a4b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Waiting for the task: (returnval){ [ 1096.431641] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]526bf2b3-3afe-034f-0015-5c38f0788385" [ 1096.431641] env[61839]: _type = "Task" [ 1096.431641] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1096.440906] env[61839]: DEBUG oslo_vmware.api [None req-d4141cf3-51df-4c8e-b676-b032690d2a4b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]526bf2b3-3afe-034f-0015-5c38f0788385, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1096.446225] env[61839]: INFO nova.scheduler.client.report [None req-028c0fc7-ebb8-4b2f-8cd7-3d66b69227d0 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Deleted allocations for instance 506ea987-ea38-444b-81b7-f5343de14e4f [ 1096.511844] env[61839]: DEBUG oslo_concurrency.lockutils [None req-8fcfcbf5-3f0a-491e-bfdb-c64d3d7aaef6 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f647401d-2329-4d44-8490-33aad50e15f2" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1096.512380] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-8fcfcbf5-3f0a-491e-bfdb-c64d3d7aaef6 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: a1defab7-8433-411d-b7e2-c31f6a34b8e0] Processing image f647401d-2329-4d44-8490-33aad50e15f2 {{(pid=61839) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1096.512471] env[61839]: DEBUG oslo_concurrency.lockutils [None req-8fcfcbf5-3f0a-491e-bfdb-c64d3d7aaef6 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f647401d-2329-4d44-8490-33aad50e15f2/f647401d-2329-4d44-8490-33aad50e15f2.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1096.512724] env[61839]: DEBUG oslo_concurrency.lockutils [None req-8fcfcbf5-3f0a-491e-bfdb-c64d3d7aaef6 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f647401d-2329-4d44-8490-33aad50e15f2/f647401d-2329-4d44-8490-33aad50e15f2.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1096.512724] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-8fcfcbf5-3f0a-491e-bfdb-c64d3d7aaef6 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1096.512987] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-aa186797-5213-49cd-af49-d15a9a6076c3 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.526733] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-8fcfcbf5-3f0a-491e-bfdb-c64d3d7aaef6 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1096.527114] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-8fcfcbf5-3f0a-491e-bfdb-c64d3d7aaef6 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61839) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1096.527912] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5635aa74-13ad-4d20-9207-8c36122da3f4 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.533031] env[61839]: DEBUG oslo_vmware.api [None req-8fcfcbf5-3f0a-491e-bfdb-c64d3d7aaef6 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Waiting for the task: (returnval){ [ 1096.533031] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]529cbba8-7d16-7f4a-2fce-2b43078b64ca" [ 1096.533031] env[61839]: _type = "Task" [ 1096.533031] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1096.541144] env[61839]: DEBUG oslo_vmware.api [None req-8fcfcbf5-3f0a-491e-bfdb-c64d3d7aaef6 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]529cbba8-7d16-7f4a-2fce-2b43078b64ca, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1096.609832] env[61839]: DEBUG nova.objects.instance [None req-15f2500c-8f33-49fb-8999-1e6b16dce139 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Lazy-loading 'pci_requests' on Instance uuid 6b12ef55-b566-4a74-a794-b4e4c41debe1 {{(pid=61839) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1096.948130] env[61839]: DEBUG oslo_vmware.api [None req-d4141cf3-51df-4c8e-b676-b032690d2a4b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]526bf2b3-3afe-034f-0015-5c38f0788385, 'name': SearchDatastore_Task, 'duration_secs': 0.034057} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1096.948583] env[61839]: DEBUG oslo_concurrency.lockutils [None req-d4141cf3-51df-4c8e-b676-b032690d2a4b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1096.948957] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-d4141cf3-51df-4c8e-b676-b032690d2a4b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 25c574c4-e39b-4009-a562-a4a5bf74a40c] Processing image e497cc62-282a-4a70-9770-22d80d8a1013 {{(pid=61839) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1096.949340] env[61839]: DEBUG oslo_concurrency.lockutils [None req-d4141cf3-51df-4c8e-b676-b032690d2a4b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1096.949600] env[61839]: DEBUG oslo_concurrency.lockutils [None req-d4141cf3-51df-4c8e-b676-b032690d2a4b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1096.949930] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-d4141cf3-51df-4c8e-b676-b032690d2a4b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1096.950304] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-14c7048f-38c8-462d-830c-13849e099ef9 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.957043] env[61839]: DEBUG oslo_concurrency.lockutils [None req-028c0fc7-ebb8-4b2f-8cd7-3d66b69227d0 tempest-DeleteServersTestJSON-1749383634 tempest-DeleteServersTestJSON-1749383634-project-member] Lock "506ea987-ea38-444b-81b7-f5343de14e4f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.216s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1096.972459] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-d4141cf3-51df-4c8e-b676-b032690d2a4b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1096.972764] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-d4141cf3-51df-4c8e-b676-b032690d2a4b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Folder [datastore2] devstack-image-cache_base created. 
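The ds_util/vmops records running through this stretch implement a check-then-fetch image cache: create the devstack-image-cache_base folder idempotently, search the datastore for the cached VMDK, and only download when the search comes up empty. A stdlib sketch of the same shape follows; download is a hypothetical callable and the local paths stand in for datastore paths.

```python
import os

def fetch_image_if_missing(cache_dir, image_id, download):
    """Return the cached image path, downloading it only when absent."""
    os.makedirs(cache_dir, exist_ok=True)       # idempotent "Creating directory"
    path = os.path.join(cache_dir, image_id + '.vmdk')
    if not os.path.exists(path):                # SearchDatastore_Task analogue
        staging = path + '.part'
        download(image_id, staging)             # stream to a staging name first
        os.rename(staging, path)                # publish atomically on success
    return path
```

Staging under a temporary name and renaming at the end keeps a half-written image from ever being visible at the cache path other workers are checking.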
{{(pid=61839) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1096.974699] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-77f5a1c7-d2d5-430f-b4c1-442b4463975c {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.982318] env[61839]: DEBUG oslo_vmware.api [None req-d4141cf3-51df-4c8e-b676-b032690d2a4b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Waiting for the task: (returnval){ [ 1096.982318] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]525d01e6-dd94-6163-e807-87fe5bac812c" [ 1096.982318] env[61839]: _type = "Task" [ 1096.982318] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1096.994276] env[61839]: DEBUG oslo_vmware.api [None req-d4141cf3-51df-4c8e-b676-b032690d2a4b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]525d01e6-dd94-6163-e807-87fe5bac812c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1097.043346] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-8fcfcbf5-3f0a-491e-bfdb-c64d3d7aaef6 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: a1defab7-8433-411d-b7e2-c31f6a34b8e0] Preparing fetch location {{(pid=61839) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1097.043635] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-8fcfcbf5-3f0a-491e-bfdb-c64d3d7aaef6 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: a1defab7-8433-411d-b7e2-c31f6a34b8e0] Fetch image to [datastore2] OSTACK_IMG_3d4ffe95-bfb6-46a0-99a3-9526e05f7266/OSTACK_IMG_3d4ffe95-bfb6-46a0-99a3-9526e05f7266.vmdk {{(pid=61839) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1097.043829] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-8fcfcbf5-3f0a-491e-bfdb-c64d3d7aaef6 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: a1defab7-8433-411d-b7e2-c31f6a34b8e0] Downloading stream optimized image f647401d-2329-4d44-8490-33aad50e15f2 to [datastore2] OSTACK_IMG_3d4ffe95-bfb6-46a0-99a3-9526e05f7266/OSTACK_IMG_3d4ffe95-bfb6-46a0-99a3-9526e05f7266.vmdk on the data store datastore2 as vApp {{(pid=61839) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1097.044015] env[61839]: DEBUG nova.virt.vmwareapi.images [None req-8fcfcbf5-3f0a-491e-bfdb-c64d3d7aaef6 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: a1defab7-8433-411d-b7e2-c31f6a34b8e0] Downloading image file data f647401d-2329-4d44-8490-33aad50e15f2 to the ESX as VM named 'OSTACK_IMG_3d4ffe95-bfb6-46a0-99a3-9526e05f7266' {{(pid=61839) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1097.112769] env[61839]: DEBUG nova.objects.base [None req-15f2500c-8f33-49fb-8999-1e6b16dce139 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Object Instance<6b12ef55-b566-4a74-a794-b4e4c41debe1> lazy-loaded attributes: 
flavor,pci_requests {{(pid=61839) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 1097.113018] env[61839]: DEBUG nova.network.neutron [None req-15f2500c-8f33-49fb-8999-1e6b16dce139 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: 6b12ef55-b566-4a74-a794-b4e4c41debe1] allocate_for_instance() {{(pid=61839) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1097.120897] env[61839]: DEBUG nova.compute.manager [req-371cccd0-a85c-4ea9-844c-a78c427d949a req-093ac3cb-cf66-40b0-8873-0527bd0da8a7 service nova] [instance: dbd34858-9806-4d3f-b829-948651056da2] Received event network-changed-7ee0f326-ead2-4849-823d-9d652c5c339b {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1097.121118] env[61839]: DEBUG nova.compute.manager [req-371cccd0-a85c-4ea9-844c-a78c427d949a req-093ac3cb-cf66-40b0-8873-0527bd0da8a7 service nova] [instance: dbd34858-9806-4d3f-b829-948651056da2] Refreshing instance network info cache due to event network-changed-7ee0f326-ead2-4849-823d-9d652c5c339b. {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1097.121341] env[61839]: DEBUG oslo_concurrency.lockutils [req-371cccd0-a85c-4ea9-844c-a78c427d949a req-093ac3cb-cf66-40b0-8873-0527bd0da8a7 service nova] Acquiring lock "refresh_cache-dbd34858-9806-4d3f-b829-948651056da2" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1097.121486] env[61839]: DEBUG oslo_concurrency.lockutils [req-371cccd0-a85c-4ea9-844c-a78c427d949a req-093ac3cb-cf66-40b0-8873-0527bd0da8a7 service nova] Acquired lock "refresh_cache-dbd34858-9806-4d3f-b829-948651056da2" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1097.121733] env[61839]: DEBUG nova.network.neutron [req-371cccd0-a85c-4ea9-844c-a78c427d949a req-093ac3cb-cf66-40b0-8873-0527bd0da8a7 service nova] [instance: dbd34858-9806-4d3f-b829-948651056da2] Refreshing network info cache for port 7ee0f326-ead2-4849-823d-9d652c5c339b {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1097.123578] env[61839]: DEBUG oslo_vmware.rw_handles [None req-8fcfcbf5-3f0a-491e-bfdb-c64d3d7aaef6 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1097.123578] env[61839]: value = "resgroup-9" [ 1097.123578] env[61839]: _type = "ResourcePool" [ 1097.123578] env[61839]: }. 
{{(pid=61839) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1097.124224] env[61839]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-aa553168-c8f7-49e2-b79d-12b265f98750 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.149546] env[61839]: DEBUG oslo_vmware.rw_handles [None req-8fcfcbf5-3f0a-491e-bfdb-c64d3d7aaef6 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Lease: (returnval){ [ 1097.149546] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]5239bd75-ba64-2734-7335-e71184ae1858" [ 1097.149546] env[61839]: _type = "HttpNfcLease" [ 1097.149546] env[61839]: } obtained for vApp import into resource pool (val){ [ 1097.149546] env[61839]: value = "resgroup-9" [ 1097.149546] env[61839]: _type = "ResourcePool" [ 1097.149546] env[61839]: }. {{(pid=61839) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1097.149818] env[61839]: DEBUG oslo_vmware.api [None req-8fcfcbf5-3f0a-491e-bfdb-c64d3d7aaef6 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Waiting for the lease: (returnval){ [ 1097.149818] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]5239bd75-ba64-2734-7335-e71184ae1858" [ 1097.149818] env[61839]: _type = "HttpNfcLease" [ 1097.149818] env[61839]: } to be ready. {{(pid=61839) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1097.157365] env[61839]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1097.157365] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]5239bd75-ba64-2734-7335-e71184ae1858" [ 1097.157365] env[61839]: _type = "HttpNfcLease" [ 1097.157365] env[61839]: } is initializing. {{(pid=61839) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1097.195583] env[61839]: DEBUG nova.policy [None req-15f2500c-8f33-49fb-8999-1e6b16dce139 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '76a29e808031416ab8895e89c337be6e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7e03467b7fba46a9aac1562a1cb8368e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61839) authorize /opt/stack/nova/nova/policy.py:201}} [ 1097.369364] env[61839]: DEBUG nova.network.neutron [req-371cccd0-a85c-4ea9-844c-a78c427d949a req-093ac3cb-cf66-40b0-8873-0527bd0da8a7 service nova] [instance: dbd34858-9806-4d3f-b829-948651056da2] Updated VIF entry in instance network info cache for port 7ee0f326-ead2-4849-823d-9d652c5c339b. 
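The network-changed-7ee0f326 event records above ([ 1097.120 ] onward) show the event-driven cache refresh: an external Neutron event arrives, the manager takes the refresh_cache-&lt;uuid&gt; lock, re-reads the port from Neutron, and logs "Updated VIF entry" once the cached copy is replaced. A sketch of that refresh shape with hypothetical stand-ins: get_port_info plays the Neutron query, and a plain threading lock plays the lockutils refresh_cache lock.

```python
import threading

_cache_lock = threading.Lock()
_info_cache = {}  # instance uuid -> list of VIF dicts

def handle_network_changed(instance_uuid, port_id, get_port_info):
    """Re-read one port from the source of truth and patch it into the cache."""
    fresh = get_port_info(port_id)        # query Neutron for the current VIF
    with _cache_lock:                     # 'Acquiring lock "refresh_cache-..."'
        vifs = _info_cache.setdefault(instance_uuid, [])
        for i, vif in enumerate(vifs):
            if vif.get('id') == port_id:
                vifs[i] = fresh           # the "Updated VIF entry" case
                break
        else:
            vifs.append(fresh)            # port not cached yet: add it
```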
{{(pid=61839) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1097.369883] env[61839]: DEBUG nova.network.neutron [req-371cccd0-a85c-4ea9-844c-a78c427d949a req-093ac3cb-cf66-40b0-8873-0527bd0da8a7 service nova] [instance: dbd34858-9806-4d3f-b829-948651056da2] Updating instance_info_cache with network_info: [{"id": "7ee0f326-ead2-4849-823d-9d652c5c339b", "address": "fa:16:3e:d1:53:f9", "network": {"id": "41c98894-de91-45eb-a390-6217e0f9dca5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1040806357-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7e03467b7fba46a9aac1562a1cb8368e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "572b7281-aad3-45fa-9cb2-fc1c70569948", "external-id": "nsx-vlan-transportzone-722", "segmentation_id": 722, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7ee0f326-ea", "ovs_interfaceid": "7ee0f326-ead2-4849-823d-9d652c5c339b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1097.493787] env[61839]: DEBUG oslo_vmware.api [None req-d4141cf3-51df-4c8e-b676-b032690d2a4b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]525d01e6-dd94-6163-e807-87fe5bac812c, 'name': SearchDatastore_Task, 'duration_secs': 0.029765} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1097.494349] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d9e220d3-8463-4336-bf41-d60d30a31391 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.499972] env[61839]: DEBUG oslo_vmware.api [None req-d4141cf3-51df-4c8e-b676-b032690d2a4b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Waiting for the task: (returnval){ [ 1097.499972] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]521a4f55-d15d-748d-ed3b-e1efee82d6df" [ 1097.499972] env[61839]: _type = "Task" [ 1097.499972] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1097.507807] env[61839]: DEBUG oslo_vmware.api [None req-d4141cf3-51df-4c8e-b676-b032690d2a4b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]521a4f55-d15d-748d-ed3b-e1efee82d6df, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1097.657675] env[61839]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1097.657675] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]5239bd75-ba64-2734-7335-e71184ae1858" [ 1097.657675] env[61839]: _type = "HttpNfcLease" [ 1097.657675] env[61839]: } is ready. {{(pid=61839) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1097.658364] env[61839]: DEBUG oslo_vmware.rw_handles [None req-8fcfcbf5-3f0a-491e-bfdb-c64d3d7aaef6 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1097.658364] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]5239bd75-ba64-2734-7335-e71184ae1858" [ 1097.658364] env[61839]: _type = "HttpNfcLease" [ 1097.658364] env[61839]: }. {{(pid=61839) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1097.659094] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6530ca15-9f47-4598-8ea3-41ed03f26a61 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.666760] env[61839]: DEBUG oslo_vmware.rw_handles [None req-8fcfcbf5-3f0a-491e-bfdb-c64d3d7aaef6 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52b3b16b-9db2-89f8-e495-33293230fb00/disk-0.vmdk from lease info. {{(pid=61839) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1097.666935] env[61839]: DEBUG oslo_vmware.rw_handles [None req-8fcfcbf5-3f0a-491e-bfdb-c64d3d7aaef6 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Creating HTTP connection to write to file with size = 31668736 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52b3b16b-9db2-89f8-e495-33293230fb00/disk-0.vmdk. 
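The rw_handles records from [ 1097.123 ] to [ 1097.666 ] above walk the stream-optimized import protocol: ResourcePool.ImportVApp returns an HttpNfcLease, the client polls it from "initializing" to "ready", reads the disk-0.vmdk upload URL out of the lease info, and opens an HTTP write connection for the known byte size, with HttpNfcLeaseProgress keep-alives following just below. A sketch of that sequence around a hypothetical lease object; its state/write/progress/complete members stand in for the VIM calls named in the records.

```python
import time

def upload_via_lease(lease, chunks, total_size):
    """Stream image bytes through an import lease, keeping it alive."""
    while lease.state == 'initializing':      # "Lease ... is initializing."
        time.sleep(0.5)
    if lease.state != 'ready':
        raise RuntimeError('lease failed: %s' % lease.state)
    sent = 0
    for chunk in chunks:                      # HTTP writes to the disk-0.vmdk URL
        lease.write(chunk)
        sent += len(chunk)
        # Periodic HttpNfcLeaseProgress keep-alives stop vCenter from
        # timing the lease out during a long upload.
        lease.progress(sent * 100 // total_size)
    lease.complete()                          # finish the import on success
```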
{{(pid=61839) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1097.733654] env[61839]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-b78244b4-7abe-49a4-ad0f-3a2736410d8b {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.873049] env[61839]: DEBUG oslo_concurrency.lockutils [req-371cccd0-a85c-4ea9-844c-a78c427d949a req-093ac3cb-cf66-40b0-8873-0527bd0da8a7 service nova] Releasing lock "refresh_cache-dbd34858-9806-4d3f-b829-948651056da2" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1097.873332] env[61839]: DEBUG nova.compute.manager [req-371cccd0-a85c-4ea9-844c-a78c427d949a req-093ac3cb-cf66-40b0-8873-0527bd0da8a7 service nova] [instance: 6b12ef55-b566-4a74-a794-b4e4c41debe1] Received event network-changed-df30d6b1-3fc6-465f-9b51-353f874ccd30 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1097.873526] env[61839]: DEBUG nova.compute.manager [req-371cccd0-a85c-4ea9-844c-a78c427d949a req-093ac3cb-cf66-40b0-8873-0527bd0da8a7 service nova] [instance: 6b12ef55-b566-4a74-a794-b4e4c41debe1] Refreshing instance network info cache due to event network-changed-df30d6b1-3fc6-465f-9b51-353f874ccd30. {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1097.873758] env[61839]: DEBUG oslo_concurrency.lockutils [req-371cccd0-a85c-4ea9-844c-a78c427d949a req-093ac3cb-cf66-40b0-8873-0527bd0da8a7 service nova] Acquiring lock "refresh_cache-6b12ef55-b566-4a74-a794-b4e4c41debe1" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1097.873907] env[61839]: DEBUG oslo_concurrency.lockutils [req-371cccd0-a85c-4ea9-844c-a78c427d949a req-093ac3cb-cf66-40b0-8873-0527bd0da8a7 service nova] Acquired lock "refresh_cache-6b12ef55-b566-4a74-a794-b4e4c41debe1" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1097.874088] env[61839]: DEBUG nova.network.neutron [req-371cccd0-a85c-4ea9-844c-a78c427d949a req-093ac3cb-cf66-40b0-8873-0527bd0da8a7 service nova] [instance: 6b12ef55-b566-4a74-a794-b4e4c41debe1] Refreshing network info cache for port df30d6b1-3fc6-465f-9b51-353f874ccd30 {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1097.945098] env[61839]: INFO nova.compute.manager [None req-3e9c49a4-6e8c-4a13-b5bd-2b12915bc206 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 625a8fc1-23fc-4035-855f-3d3a963cdcea] Swapping old allocation on dict_keys(['cef329e6-1ccd-42a8-bbc4-109a06d1c908']) held by migration 609e967d-234d-4e69-aef6-f40a329f11cf for instance [ 1097.973954] env[61839]: DEBUG nova.scheduler.client.report [None req-3e9c49a4-6e8c-4a13-b5bd-2b12915bc206 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Overwriting current allocation {'allocations': {'cef329e6-1ccd-42a8-bbc4-109a06d1c908': {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}, 'generation': 135}}, 'project_id': '5f789f3900a347b59c491e9d141fb9e7', 'user_id': '8abcff2ffe534da3983ec78c3671110d', 'consumer_generation': 1} on consumer 625a8fc1-23fc-4035-855f-3d3a963cdcea {{(pid=61839) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2032}} [ 1098.012020] env[61839]: DEBUG 
oslo_vmware.api [None req-d4141cf3-51df-4c8e-b676-b032690d2a4b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]521a4f55-d15d-748d-ed3b-e1efee82d6df, 'name': SearchDatastore_Task, 'duration_secs': 0.008632} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1098.012344] env[61839]: DEBUG oslo_concurrency.lockutils [None req-d4141cf3-51df-4c8e-b676-b032690d2a4b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1098.012611] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-d4141cf3-51df-4c8e-b676-b032690d2a4b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk to [datastore2] 25c574c4-e39b-4009-a562-a4a5bf74a40c/25c574c4-e39b-4009-a562-a4a5bf74a40c.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1098.014389] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-789143c9-09e3-4bbf-acb0-66dcd6ae71b8 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.022170] env[61839]: DEBUG oslo_vmware.api [None req-d4141cf3-51df-4c8e-b676-b032690d2a4b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Waiting for the task: (returnval){ [ 1098.022170] env[61839]: value = "task-1315076" [ 1098.022170] env[61839]: _type = "Task" [ 1098.022170] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1098.030535] env[61839]: DEBUG oslo_vmware.api [None req-d4141cf3-51df-4c8e-b676-b032690d2a4b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1315076, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1098.078514] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3e9c49a4-6e8c-4a13-b5bd-2b12915bc206 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Acquiring lock "refresh_cache-625a8fc1-23fc-4035-855f-3d3a963cdcea" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1098.078758] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3e9c49a4-6e8c-4a13-b5bd-2b12915bc206 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Acquired lock "refresh_cache-625a8fc1-23fc-4035-855f-3d3a963cdcea" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1098.078952] env[61839]: DEBUG nova.network.neutron [None req-3e9c49a4-6e8c-4a13-b5bd-2b12915bc206 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 625a8fc1-23fc-4035-855f-3d3a963cdcea] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1098.236449] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Didn't find any instances for network info cache update. {{(pid=61839) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10014}} [ 1098.236668] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1098.236875] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1098.236968] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1098.237142] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1098.237290] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1098.237433] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1098.237564] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61839) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 1098.237704] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1098.537107] env[61839]: DEBUG oslo_vmware.api [None req-d4141cf3-51df-4c8e-b676-b032690d2a4b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1315076, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1098.638307] env[61839]: DEBUG nova.network.neutron [req-371cccd0-a85c-4ea9-844c-a78c427d949a req-093ac3cb-cf66-40b0-8873-0527bd0da8a7 service nova] [instance: 6b12ef55-b566-4a74-a794-b4e4c41debe1] Updated VIF entry in instance network info cache for port df30d6b1-3fc6-465f-9b51-353f874ccd30. {{(pid=61839) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1098.638707] env[61839]: DEBUG nova.network.neutron [req-371cccd0-a85c-4ea9-844c-a78c427d949a req-093ac3cb-cf66-40b0-8873-0527bd0da8a7 service nova] [instance: 6b12ef55-b566-4a74-a794-b4e4c41debe1] Updating instance_info_cache with network_info: [{"id": "df30d6b1-3fc6-465f-9b51-353f874ccd30", "address": "fa:16:3e:0c:09:8e", "network": {"id": "41c98894-de91-45eb-a390-6217e0f9dca5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1040806357-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.170", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7e03467b7fba46a9aac1562a1cb8368e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "572b7281-aad3-45fa-9cb2-fc1c70569948", "external-id": "nsx-vlan-transportzone-722", "segmentation_id": 722, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdf30d6b1-3f", "ovs_interfaceid": "df30d6b1-3fc6-465f-9b51-353f874ccd30", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1098.740517] env[61839]: DEBUG oslo_concurrency.lockutils [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1098.740760] env[61839]: DEBUG oslo_concurrency.lockutils [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1098.740934] env[61839]: DEBUG oslo_concurrency.lockutils [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Lock 
"compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1098.741108] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61839) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1098.742036] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3b116bd-18dd-46ba-ba19-bfc06a42bee1 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.751672] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7fe5831-96a8-4836-9b6c-db7cce08fb32 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.767189] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fccb634-31b7-4a65-9842-e306e1bb317a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.774189] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-115e36b4-8bbc-4aa9-9069-23be20a5f485 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.804424] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180710MB free_disk=134GB free_vcpus=48 pci_devices=None {{(pid=61839) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1098.804638] env[61839]: DEBUG oslo_concurrency.lockutils [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1098.804843] env[61839]: DEBUG oslo_concurrency.lockutils [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1099.016676] env[61839]: DEBUG nova.network.neutron [None req-3e9c49a4-6e8c-4a13-b5bd-2b12915bc206 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 625a8fc1-23fc-4035-855f-3d3a963cdcea] Updating instance_info_cache with network_info: [{"id": "da11baa5-354e-440b-a384-10cd83ff2715", "address": "fa:16:3e:1c:dc:5a", "network": {"id": "54f10e74-ee7b-499c-a6b6-a27d337719cd", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-79581761-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.143", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], 
"version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5f789f3900a347b59c491e9d141fb9e7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe20ef0e-0991-44d7-887d-08dddac0b56b", "external-id": "nsx-vlan-transportzone-991", "segmentation_id": 991, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapda11baa5-35", "ovs_interfaceid": "da11baa5-354e-440b-a384-10cd83ff2715", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1099.029410] env[61839]: DEBUG nova.network.neutron [None req-15f2500c-8f33-49fb-8999-1e6b16dce139 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: 6b12ef55-b566-4a74-a794-b4e4c41debe1] Successfully updated port: da45fc64-4aa9-4af8-b820-45a5fe4da356 {{(pid=61839) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1099.034792] env[61839]: DEBUG oslo_vmware.api [None req-d4141cf3-51df-4c8e-b676-b032690d2a4b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1315076, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.568987} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1099.035318] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-d4141cf3-51df-4c8e-b676-b032690d2a4b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk to [datastore2] 25c574c4-e39b-4009-a562-a4a5bf74a40c/25c574c4-e39b-4009-a562-a4a5bf74a40c.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1099.035603] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-d4141cf3-51df-4c8e-b676-b032690d2a4b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 25c574c4-e39b-4009-a562-a4a5bf74a40c] Extending root virtual disk to 1048576 {{(pid=61839) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1099.036149] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c2ab8f06-9247-45ab-8cf8-8437695beaaa {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.046633] env[61839]: DEBUG oslo_vmware.api [None req-d4141cf3-51df-4c8e-b676-b032690d2a4b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Waiting for the task: (returnval){ [ 1099.046633] env[61839]: value = "task-1315077" [ 1099.046633] env[61839]: _type = "Task" [ 1099.046633] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1099.054545] env[61839]: DEBUG oslo_vmware.api [None req-d4141cf3-51df-4c8e-b676-b032690d2a4b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1315077, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1099.083700] env[61839]: DEBUG oslo_vmware.rw_handles [None req-8fcfcbf5-3f0a-491e-bfdb-c64d3d7aaef6 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Completed reading data from the image iterator. {{(pid=61839) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1099.084200] env[61839]: DEBUG oslo_vmware.rw_handles [None req-8fcfcbf5-3f0a-491e-bfdb-c64d3d7aaef6 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52b3b16b-9db2-89f8-e495-33293230fb00/disk-0.vmdk. {{(pid=61839) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1099.085201] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17f7fe99-bbd4-4c51-a675-16370d577496 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.091895] env[61839]: DEBUG oslo_vmware.rw_handles [None req-8fcfcbf5-3f0a-491e-bfdb-c64d3d7aaef6 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52b3b16b-9db2-89f8-e495-33293230fb00/disk-0.vmdk is in state: ready. {{(pid=61839) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1099.092085] env[61839]: DEBUG oslo_vmware.rw_handles [None req-8fcfcbf5-3f0a-491e-bfdb-c64d3d7aaef6 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Releasing lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52b3b16b-9db2-89f8-e495-33293230fb00/disk-0.vmdk. 
{{(pid=61839) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1099.092341] env[61839]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-c6719c40-42da-4ac1-8142-61d0af402537 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.146410] env[61839]: DEBUG oslo_concurrency.lockutils [req-371cccd0-a85c-4ea9-844c-a78c427d949a req-093ac3cb-cf66-40b0-8873-0527bd0da8a7 service nova] Releasing lock "refresh_cache-6b12ef55-b566-4a74-a794-b4e4c41debe1" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1099.149381] env[61839]: DEBUG nova.compute.manager [req-e5be1e65-b4b1-4a81-b690-a55586c0de7b req-67176512-f5b4-4604-a8af-dacfd5fc0405 service nova] [instance: 6b12ef55-b566-4a74-a794-b4e4c41debe1] Received event network-vif-plugged-da45fc64-4aa9-4af8-b820-45a5fe4da356 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1099.149606] env[61839]: DEBUG oslo_concurrency.lockutils [req-e5be1e65-b4b1-4a81-b690-a55586c0de7b req-67176512-f5b4-4604-a8af-dacfd5fc0405 service nova] Acquiring lock "6b12ef55-b566-4a74-a794-b4e4c41debe1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1099.149789] env[61839]: DEBUG oslo_concurrency.lockutils [req-e5be1e65-b4b1-4a81-b690-a55586c0de7b req-67176512-f5b4-4604-a8af-dacfd5fc0405 service nova] Lock "6b12ef55-b566-4a74-a794-b4e4c41debe1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1099.149956] env[61839]: DEBUG oslo_concurrency.lockutils [req-e5be1e65-b4b1-4a81-b690-a55586c0de7b req-67176512-f5b4-4604-a8af-dacfd5fc0405 service nova] Lock "6b12ef55-b566-4a74-a794-b4e4c41debe1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1099.150138] env[61839]: DEBUG nova.compute.manager [req-e5be1e65-b4b1-4a81-b690-a55586c0de7b req-67176512-f5b4-4604-a8af-dacfd5fc0405 service nova] [instance: 6b12ef55-b566-4a74-a794-b4e4c41debe1] No waiting events found dispatching network-vif-plugged-da45fc64-4aa9-4af8-b820-45a5fe4da356 {{(pid=61839) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1099.150357] env[61839]: WARNING nova.compute.manager [req-e5be1e65-b4b1-4a81-b690-a55586c0de7b req-67176512-f5b4-4604-a8af-dacfd5fc0405 service nova] [instance: 6b12ef55-b566-4a74-a794-b4e4c41debe1] Received unexpected event network-vif-plugged-da45fc64-4aa9-4af8-b820-45a5fe4da356 for instance with vm_state active and task_state None. 
[ 1099.150467] env[61839]: DEBUG nova.compute.manager [req-e5be1e65-b4b1-4a81-b690-a55586c0de7b req-67176512-f5b4-4604-a8af-dacfd5fc0405 service nova] [instance: 6b12ef55-b566-4a74-a794-b4e4c41debe1] Received event network-changed-da45fc64-4aa9-4af8-b820-45a5fe4da356 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1099.150626] env[61839]: DEBUG nova.compute.manager [req-e5be1e65-b4b1-4a81-b690-a55586c0de7b req-67176512-f5b4-4604-a8af-dacfd5fc0405 service nova] [instance: 6b12ef55-b566-4a74-a794-b4e4c41debe1] Refreshing instance network info cache due to event network-changed-da45fc64-4aa9-4af8-b820-45a5fe4da356. {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1099.150807] env[61839]: DEBUG oslo_concurrency.lockutils [req-e5be1e65-b4b1-4a81-b690-a55586c0de7b req-67176512-f5b4-4604-a8af-dacfd5fc0405 service nova] Acquiring lock "refresh_cache-6b12ef55-b566-4a74-a794-b4e4c41debe1" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1099.150946] env[61839]: DEBUG oslo_concurrency.lockutils [req-e5be1e65-b4b1-4a81-b690-a55586c0de7b req-67176512-f5b4-4604-a8af-dacfd5fc0405 service nova] Acquired lock "refresh_cache-6b12ef55-b566-4a74-a794-b4e4c41debe1" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1099.151116] env[61839]: DEBUG nova.network.neutron [req-e5be1e65-b4b1-4a81-b690-a55586c0de7b req-67176512-f5b4-4604-a8af-dacfd5fc0405 service nova] [instance: 6b12ef55-b566-4a74-a794-b4e4c41debe1] Refreshing network info cache for port da45fc64-4aa9-4af8-b820-45a5fe4da356 {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1099.265213] env[61839]: DEBUG oslo_vmware.rw_handles [None req-8fcfcbf5-3f0a-491e-bfdb-c64d3d7aaef6 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Closed VMDK write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52b3b16b-9db2-89f8-e495-33293230fb00/disk-0.vmdk. 
{{(pid=61839) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1099.265524] env[61839]: INFO nova.virt.vmwareapi.images [None req-8fcfcbf5-3f0a-491e-bfdb-c64d3d7aaef6 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: a1defab7-8433-411d-b7e2-c31f6a34b8e0] Downloaded image file data f647401d-2329-4d44-8490-33aad50e15f2 [ 1099.266585] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3be652ac-f2df-4e12-b1c6-ce18c7d95955 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.283755] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a5968371-88a1-485e-a6c1-af63053eed43 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.305041] env[61839]: INFO nova.virt.vmwareapi.images [None req-8fcfcbf5-3f0a-491e-bfdb-c64d3d7aaef6 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: a1defab7-8433-411d-b7e2-c31f6a34b8e0] The imported VM was unregistered [ 1099.307693] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-8fcfcbf5-3f0a-491e-bfdb-c64d3d7aaef6 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: a1defab7-8433-411d-b7e2-c31f6a34b8e0] Caching image {{(pid=61839) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1099.307938] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-8fcfcbf5-3f0a-491e-bfdb-c64d3d7aaef6 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Creating directory with path [datastore2] devstack-image-cache_base/f647401d-2329-4d44-8490-33aad50e15f2 {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1099.308223] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a60d8107-adbe-4363-b72b-81acb80f3a73 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.319721] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-8fcfcbf5-3f0a-491e-bfdb-c64d3d7aaef6 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Created directory with path [datastore2] devstack-image-cache_base/f647401d-2329-4d44-8490-33aad50e15f2 {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1099.319721] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-8fcfcbf5-3f0a-491e-bfdb-c64d3d7aaef6 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Moving virtual disk from [datastore2] OSTACK_IMG_3d4ffe95-bfb6-46a0-99a3-9526e05f7266/OSTACK_IMG_3d4ffe95-bfb6-46a0-99a3-9526e05f7266.vmdk to [datastore2] devstack-image-cache_base/f647401d-2329-4d44-8490-33aad50e15f2/f647401d-2329-4d44-8490-33aad50e15f2.vmdk. 
{{(pid=61839) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1099.319951] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-aaaac645-78d7-4be6-a800-e1722fd62a91 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.326324] env[61839]: DEBUG oslo_vmware.api [None req-8fcfcbf5-3f0a-491e-bfdb-c64d3d7aaef6 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Waiting for the task: (returnval){ [ 1099.326324] env[61839]: value = "task-1315079" [ 1099.326324] env[61839]: _type = "Task" [ 1099.326324] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1099.334498] env[61839]: DEBUG oslo_vmware.api [None req-8fcfcbf5-3f0a-491e-bfdb-c64d3d7aaef6 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Task: {'id': task-1315079, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1099.521147] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3e9c49a4-6e8c-4a13-b5bd-2b12915bc206 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Releasing lock "refresh_cache-625a8fc1-23fc-4035-855f-3d3a963cdcea" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1099.522204] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8ef8086-855a-4f33-9bd8-a8b7ec25c758 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.530541] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7da898e-c234-495a-b7c2-8affff5e4c18 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.536051] env[61839]: DEBUG oslo_concurrency.lockutils [None req-15f2500c-8f33-49fb-8999-1e6b16dce139 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Acquiring lock "refresh_cache-6b12ef55-b566-4a74-a794-b4e4c41debe1" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1099.555924] env[61839]: DEBUG oslo_vmware.api [None req-d4141cf3-51df-4c8e-b676-b032690d2a4b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1315077, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068148} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1099.556306] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-d4141cf3-51df-4c8e-b676-b032690d2a4b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 25c574c4-e39b-4009-a562-a4a5bf74a40c] Extended root virtual disk {{(pid=61839) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1099.557051] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70ebb49c-6eaa-4986-8c5b-328af49c1a0f {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.580312] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-d4141cf3-51df-4c8e-b676-b032690d2a4b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 25c574c4-e39b-4009-a562-a4a5bf74a40c] Reconfiguring VM instance instance-00000067 to attach disk [datastore2] 25c574c4-e39b-4009-a562-a4a5bf74a40c/25c574c4-e39b-4009-a562-a4a5bf74a40c.vmdk or device None with type sparse {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1099.580853] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-65e2dada-713c-4d93-abdd-ac8cf6eff03d {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.601492] env[61839]: DEBUG oslo_vmware.api [None req-d4141cf3-51df-4c8e-b676-b032690d2a4b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Waiting for the task: (returnval){ [ 1099.601492] env[61839]: value = "task-1315080" [ 1099.601492] env[61839]: _type = "Task" [ 1099.601492] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1099.610184] env[61839]: DEBUG oslo_vmware.api [None req-d4141cf3-51df-4c8e-b676-b032690d2a4b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1315080, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1099.835874] env[61839]: DEBUG oslo_vmware.api [None req-8fcfcbf5-3f0a-491e-bfdb-c64d3d7aaef6 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Task: {'id': task-1315079, 'name': MoveVirtualDisk_Task} progress is 24%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1099.842021] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance 7f7b3f51-3e96-49f1-a84a-81ae649e6938 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1099.842021] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance 25c574c4-e39b-4009-a562-a4a5bf74a40c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1099.842021] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance dbd34858-9806-4d3f-b829-948651056da2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1099.842021] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance 6b12ef55-b566-4a74-a794-b4e4c41debe1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1099.842021] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance 625a8fc1-23fc-4035-855f-3d3a963cdcea actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1099.842021] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance a1defab7-8433-411d-b7e2-c31f6a34b8e0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1099.842021] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Total usable vcpus: 48, total allocated vcpus: 6 {{(pid=61839) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1099.842021] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1664MB phys_disk=200GB used_disk=6GB total_vcpus=48 used_vcpus=6 pci_stats=[] {{(pid=61839) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1099.890702] env[61839]: DEBUG nova.network.neutron [req-e5be1e65-b4b1-4a81-b690-a55586c0de7b req-67176512-f5b4-4604-a8af-dacfd5fc0405 service nova] [instance: 6b12ef55-b566-4a74-a794-b4e4c41debe1] Added VIF to instance network info cache for port da45fc64-4aa9-4af8-b820-45a5fe4da356. 
{{(pid=61839) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3546}} [ 1099.891170] env[61839]: DEBUG nova.network.neutron [req-e5be1e65-b4b1-4a81-b690-a55586c0de7b req-67176512-f5b4-4604-a8af-dacfd5fc0405 service nova] [instance: 6b12ef55-b566-4a74-a794-b4e4c41debe1] Updating instance_info_cache with network_info: [{"id": "df30d6b1-3fc6-465f-9b51-353f874ccd30", "address": "fa:16:3e:0c:09:8e", "network": {"id": "41c98894-de91-45eb-a390-6217e0f9dca5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1040806357-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.170", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7e03467b7fba46a9aac1562a1cb8368e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "572b7281-aad3-45fa-9cb2-fc1c70569948", "external-id": "nsx-vlan-transportzone-722", "segmentation_id": 722, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdf30d6b1-3f", "ovs_interfaceid": "df30d6b1-3fc6-465f-9b51-353f874ccd30", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "da45fc64-4aa9-4af8-b820-45a5fe4da356", "address": "fa:16:3e:df:3c:4e", "network": {"id": "41c98894-de91-45eb-a390-6217e0f9dca5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1040806357-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7e03467b7fba46a9aac1562a1cb8368e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "572b7281-aad3-45fa-9cb2-fc1c70569948", "external-id": "nsx-vlan-transportzone-722", "segmentation_id": 722, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapda45fc64-4a", "ovs_interfaceid": "da45fc64-4aa9-4af8-b820-45a5fe4da356", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1099.932852] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-385df8f3-ae72-4beb-b97c-ccabb297e9b7 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.941551] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1544be06-7417-44c8-be9d-34674529365a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.974374] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-2ff8ebfa-8de8-4be6-9d30-d099316086f4 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.982252] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ae2ac1c-3203-4502-be95-033cc2575717 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.995910] env[61839]: DEBUG nova.compute.provider_tree [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Updating inventory in ProviderTree for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1100.113097] env[61839]: DEBUG oslo_vmware.api [None req-d4141cf3-51df-4c8e-b676-b032690d2a4b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1315080, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1100.337459] env[61839]: DEBUG oslo_vmware.api [None req-8fcfcbf5-3f0a-491e-bfdb-c64d3d7aaef6 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Task: {'id': task-1315079, 'name': MoveVirtualDisk_Task} progress is 43%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1100.393938] env[61839]: DEBUG oslo_concurrency.lockutils [req-e5be1e65-b4b1-4a81-b690-a55586c0de7b req-67176512-f5b4-4604-a8af-dacfd5fc0405 service nova] Releasing lock "refresh_cache-6b12ef55-b566-4a74-a794-b4e4c41debe1" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1100.394923] env[61839]: DEBUG oslo_concurrency.lockutils [None req-15f2500c-8f33-49fb-8999-1e6b16dce139 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Acquired lock "refresh_cache-6b12ef55-b566-4a74-a794-b4e4c41debe1" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1100.395167] env[61839]: DEBUG nova.network.neutron [None req-15f2500c-8f33-49fb-8999-1e6b16dce139 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: 6b12ef55-b566-4a74-a794-b4e4c41debe1] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1100.518761] env[61839]: ERROR nova.scheduler.client.report [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] [req-76025b7d-b107-49a8-a546-6f2cd282ff11] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID cef329e6-1ccd-42a8-bbc4-109a06d1c908. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-76025b7d-b107-49a8-a546-6f2cd282ff11"}]} [ 1100.535931] env[61839]: DEBUG nova.scheduler.client.report [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Refreshing inventories for resource provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1100.552029] env[61839]: DEBUG nova.scheduler.client.report [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Updating ProviderTree inventory for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1100.552211] env[61839]: DEBUG nova.compute.provider_tree [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Updating inventory in ProviderTree for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1100.565041] env[61839]: DEBUG nova.scheduler.client.report [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Refreshing aggregate associations for resource provider cef329e6-1ccd-42a8-bbc4-109a06d1c908, aggregates: None {{(pid=61839) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1100.586201] env[61839]: DEBUG nova.scheduler.client.report [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Refreshing trait associations for resource provider cef329e6-1ccd-42a8-bbc4-109a06d1c908, traits: COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=61839) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1100.614912] env[61839]: DEBUG oslo_vmware.api [None req-d4141cf3-51df-4c8e-b676-b032690d2a4b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1315080, 'name': ReconfigVM_Task, 'duration_secs': 0.968433} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1100.617861] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-d4141cf3-51df-4c8e-b676-b032690d2a4b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 25c574c4-e39b-4009-a562-a4a5bf74a40c] Reconfigured VM instance instance-00000067 to attach disk [datastore2] 25c574c4-e39b-4009-a562-a4a5bf74a40c/25c574c4-e39b-4009-a562-a4a5bf74a40c.vmdk or device None with type sparse {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1100.619066] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-d4141cf3-51df-4c8e-b676-b032690d2a4b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 25c574c4-e39b-4009-a562-a4a5bf74a40c] Block device information present: {'root_device_name': '/dev/sda', 'image': [{'size': 0, 'disk_bus': None, 'device_type': 'disk', 'guest_format': None, 'encrypted': False, 'boot_index': 0, 'encryption_format': None, 'encryption_options': None, 'encryption_secret_uuid': None, 'device_name': '/dev/sda', 'image_id': 'e497cc62-282a-4a70-9770-22d80d8a1013'}], 'ephemerals': [], 'block_device_mapping': [{'disk_bus': None, 'device_type': None, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-281454', 'volume_id': 'bf363321-5f33-48dc-9ed5-40c5f2cf0d88', 'name': 'volume-bf363321-5f33-48dc-9ed5-40c5f2cf0d88', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '25c574c4-e39b-4009-a562-a4a5bf74a40c', 'attached_at': '', 'detached_at': '', 'volume_id': 'bf363321-5f33-48dc-9ed5-40c5f2cf0d88', 'serial': 'bf363321-5f33-48dc-9ed5-40c5f2cf0d88'}, 'guest_format': None, 'attachment_id': 'df9075e0-c7b7-4e11-befc-0362081f6692', 'mount_device': '/dev/sdb', 'delete_on_termination': False, 'boot_index': None, 'volume_type': None}], 'swap': None} {{(pid=61839) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1100.619303] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-d4141cf3-51df-4c8e-b676-b032690d2a4b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 25c574c4-e39b-4009-a562-a4a5bf74a40c] Volume attach. 
Driver type: vmdk {{(pid=61839) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1100.619502] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-d4141cf3-51df-4c8e-b676-b032690d2a4b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 25c574c4-e39b-4009-a562-a4a5bf74a40c] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-281454', 'volume_id': 'bf363321-5f33-48dc-9ed5-40c5f2cf0d88', 'name': 'volume-bf363321-5f33-48dc-9ed5-40c5f2cf0d88', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '25c574c4-e39b-4009-a562-a4a5bf74a40c', 'attached_at': '', 'detached_at': '', 'volume_id': 'bf363321-5f33-48dc-9ed5-40c5f2cf0d88', 'serial': 'bf363321-5f33-48dc-9ed5-40c5f2cf0d88'} {{(pid=61839) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1100.620444] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-3e9c49a4-6e8c-4a13-b5bd-2b12915bc206 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 625a8fc1-23fc-4035-855f-3d3a963cdcea] Powering off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1100.621631] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30faf498-ca32-4eab-9ff5-3defc5f8d754 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.623844] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1cc0b14f-4dde-4348-9337-095bb99f2f5f {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.644771] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c7f0b73-6f9c-4b82-a57d-1c75d91b7b3a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.647817] env[61839]: DEBUG oslo_vmware.api [None req-3e9c49a4-6e8c-4a13-b5bd-2b12915bc206 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Waiting for the task: (returnval){ [ 1100.647817] env[61839]: value = "task-1315081" [ 1100.647817] env[61839]: _type = "Task" [ 1100.647817] env[61839]: } to complete. 
{{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1100.674483] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-d4141cf3-51df-4c8e-b676-b032690d2a4b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 25c574c4-e39b-4009-a562-a4a5bf74a40c] Reconfiguring VM instance instance-00000067 to attach disk [datastore2] volume-bf363321-5f33-48dc-9ed5-40c5f2cf0d88/volume-bf363321-5f33-48dc-9ed5-40c5f2cf0d88.vmdk or device None with type thin {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1100.678165] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f321b53e-83fc-4a31-b81d-22ad86ed8858 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.526309] env[61839]: DEBUG oslo_vmware.api [None req-3e9c49a4-6e8c-4a13-b5bd-2b12915bc206 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1315081, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1101.526560] env[61839]: WARNING oslo_vmware.common.loopingcall [None req-3e9c49a4-6e8c-4a13-b5bd-2b12915bc206 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] task run outlasted interval by 0.37817 sec [ 1101.535782] env[61839]: DEBUG oslo_vmware.api [None req-d4141cf3-51df-4c8e-b676-b032690d2a4b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Waiting for the task: (returnval){ [ 1101.535782] env[61839]: value = "task-1315082" [ 1101.535782] env[61839]: _type = "Task" [ 1101.535782] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1101.548578] env[61839]: DEBUG oslo_vmware.api [None req-8fcfcbf5-3f0a-491e-bfdb-c64d3d7aaef6 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Task: {'id': task-1315079, 'name': MoveVirtualDisk_Task} progress is 94%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1101.548578] env[61839]: DEBUG oslo_vmware.api [None req-3e9c49a4-6e8c-4a13-b5bd-2b12915bc206 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1315081, 'name': PowerOffVM_Task, 'duration_secs': 0.203755} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1101.549262] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-3e9c49a4-6e8c-4a13-b5bd-2b12915bc206 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 625a8fc1-23fc-4035-855f-3d3a963cdcea] Powered off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1101.549930] env[61839]: DEBUG nova.virt.hardware [None req-3e9c49a4-6e8c-4a13-b5bd-2b12915bc206 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-18T16:51:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=e497cc62-282a-4a70-9770-22d80d8a1013,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1101.550218] env[61839]: DEBUG nova.virt.hardware [None req-3e9c49a4-6e8c-4a13-b5bd-2b12915bc206 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Flavor limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1101.550430] env[61839]: DEBUG nova.virt.hardware [None req-3e9c49a4-6e8c-4a13-b5bd-2b12915bc206 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Image limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1101.550783] env[61839]: DEBUG nova.virt.hardware [None req-3e9c49a4-6e8c-4a13-b5bd-2b12915bc206 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Flavor pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1101.550861] env[61839]: DEBUG nova.virt.hardware [None req-3e9c49a4-6e8c-4a13-b5bd-2b12915bc206 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Image pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1101.550958] env[61839]: DEBUG nova.virt.hardware [None req-3e9c49a4-6e8c-4a13-b5bd-2b12915bc206 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1101.551188] env[61839]: DEBUG nova.virt.hardware [None req-3e9c49a4-6e8c-4a13-b5bd-2b12915bc206 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1101.551361] env[61839]: DEBUG nova.virt.hardware [None req-3e9c49a4-6e8c-4a13-b5bd-2b12915bc206 tempest-ServerActionsTestOtherB-75710309 
tempest-ServerActionsTestOtherB-75710309-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1101.551580] env[61839]: DEBUG nova.virt.hardware [None req-3e9c49a4-6e8c-4a13-b5bd-2b12915bc206 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Got 1 possible topologies {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1101.551795] env[61839]: DEBUG nova.virt.hardware [None req-3e9c49a4-6e8c-4a13-b5bd-2b12915bc206 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1101.551990] env[61839]: DEBUG nova.virt.hardware [None req-3e9c49a4-6e8c-4a13-b5bd-2b12915bc206 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1101.561295] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-94dbbd8b-1a36-4386-b264-5bcc25f2cea5 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.572305] env[61839]: DEBUG oslo_vmware.api [None req-d4141cf3-51df-4c8e-b676-b032690d2a4b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1315082, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1101.580513] env[61839]: DEBUG oslo_vmware.api [None req-3e9c49a4-6e8c-4a13-b5bd-2b12915bc206 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Waiting for the task: (returnval){ [ 1101.580513] env[61839]: value = "task-1315083" [ 1101.580513] env[61839]: _type = "Task" [ 1101.580513] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1101.585987] env[61839]: WARNING nova.network.neutron [None req-15f2500c-8f33-49fb-8999-1e6b16dce139 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: 6b12ef55-b566-4a74-a794-b4e4c41debe1] 41c98894-de91-45eb-a390-6217e0f9dca5 already exists in list: networks containing: ['41c98894-de91-45eb-a390-6217e0f9dca5']. ignoring it [ 1101.586138] env[61839]: WARNING nova.network.neutron [None req-15f2500c-8f33-49fb-8999-1e6b16dce139 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: 6b12ef55-b566-4a74-a794-b4e4c41debe1] 41c98894-de91-45eb-a390-6217e0f9dca5 already exists in list: networks containing: ['41c98894-de91-45eb-a390-6217e0f9dca5']. ignoring it [ 1101.586468] env[61839]: WARNING nova.network.neutron [None req-15f2500c-8f33-49fb-8999-1e6b16dce139 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: 6b12ef55-b566-4a74-a794-b4e4c41debe1] da45fc64-4aa9-4af8-b820-45a5fe4da356 already exists in list: port_ids containing: ['da45fc64-4aa9-4af8-b820-45a5fe4da356']. 
ignoring it [ 1101.593572] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f989d466-df04-4c04-a836-f7f8a5790272 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.596464] env[61839]: DEBUG oslo_vmware.api [None req-3e9c49a4-6e8c-4a13-b5bd-2b12915bc206 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1315083, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1101.601710] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dae777b3-291f-4a1d-b749-dc291591903f {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.634020] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4fa0585-1f9f-478b-a80a-89f53489bea1 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.640953] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-094f3686-0f5e-4a4c-abc0-36945a465643 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.655616] env[61839]: DEBUG nova.compute.provider_tree [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Updating inventory in ProviderTree for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1102.035532] env[61839]: DEBUG oslo_vmware.api [None req-8fcfcbf5-3f0a-491e-bfdb-c64d3d7aaef6 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Task: {'id': task-1315079, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.344232} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1102.035532] env[61839]: INFO nova.virt.vmwareapi.ds_util [None req-8fcfcbf5-3f0a-491e-bfdb-c64d3d7aaef6 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Moved virtual disk from [datastore2] OSTACK_IMG_3d4ffe95-bfb6-46a0-99a3-9526e05f7266/OSTACK_IMG_3d4ffe95-bfb6-46a0-99a3-9526e05f7266.vmdk to [datastore2] devstack-image-cache_base/f647401d-2329-4d44-8490-33aad50e15f2/f647401d-2329-4d44-8490-33aad50e15f2.vmdk. 
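The run of entries above and below this point is oslo.vmware's task-polling loop at work: each "Invoking <Manager>.<Something>_Task" call returns a vCenter task reference, wait_for_task blocks on it, and _poll_task emits one "progress is N%" line per poll until the task reaches a terminal state, at which point it logs the duration and "completed successfully". The sketch below shows that pattern in outline only; it is not oslo.vmware's implementation, and fetch_info is a hypothetical stand-in for the PropertyCollector read of the task's "info" property that the real loop performs against vCenter.

    import time
    import logging

    LOG = logging.getLogger(__name__)

    def wait_for_task(task_id, fetch_info, poll_interval=0.5):
        """Poll a vCenter-style task until it reaches a terminal state.

        fetch_info(task_id) must return a dict with keys: 'name',
        'state', 'progress', 'result', 'error', where 'state' is one of
        'queued', 'running', 'success', 'error' (mirroring the vSphere
        TaskInfoState values).  fetch_info is hypothetical: in the real
        driver this is a PropertyCollector call against vCenter.
        """
        start = time.monotonic()
        while True:
            info = fetch_info(task_id)
            if info['state'] in ('queued', 'running'):
                # One progress line per poll, as in the entries above.
                LOG.debug("Task: {'id': %s, 'name': %s} progress is %s%%.",
                          task_id, info['name'], info.get('progress') or 0)
                time.sleep(poll_interval)
            elif info['state'] == 'success':
                # Terminal success: log locally measured duration and
                # hand the task result back to the caller.
                LOG.debug("Task: {'id': %s, 'name': %s, 'duration_secs': "
                          "%.6f} completed successfully.",
                          task_id, info['name'], time.monotonic() - start)
                return info.get('result')
            else:
                raise RuntimeError(
                    f"Task {task_id} failed: {info.get('error')}")
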
[ 1102.035623] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-8fcfcbf5-3f0a-491e-bfdb-c64d3d7aaef6 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: a1defab7-8433-411d-b7e2-c31f6a34b8e0] Cleaning up location [datastore2] OSTACK_IMG_3d4ffe95-bfb6-46a0-99a3-9526e05f7266 {{(pid=61839) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1102.035797] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-8fcfcbf5-3f0a-491e-bfdb-c64d3d7aaef6 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Deleting the datastore file [datastore2] OSTACK_IMG_3d4ffe95-bfb6-46a0-99a3-9526e05f7266 {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1102.036059] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-11a7882e-3f71-49a1-87cb-4f71040c849a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.047167] env[61839]: DEBUG oslo_vmware.api [None req-d4141cf3-51df-4c8e-b676-b032690d2a4b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1315082, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1102.048388] env[61839]: DEBUG oslo_vmware.api [None req-8fcfcbf5-3f0a-491e-bfdb-c64d3d7aaef6 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Waiting for the task: (returnval){ [ 1102.048388] env[61839]: value = "task-1315084" [ 1102.048388] env[61839]: _type = "Task" [ 1102.048388] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1102.055578] env[61839]: DEBUG oslo_vmware.api [None req-8fcfcbf5-3f0a-491e-bfdb-c64d3d7aaef6 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Task: {'id': task-1315084, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1102.089530] env[61839]: DEBUG oslo_vmware.api [None req-3e9c49a4-6e8c-4a13-b5bd-2b12915bc206 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1315083, 'name': ReconfigVM_Task, 'duration_secs': 0.396524} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1102.090338] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ac6ab86-ceb5-494f-953b-0d1672a62c5f {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.111012] env[61839]: DEBUG nova.virt.hardware [None req-3e9c49a4-6e8c-4a13-b5bd-2b12915bc206 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-18T16:51:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=e497cc62-282a-4a70-9770-22d80d8a1013,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1102.111268] env[61839]: DEBUG nova.virt.hardware [None req-3e9c49a4-6e8c-4a13-b5bd-2b12915bc206 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Flavor limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1102.111437] env[61839]: DEBUG nova.virt.hardware [None req-3e9c49a4-6e8c-4a13-b5bd-2b12915bc206 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Image limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1102.111633] env[61839]: DEBUG nova.virt.hardware [None req-3e9c49a4-6e8c-4a13-b5bd-2b12915bc206 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Flavor pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1102.111812] env[61839]: DEBUG nova.virt.hardware [None req-3e9c49a4-6e8c-4a13-b5bd-2b12915bc206 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Image pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1102.111971] env[61839]: DEBUG nova.virt.hardware [None req-3e9c49a4-6e8c-4a13-b5bd-2b12915bc206 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1102.112224] env[61839]: DEBUG nova.virt.hardware [None req-3e9c49a4-6e8c-4a13-b5bd-2b12915bc206 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1102.112415] env[61839]: DEBUG nova.virt.hardware [None req-3e9c49a4-6e8c-4a13-b5bd-2b12915bc206 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61839) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1102.112605] env[61839]: DEBUG nova.virt.hardware [None req-3e9c49a4-6e8c-4a13-b5bd-2b12915bc206 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Got 1 possible topologies {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1102.112780] env[61839]: DEBUG nova.virt.hardware [None req-3e9c49a4-6e8c-4a13-b5bd-2b12915bc206 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1102.112958] env[61839]: DEBUG nova.virt.hardware [None req-3e9c49a4-6e8c-4a13-b5bd-2b12915bc206 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1102.113788] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-baf4983a-f2f9-42a4-815e-0961abd17706 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.120873] env[61839]: DEBUG oslo_vmware.api [None req-3e9c49a4-6e8c-4a13-b5bd-2b12915bc206 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Waiting for the task: (returnval){ [ 1102.120873] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]5295fa74-4090-a7c8-d6e6-6dcf2ed2f0b3" [ 1102.120873] env[61839]: _type = "Task" [ 1102.120873] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1102.129155] env[61839]: DEBUG oslo_vmware.api [None req-3e9c49a4-6e8c-4a13-b5bd-2b12915bc206 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]5295fa74-4090-a7c8-d6e6-6dcf2ed2f0b3, 'name': SearchDatastore_Task, 'duration_secs': 0.006451} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1102.134450] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-3e9c49a4-6e8c-4a13-b5bd-2b12915bc206 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 625a8fc1-23fc-4035-855f-3d3a963cdcea] Reconfiguring VM instance instance-00000064 to detach disk 2000 {{(pid=61839) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1102.136520] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3902f427-71ee-4747-b468-08348c27feaf {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.155874] env[61839]: DEBUG oslo_vmware.api [None req-3e9c49a4-6e8c-4a13-b5bd-2b12915bc206 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Waiting for the task: (returnval){ [ 1102.155874] env[61839]: value = "task-1315085" [ 1102.155874] env[61839]: _type = "Task" [ 1102.155874] env[61839]: } to complete. 
{{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1102.166508] env[61839]: DEBUG oslo_vmware.api [None req-3e9c49a4-6e8c-4a13-b5bd-2b12915bc206 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1315085, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1102.200210] env[61839]: DEBUG nova.scheduler.client.report [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Updated inventory for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 with generation 136 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1102.200451] env[61839]: DEBUG nova.compute.provider_tree [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Updating resource provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 generation from 136 to 137 during operation: update_inventory {{(pid=61839) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1102.200602] env[61839]: DEBUG nova.compute.provider_tree [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Updating inventory in ProviderTree for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1102.378313] env[61839]: DEBUG nova.network.neutron [None req-15f2500c-8f33-49fb-8999-1e6b16dce139 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: 6b12ef55-b566-4a74-a794-b4e4c41debe1] Updating instance_info_cache with network_info: [{"id": "df30d6b1-3fc6-465f-9b51-353f874ccd30", "address": "fa:16:3e:0c:09:8e", "network": {"id": "41c98894-de91-45eb-a390-6217e0f9dca5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1040806357-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.170", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7e03467b7fba46a9aac1562a1cb8368e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "572b7281-aad3-45fa-9cb2-fc1c70569948", "external-id": "nsx-vlan-transportzone-722", "segmentation_id": 722, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdf30d6b1-3f", "ovs_interfaceid": 
"df30d6b1-3fc6-465f-9b51-353f874ccd30", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "da45fc64-4aa9-4af8-b820-45a5fe4da356", "address": "fa:16:3e:df:3c:4e", "network": {"id": "41c98894-de91-45eb-a390-6217e0f9dca5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1040806357-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7e03467b7fba46a9aac1562a1cb8368e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "572b7281-aad3-45fa-9cb2-fc1c70569948", "external-id": "nsx-vlan-transportzone-722", "segmentation_id": 722, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapda45fc64-4a", "ovs_interfaceid": "da45fc64-4aa9-4af8-b820-45a5fe4da356", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1102.548395] env[61839]: DEBUG oslo_vmware.api [None req-d4141cf3-51df-4c8e-b676-b032690d2a4b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1315082, 'name': ReconfigVM_Task, 'duration_secs': 0.887127} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1102.549045] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-d4141cf3-51df-4c8e-b676-b032690d2a4b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 25c574c4-e39b-4009-a562-a4a5bf74a40c] Reconfigured VM instance instance-00000067 to attach disk [datastore2] volume-bf363321-5f33-48dc-9ed5-40c5f2cf0d88/volume-bf363321-5f33-48dc-9ed5-40c5f2cf0d88.vmdk or device None with type thin {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1102.553321] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-dee3023e-cceb-4df1-92d6-c936a7d845d0 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.571215] env[61839]: DEBUG oslo_vmware.api [None req-8fcfcbf5-3f0a-491e-bfdb-c64d3d7aaef6 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Task: {'id': task-1315084, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.041238} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1102.572245] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-8fcfcbf5-3f0a-491e-bfdb-c64d3d7aaef6 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Deleted the datastore file {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1102.572427] env[61839]: DEBUG oslo_concurrency.lockutils [None req-8fcfcbf5-3f0a-491e-bfdb-c64d3d7aaef6 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f647401d-2329-4d44-8490-33aad50e15f2/f647401d-2329-4d44-8490-33aad50e15f2.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1102.572681] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-8fcfcbf5-3f0a-491e-bfdb-c64d3d7aaef6 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f647401d-2329-4d44-8490-33aad50e15f2/f647401d-2329-4d44-8490-33aad50e15f2.vmdk to [datastore2] a1defab7-8433-411d-b7e2-c31f6a34b8e0/a1defab7-8433-411d-b7e2-c31f6a34b8e0.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1102.573546] env[61839]: DEBUG oslo_vmware.api [None req-d4141cf3-51df-4c8e-b676-b032690d2a4b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Waiting for the task: (returnval){ [ 1102.573546] env[61839]: value = "task-1315086" [ 1102.573546] env[61839]: _type = "Task" [ 1102.573546] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1102.573546] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b1b6e922-d7ee-4e40-97c2-16a12ba98dfd {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.583095] env[61839]: DEBUG oslo_vmware.api [None req-d4141cf3-51df-4c8e-b676-b032690d2a4b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1315086, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1102.584375] env[61839]: DEBUG oslo_vmware.api [None req-8fcfcbf5-3f0a-491e-bfdb-c64d3d7aaef6 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Waiting for the task: (returnval){ [ 1102.584375] env[61839]: value = "task-1315087" [ 1102.584375] env[61839]: _type = "Task" [ 1102.584375] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1102.591910] env[61839]: DEBUG oslo_vmware.api [None req-8fcfcbf5-3f0a-491e-bfdb-c64d3d7aaef6 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Task: {'id': task-1315087, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1102.666423] env[61839]: DEBUG oslo_vmware.api [None req-3e9c49a4-6e8c-4a13-b5bd-2b12915bc206 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1315085, 'name': ReconfigVM_Task, 'duration_secs': 0.302928} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1102.666785] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-3e9c49a4-6e8c-4a13-b5bd-2b12915bc206 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 625a8fc1-23fc-4035-855f-3d3a963cdcea] Reconfigured VM instance instance-00000064 to detach disk 2000 {{(pid=61839) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1102.667599] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fe53015-0a29-41f9-8519-3c4287e56b57 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.693969] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-3e9c49a4-6e8c-4a13-b5bd-2b12915bc206 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 625a8fc1-23fc-4035-855f-3d3a963cdcea] Reconfiguring VM instance instance-00000064 to attach disk [datastore1] 625a8fc1-23fc-4035-855f-3d3a963cdcea/625a8fc1-23fc-4035-855f-3d3a963cdcea.vmdk or device None with type thin {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1102.694506] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ea9aa814-bfc3-488c-8b77-0e7bcd3a22b0 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.708160] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61839) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1102.708367] env[61839]: DEBUG oslo_concurrency.lockutils [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 3.903s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1102.714057] env[61839]: DEBUG oslo_vmware.api [None req-3e9c49a4-6e8c-4a13-b5bd-2b12915bc206 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Waiting for the task: (returnval){ [ 1102.714057] env[61839]: value = "task-1315088" [ 1102.714057] env[61839]: _type = "Task" [ 1102.714057] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1102.730458] env[61839]: DEBUG oslo_vmware.api [None req-3e9c49a4-6e8c-4a13-b5bd-2b12915bc206 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1315088, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1102.882032] env[61839]: DEBUG oslo_concurrency.lockutils [None req-15f2500c-8f33-49fb-8999-1e6b16dce139 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Releasing lock "refresh_cache-6b12ef55-b566-4a74-a794-b4e4c41debe1" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1102.882179] env[61839]: DEBUG oslo_concurrency.lockutils [None req-15f2500c-8f33-49fb-8999-1e6b16dce139 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Acquiring lock "6b12ef55-b566-4a74-a794-b4e4c41debe1" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1102.882345] env[61839]: DEBUG oslo_concurrency.lockutils [None req-15f2500c-8f33-49fb-8999-1e6b16dce139 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Acquired lock "6b12ef55-b566-4a74-a794-b4e4c41debe1" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1102.883210] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6b2cdcd-3510-44a5-b8ba-b4039c1280cc {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.902777] env[61839]: DEBUG nova.virt.hardware [None req-15f2500c-8f33-49fb-8999-1e6b16dce139 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-18T16:51:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1102.903037] env[61839]: DEBUG nova.virt.hardware [None req-15f2500c-8f33-49fb-8999-1e6b16dce139 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Flavor limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1102.903279] env[61839]: DEBUG nova.virt.hardware [None req-15f2500c-8f33-49fb-8999-1e6b16dce139 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Image limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1102.903676] env[61839]: DEBUG nova.virt.hardware [None req-15f2500c-8f33-49fb-8999-1e6b16dce139 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Flavor pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1102.903869] env[61839]: DEBUG nova.virt.hardware [None req-15f2500c-8f33-49fb-8999-1e6b16dce139 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Image pref 0:0:0 {{(pid=61839) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1102.904053] env[61839]: DEBUG nova.virt.hardware [None req-15f2500c-8f33-49fb-8999-1e6b16dce139 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1102.904283] env[61839]: DEBUG nova.virt.hardware [None req-15f2500c-8f33-49fb-8999-1e6b16dce139 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1102.904448] env[61839]: DEBUG nova.virt.hardware [None req-15f2500c-8f33-49fb-8999-1e6b16dce139 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1102.904619] env[61839]: DEBUG nova.virt.hardware [None req-15f2500c-8f33-49fb-8999-1e6b16dce139 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Got 1 possible topologies {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1102.904784] env[61839]: DEBUG nova.virt.hardware [None req-15f2500c-8f33-49fb-8999-1e6b16dce139 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1102.904971] env[61839]: DEBUG nova.virt.hardware [None req-15f2500c-8f33-49fb-8999-1e6b16dce139 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1102.911466] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-15f2500c-8f33-49fb-8999-1e6b16dce139 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: 6b12ef55-b566-4a74-a794-b4e4c41debe1] Reconfiguring VM to attach interface {{(pid=61839) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1102.912281] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3ec636f6-d454-4f42-8beb-a9d2c9b1d0ce {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.930834] env[61839]: DEBUG oslo_vmware.api [None req-15f2500c-8f33-49fb-8999-1e6b16dce139 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Waiting for the task: (returnval){ [ 1102.930834] env[61839]: value = "task-1315089" [ 1102.930834] env[61839]: _type = "Task" [ 1102.930834] env[61839]: } to complete. 
{{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1102.939479] env[61839]: DEBUG oslo_vmware.api [None req-15f2500c-8f33-49fb-8999-1e6b16dce139 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': task-1315089, 'name': ReconfigVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1103.085544] env[61839]: DEBUG oslo_vmware.api [None req-d4141cf3-51df-4c8e-b676-b032690d2a4b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1315086, 'name': ReconfigVM_Task, 'duration_secs': 0.174883} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1103.089057] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-d4141cf3-51df-4c8e-b676-b032690d2a4b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 25c574c4-e39b-4009-a562-a4a5bf74a40c] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-281454', 'volume_id': 'bf363321-5f33-48dc-9ed5-40c5f2cf0d88', 'name': 'volume-bf363321-5f33-48dc-9ed5-40c5f2cf0d88', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '25c574c4-e39b-4009-a562-a4a5bf74a40c', 'attached_at': '', 'detached_at': '', 'volume_id': 'bf363321-5f33-48dc-9ed5-40c5f2cf0d88', 'serial': 'bf363321-5f33-48dc-9ed5-40c5f2cf0d88'} {{(pid=61839) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1103.089660] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e7c6f124-2f40-4351-a4c0-5b6663333b80 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.096674] env[61839]: DEBUG oslo_vmware.api [None req-8fcfcbf5-3f0a-491e-bfdb-c64d3d7aaef6 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Task: {'id': task-1315087, 'name': CopyVirtualDisk_Task} progress is 12%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1103.098066] env[61839]: DEBUG oslo_vmware.api [None req-d4141cf3-51df-4c8e-b676-b032690d2a4b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Waiting for the task: (returnval){ [ 1103.098066] env[61839]: value = "task-1315090" [ 1103.098066] env[61839]: _type = "Task" [ 1103.098066] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1103.111949] env[61839]: DEBUG oslo_vmware.api [None req-d4141cf3-51df-4c8e-b676-b032690d2a4b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1315090, 'name': Rename_Task} progress is 6%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1103.227496] env[61839]: DEBUG oslo_vmware.api [None req-3e9c49a4-6e8c-4a13-b5bd-2b12915bc206 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1315088, 'name': ReconfigVM_Task, 'duration_secs': 0.290346} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1103.227611] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-3e9c49a4-6e8c-4a13-b5bd-2b12915bc206 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 625a8fc1-23fc-4035-855f-3d3a963cdcea] Reconfigured VM instance instance-00000064 to attach disk [datastore1] 625a8fc1-23fc-4035-855f-3d3a963cdcea/625a8fc1-23fc-4035-855f-3d3a963cdcea.vmdk or device None with type thin {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1103.228765] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d97d149-8a3b-4661-a94f-32f8e74dcabb {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.250635] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3250c2ca-9c23-42ba-b0bd-c5cb76590e9c {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.271994] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-821ba0b0-2b46-495d-b1c8-25486e7d341f {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.296114] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74ed5ece-6cf5-4f2c-8598-14efd8af52eb {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.304417] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-3e9c49a4-6e8c-4a13-b5bd-2b12915bc206 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 625a8fc1-23fc-4035-855f-3d3a963cdcea] Powering on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1103.305114] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f62b2399-7b6a-4a3a-8daa-1a6fa7b88e1a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.311795] env[61839]: DEBUG oslo_vmware.api [None req-3e9c49a4-6e8c-4a13-b5bd-2b12915bc206 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Waiting for the task: (returnval){ [ 1103.311795] env[61839]: value = "task-1315091" [ 1103.311795] env[61839]: _type = "Task" [ 1103.311795] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1103.320224] env[61839]: DEBUG oslo_vmware.api [None req-3e9c49a4-6e8c-4a13-b5bd-2b12915bc206 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1315091, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1103.441385] env[61839]: DEBUG oslo_vmware.api [None req-15f2500c-8f33-49fb-8999-1e6b16dce139 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': task-1315089, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1103.595080] env[61839]: DEBUG oslo_vmware.api [None req-8fcfcbf5-3f0a-491e-bfdb-c64d3d7aaef6 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Task: {'id': task-1315087, 'name': CopyVirtualDisk_Task} progress is 35%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1103.607279] env[61839]: DEBUG oslo_vmware.api [None req-d4141cf3-51df-4c8e-b676-b032690d2a4b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1315090, 'name': Rename_Task, 'duration_secs': 0.417376} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1103.607589] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-d4141cf3-51df-4c8e-b676-b032690d2a4b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 25c574c4-e39b-4009-a562-a4a5bf74a40c] Powering on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1103.607852] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-80ac43db-69c1-4d04-9e70-49587add4d08 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.614493] env[61839]: DEBUG oslo_vmware.api [None req-d4141cf3-51df-4c8e-b676-b032690d2a4b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Waiting for the task: (returnval){ [ 1103.614493] env[61839]: value = "task-1315092" [ 1103.614493] env[61839]: _type = "Task" [ 1103.614493] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1103.623122] env[61839]: DEBUG oslo_vmware.api [None req-d4141cf3-51df-4c8e-b676-b032690d2a4b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1315092, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1103.821887] env[61839]: DEBUG oslo_vmware.api [None req-3e9c49a4-6e8c-4a13-b5bd-2b12915bc206 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1315091, 'name': PowerOnVM_Task, 'duration_secs': 0.410786} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1103.822232] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-3e9c49a4-6e8c-4a13-b5bd-2b12915bc206 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 625a8fc1-23fc-4035-855f-3d3a963cdcea] Powered on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1103.943080] env[61839]: DEBUG oslo_vmware.api [None req-15f2500c-8f33-49fb-8999-1e6b16dce139 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': task-1315089, 'name': ReconfigVM_Task, 'duration_secs': 0.75371} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1103.943647] env[61839]: DEBUG oslo_concurrency.lockutils [None req-15f2500c-8f33-49fb-8999-1e6b16dce139 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Releasing lock "6b12ef55-b566-4a74-a794-b4e4c41debe1" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1103.943883] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-15f2500c-8f33-49fb-8999-1e6b16dce139 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: 6b12ef55-b566-4a74-a794-b4e4c41debe1] Reconfigured VM to attach interface {{(pid=61839) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1104.096657] env[61839]: DEBUG oslo_vmware.api [None req-8fcfcbf5-3f0a-491e-bfdb-c64d3d7aaef6 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Task: {'id': task-1315087, 'name': CopyVirtualDisk_Task} progress is 57%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1104.124106] env[61839]: DEBUG oslo_vmware.api [None req-d4141cf3-51df-4c8e-b676-b032690d2a4b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1315092, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1104.449071] env[61839]: DEBUG oslo_concurrency.lockutils [None req-15f2500c-8f33-49fb-8999-1e6b16dce139 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Lock "interface-6b12ef55-b566-4a74-a794-b4e4c41debe1-da45fc64-4aa9-4af8-b820-45a5fe4da356" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 8.489s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1104.597918] env[61839]: DEBUG oslo_vmware.api [None req-8fcfcbf5-3f0a-491e-bfdb-c64d3d7aaef6 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Task: {'id': task-1315087, 'name': CopyVirtualDisk_Task} progress is 80%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1104.625217] env[61839]: DEBUG oslo_vmware.api [None req-d4141cf3-51df-4c8e-b676-b032690d2a4b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1315092, 'name': PowerOnVM_Task} progress is 89%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1104.870907] env[61839]: INFO nova.compute.manager [None req-3e9c49a4-6e8c-4a13-b5bd-2b12915bc206 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 625a8fc1-23fc-4035-855f-3d3a963cdcea] Updating instance to original state: 'active' [ 1105.097291] env[61839]: DEBUG oslo_vmware.api [None req-8fcfcbf5-3f0a-491e-bfdb-c64d3d7aaef6 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Task: {'id': task-1315087, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1105.125210] env[61839]: DEBUG oslo_vmware.api [None req-d4141cf3-51df-4c8e-b676-b032690d2a4b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1315092, 'name': PowerOnVM_Task, 'duration_secs': 1.200653} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1105.125497] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-d4141cf3-51df-4c8e-b676-b032690d2a4b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 25c574c4-e39b-4009-a562-a4a5bf74a40c] Powered on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1105.125713] env[61839]: DEBUG nova.compute.manager [None req-d4141cf3-51df-4c8e-b676-b032690d2a4b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 25c574c4-e39b-4009-a562-a4a5bf74a40c] Checking state {{(pid=61839) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1105.126554] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17649bf6-a2b6-41b3-8946-1dc5db42bc10 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.597844] env[61839]: DEBUG oslo_vmware.api [None req-8fcfcbf5-3f0a-491e-bfdb-c64d3d7aaef6 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Task: {'id': task-1315087, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.563546} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1105.597975] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-8fcfcbf5-3f0a-491e-bfdb-c64d3d7aaef6 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f647401d-2329-4d44-8490-33aad50e15f2/f647401d-2329-4d44-8490-33aad50e15f2.vmdk to [datastore2] a1defab7-8433-411d-b7e2-c31f6a34b8e0/a1defab7-8433-411d-b7e2-c31f6a34b8e0.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1105.598769] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3d7d096-73c5-4ae8-9a70-4c322cfd0054 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.620743] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-8fcfcbf5-3f0a-491e-bfdb-c64d3d7aaef6 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: a1defab7-8433-411d-b7e2-c31f6a34b8e0] Reconfiguring VM instance instance-00000065 to attach disk [datastore2] a1defab7-8433-411d-b7e2-c31f6a34b8e0/a1defab7-8433-411d-b7e2-c31f6a34b8e0.vmdk or device None with type streamOptimized {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1105.621041] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-00001819-f609-48c9-a7c8-586a6d9c56e5 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.642324] env[61839]: DEBUG oslo_concurrency.lockutils [None req-1467c3b1-fb8a-4a01-bc59-1f59e85b0a75 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Acquiring lock "interface-6b12ef55-b566-4a74-a794-b4e4c41debe1-da45fc64-4aa9-4af8-b820-45a5fe4da356" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1105.643167] env[61839]: DEBUG oslo_concurrency.lockutils [None req-1467c3b1-fb8a-4a01-bc59-1f59e85b0a75 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Lock "interface-6b12ef55-b566-4a74-a794-b4e4c41debe1-da45fc64-4aa9-4af8-b820-45a5fe4da356" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1105.646732] env[61839]: DEBUG oslo_vmware.api [None req-8fcfcbf5-3f0a-491e-bfdb-c64d3d7aaef6 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Waiting for the task: (returnval){ [ 1105.646732] env[61839]: value = "task-1315093" [ 1105.646732] env[61839]: _type = "Task" [ 1105.646732] env[61839]: } to complete. 
{{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1105.650167] env[61839]: DEBUG oslo_concurrency.lockutils [None req-d4141cf3-51df-4c8e-b676-b032690d2a4b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1105.650403] env[61839]: DEBUG oslo_concurrency.lockutils [None req-d4141cf3-51df-4c8e-b676-b032690d2a4b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1105.650582] env[61839]: DEBUG nova.objects.instance [None req-d4141cf3-51df-4c8e-b676-b032690d2a4b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 25c574c4-e39b-4009-a562-a4a5bf74a40c] Trying to apply a migration context that does not seem to be set for this instance {{(pid=61839) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1105.659589] env[61839]: DEBUG oslo_vmware.api [None req-8fcfcbf5-3f0a-491e-bfdb-c64d3d7aaef6 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Task: {'id': task-1315093, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1106.148454] env[61839]: DEBUG oslo_concurrency.lockutils [None req-1467c3b1-fb8a-4a01-bc59-1f59e85b0a75 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Acquiring lock "6b12ef55-b566-4a74-a794-b4e4c41debe1" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1106.148688] env[61839]: DEBUG oslo_concurrency.lockutils [None req-1467c3b1-fb8a-4a01-bc59-1f59e85b0a75 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Acquired lock "6b12ef55-b566-4a74-a794-b4e4c41debe1" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1106.149734] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ed13e58-b285-4cb0-bed5-b4007ecbffd3 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.165310] env[61839]: DEBUG oslo_vmware.api [None req-8fcfcbf5-3f0a-491e-bfdb-c64d3d7aaef6 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Task: {'id': task-1315093, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1106.178111] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3ed0aaf0-7e70-4643-9dfe-5cbd0da38c9e tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Acquiring lock "625a8fc1-23fc-4035-855f-3d3a963cdcea" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1106.178380] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3ed0aaf0-7e70-4643-9dfe-5cbd0da38c9e tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Lock "625a8fc1-23fc-4035-855f-3d3a963cdcea" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1106.178616] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3ed0aaf0-7e70-4643-9dfe-5cbd0da38c9e tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Acquiring lock "625a8fc1-23fc-4035-855f-3d3a963cdcea-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1106.178815] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3ed0aaf0-7e70-4643-9dfe-5cbd0da38c9e tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Lock "625a8fc1-23fc-4035-855f-3d3a963cdcea-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1106.179040] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3ed0aaf0-7e70-4643-9dfe-5cbd0da38c9e tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Lock "625a8fc1-23fc-4035-855f-3d3a963cdcea-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1106.181895] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87b72f4f-e703-4521-a25a-c1b570b17beb {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.184747] env[61839]: INFO nova.compute.manager [None req-3ed0aaf0-7e70-4643-9dfe-5cbd0da38c9e tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 625a8fc1-23fc-4035-855f-3d3a963cdcea] Terminating instance [ 1106.187758] env[61839]: DEBUG nova.compute.manager [None req-3ed0aaf0-7e70-4643-9dfe-5cbd0da38c9e tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 625a8fc1-23fc-4035-855f-3d3a963cdcea] Start destroying the instance on the hypervisor. 
{{(pid=61839) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1106.187970] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ed0aaf0-7e70-4643-9dfe-5cbd0da38c9e tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 625a8fc1-23fc-4035-855f-3d3a963cdcea] Powering off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1106.188842] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ec05dd2c-720c-4441-b1de-283b870af5f5 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.212091] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-1467c3b1-fb8a-4a01-bc59-1f59e85b0a75 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: 6b12ef55-b566-4a74-a794-b4e4c41debe1] Reconfiguring VM to detach interface {{(pid=61839) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1106.213524] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-54485692-2d90-4501-ac1a-fe9321581266 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.226586] env[61839]: DEBUG oslo_vmware.api [None req-3ed0aaf0-7e70-4643-9dfe-5cbd0da38c9e tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Waiting for the task: (returnval){ [ 1106.226586] env[61839]: value = "task-1315094" [ 1106.226586] env[61839]: _type = "Task" [ 1106.226586] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1106.232135] env[61839]: DEBUG oslo_vmware.api [None req-1467c3b1-fb8a-4a01-bc59-1f59e85b0a75 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Waiting for the task: (returnval){ [ 1106.232135] env[61839]: value = "task-1315095" [ 1106.232135] env[61839]: _type = "Task" [ 1106.232135] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1106.235090] env[61839]: DEBUG oslo_vmware.api [None req-3ed0aaf0-7e70-4643-9dfe-5cbd0da38c9e tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1315094, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1106.242446] env[61839]: DEBUG oslo_vmware.api [None req-1467c3b1-fb8a-4a01-bc59-1f59e85b0a75 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': task-1315095, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1106.659394] env[61839]: DEBUG oslo_vmware.api [None req-8fcfcbf5-3f0a-491e-bfdb-c64d3d7aaef6 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Task: {'id': task-1315093, 'name': ReconfigVM_Task, 'duration_secs': 0.547075} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1106.659751] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-8fcfcbf5-3f0a-491e-bfdb-c64d3d7aaef6 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: a1defab7-8433-411d-b7e2-c31f6a34b8e0] Reconfigured VM instance instance-00000065 to attach disk [datastore2] a1defab7-8433-411d-b7e2-c31f6a34b8e0/a1defab7-8433-411d-b7e2-c31f6a34b8e0.vmdk or device None with type streamOptimized {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1106.660299] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2f1ece66-d24b-4434-a92b-44b7ccf165d3 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.662721] env[61839]: DEBUG oslo_concurrency.lockutils [None req-d4141cf3-51df-4c8e-b676-b032690d2a4b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.012s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1106.669131] env[61839]: DEBUG oslo_vmware.api [None req-8fcfcbf5-3f0a-491e-bfdb-c64d3d7aaef6 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Waiting for the task: (returnval){ [ 1106.669131] env[61839]: value = "task-1315096" [ 1106.669131] env[61839]: _type = "Task" [ 1106.669131] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1106.677362] env[61839]: DEBUG oslo_vmware.api [None req-8fcfcbf5-3f0a-491e-bfdb-c64d3d7aaef6 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Task: {'id': task-1315096, 'name': Rename_Task} progress is 5%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1106.735874] env[61839]: DEBUG oslo_vmware.api [None req-3ed0aaf0-7e70-4643-9dfe-5cbd0da38c9e tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1315094, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1106.743662] env[61839]: DEBUG oslo_vmware.api [None req-1467c3b1-fb8a-4a01-bc59-1f59e85b0a75 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': task-1315095, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1106.749179] env[61839]: DEBUG oslo_concurrency.lockutils [None req-e823b6fb-d1df-4f97-ae73-0ab24098e488 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Acquiring lock "25c574c4-e39b-4009-a562-a4a5bf74a40c" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1106.749420] env[61839]: DEBUG oslo_concurrency.lockutils [None req-e823b6fb-d1df-4f97-ae73-0ab24098e488 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Lock "25c574c4-e39b-4009-a562-a4a5bf74a40c" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1107.180997] env[61839]: DEBUG oslo_vmware.api [None req-8fcfcbf5-3f0a-491e-bfdb-c64d3d7aaef6 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Task: {'id': task-1315096, 'name': Rename_Task, 'duration_secs': 0.228132} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1107.181304] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-8fcfcbf5-3f0a-491e-bfdb-c64d3d7aaef6 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: a1defab7-8433-411d-b7e2-c31f6a34b8e0] Powering on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1107.181557] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0368db01-4943-46da-abd8-cacfb93d75a9 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.188335] env[61839]: DEBUG oslo_vmware.api [None req-8fcfcbf5-3f0a-491e-bfdb-c64d3d7aaef6 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Waiting for the task: (returnval){ [ 1107.188335] env[61839]: value = "task-1315097" [ 1107.188335] env[61839]: _type = "Task" [ 1107.188335] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1107.195781] env[61839]: DEBUG oslo_vmware.api [None req-8fcfcbf5-3f0a-491e-bfdb-c64d3d7aaef6 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Task: {'id': task-1315097, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1107.236606] env[61839]: DEBUG oslo_vmware.api [None req-3ed0aaf0-7e70-4643-9dfe-5cbd0da38c9e tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1315094, 'name': PowerOffVM_Task, 'duration_secs': 0.656545} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1107.240235] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ed0aaf0-7e70-4643-9dfe-5cbd0da38c9e tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 625a8fc1-23fc-4035-855f-3d3a963cdcea] Powered off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1107.240507] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-3ed0aaf0-7e70-4643-9dfe-5cbd0da38c9e tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 625a8fc1-23fc-4035-855f-3d3a963cdcea] Volume detach. Driver type: vmdk {{(pid=61839) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1107.240730] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-3ed0aaf0-7e70-4643-9dfe-5cbd0da38c9e tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 625a8fc1-23fc-4035-855f-3d3a963cdcea] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-281450', 'volume_id': 'd4d58d24-9376-47ab-ae70-ee11dcb0e43c', 'name': 'volume-d4d58d24-9376-47ab-ae70-ee11dcb0e43c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attaching', 'instance': '625a8fc1-23fc-4035-855f-3d3a963cdcea', 'attached_at': '2024-10-18T17:02:39.000000', 'detached_at': '', 'volume_id': 'd4d58d24-9376-47ab-ae70-ee11dcb0e43c', 'serial': 'd4d58d24-9376-47ab-ae70-ee11dcb0e43c'} {{(pid=61839) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1107.241543] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72045dc5-b442-4315-bd16-9f2de4c8233b {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.248684] env[61839]: DEBUG oslo_vmware.api [None req-1467c3b1-fb8a-4a01-bc59-1f59e85b0a75 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': task-1315095, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1107.264576] env[61839]: INFO nova.compute.manager [None req-e823b6fb-d1df-4f97-ae73-0ab24098e488 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 25c574c4-e39b-4009-a562-a4a5bf74a40c] Detaching volume bf363321-5f33-48dc-9ed5-40c5f2cf0d88 [ 1107.267568] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f956ada-8aaa-49e4-ab0e-3e793d603b04 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.276496] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a952574-2148-4b52-85af-102744d873a7 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.298042] env[61839]: INFO nova.virt.block_device [None req-e823b6fb-d1df-4f97-ae73-0ab24098e488 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 25c574c4-e39b-4009-a562-a4a5bf74a40c] Attempting to driver detach volume bf363321-5f33-48dc-9ed5-40c5f2cf0d88 from mountpoint /dev/sdb [ 1107.298291] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-e823b6fb-d1df-4f97-ae73-0ab24098e488 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 25c574c4-e39b-4009-a562-a4a5bf74a40c] Volume detach. Driver type: vmdk {{(pid=61839) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1107.298484] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-e823b6fb-d1df-4f97-ae73-0ab24098e488 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 25c574c4-e39b-4009-a562-a4a5bf74a40c] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-281454', 'volume_id': 'bf363321-5f33-48dc-9ed5-40c5f2cf0d88', 'name': 'volume-bf363321-5f33-48dc-9ed5-40c5f2cf0d88', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '25c574c4-e39b-4009-a562-a4a5bf74a40c', 'attached_at': '', 'detached_at': '', 'volume_id': 'bf363321-5f33-48dc-9ed5-40c5f2cf0d88', 'serial': 'bf363321-5f33-48dc-9ed5-40c5f2cf0d88'} {{(pid=61839) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1107.299291] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e009b354-394a-4b6d-9ca2-2eac929a0cda {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.302436] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14eb6c1a-fc8f-41a5-96d5-08ab6c8aeb42 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.333721] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-3ed0aaf0-7e70-4643-9dfe-5cbd0da38c9e tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] The volume has not been displaced from its original location: [datastore1] volume-d4d58d24-9376-47ab-ae70-ee11dcb0e43c/volume-d4d58d24-9376-47ab-ae70-ee11dcb0e43c.vmdk. No consolidation needed. 
{{(pid=61839) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1107.339086] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-3ed0aaf0-7e70-4643-9dfe-5cbd0da38c9e tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 625a8fc1-23fc-4035-855f-3d3a963cdcea] Reconfiguring VM instance instance-00000064 to detach disk 2001 {{(pid=61839) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1107.339459] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-128b351c-0d75-403e-9af6-c3d3bb7889d7 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.352818] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbe4c035-059b-4ae1-81da-b5d8c097dc50 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.361568] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a88bf32-a07f-4868-b5bb-0e93659ccea6 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.364017] env[61839]: DEBUG oslo_vmware.api [None req-3ed0aaf0-7e70-4643-9dfe-5cbd0da38c9e tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Waiting for the task: (returnval){ [ 1107.364017] env[61839]: value = "task-1315098" [ 1107.364017] env[61839]: _type = "Task" [ 1107.364017] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1107.383430] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b70546d9-b21f-434a-8c79-17543ec3001c {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.389301] env[61839]: DEBUG oslo_vmware.api [None req-3ed0aaf0-7e70-4643-9dfe-5cbd0da38c9e tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1315098, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1107.401847] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-e823b6fb-d1df-4f97-ae73-0ab24098e488 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] The volume has not been displaced from its original location: [datastore2] volume-bf363321-5f33-48dc-9ed5-40c5f2cf0d88/volume-bf363321-5f33-48dc-9ed5-40c5f2cf0d88.vmdk. No consolidation needed. 
{{(pid=61839) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1107.407198] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-e823b6fb-d1df-4f97-ae73-0ab24098e488 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 25c574c4-e39b-4009-a562-a4a5bf74a40c] Reconfiguring VM instance instance-00000067 to detach disk 2001 {{(pid=61839) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1107.407543] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ee1a0955-a8e9-46c5-bbe4-361ee4947ae8 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.425309] env[61839]: DEBUG oslo_vmware.api [None req-e823b6fb-d1df-4f97-ae73-0ab24098e488 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Waiting for the task: (returnval){ [ 1107.425309] env[61839]: value = "task-1315099" [ 1107.425309] env[61839]: _type = "Task" [ 1107.425309] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1107.432969] env[61839]: DEBUG oslo_vmware.api [None req-e823b6fb-d1df-4f97-ae73-0ab24098e488 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1315099, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1107.699162] env[61839]: DEBUG oslo_vmware.api [None req-8fcfcbf5-3f0a-491e-bfdb-c64d3d7aaef6 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Task: {'id': task-1315097, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1107.745538] env[61839]: DEBUG oslo_vmware.api [None req-1467c3b1-fb8a-4a01-bc59-1f59e85b0a75 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': task-1315095, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1107.873656] env[61839]: DEBUG oslo_vmware.api [None req-3ed0aaf0-7e70-4643-9dfe-5cbd0da38c9e tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1315098, 'name': ReconfigVM_Task} progress is 99%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1107.935271] env[61839]: DEBUG oslo_vmware.api [None req-e823b6fb-d1df-4f97-ae73-0ab24098e488 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1315099, 'name': ReconfigVM_Task} progress is 99%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1108.199619] env[61839]: DEBUG oslo_vmware.api [None req-8fcfcbf5-3f0a-491e-bfdb-c64d3d7aaef6 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Task: {'id': task-1315097, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1108.245419] env[61839]: DEBUG oslo_vmware.api [None req-1467c3b1-fb8a-4a01-bc59-1f59e85b0a75 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': task-1315095, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1108.374498] env[61839]: DEBUG oslo_vmware.api [None req-3ed0aaf0-7e70-4643-9dfe-5cbd0da38c9e tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1315098, 'name': ReconfigVM_Task, 'duration_secs': 0.865854} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1108.374783] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-3ed0aaf0-7e70-4643-9dfe-5cbd0da38c9e tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 625a8fc1-23fc-4035-855f-3d3a963cdcea] Reconfigured VM instance instance-00000064 to detach disk 2001 {{(pid=61839) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1108.379491] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5802a22b-f94f-42c8-8343-f2a4fcc47176 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.393589] env[61839]: DEBUG oslo_vmware.api [None req-3ed0aaf0-7e70-4643-9dfe-5cbd0da38c9e tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Waiting for the task: (returnval){ [ 1108.393589] env[61839]: value = "task-1315100" [ 1108.393589] env[61839]: _type = "Task" [ 1108.393589] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1108.402298] env[61839]: DEBUG oslo_vmware.api [None req-3ed0aaf0-7e70-4643-9dfe-5cbd0da38c9e tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1315100, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1108.435300] env[61839]: DEBUG oslo_vmware.api [None req-e823b6fb-d1df-4f97-ae73-0ab24098e488 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1315099, 'name': ReconfigVM_Task, 'duration_secs': 0.828032} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1108.435572] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-e823b6fb-d1df-4f97-ae73-0ab24098e488 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 25c574c4-e39b-4009-a562-a4a5bf74a40c] Reconfigured VM instance instance-00000067 to detach disk 2001 {{(pid=61839) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1108.440098] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cce7adf3-d0f7-4fa5-be4c-80bc2e8b2871 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.454339] env[61839]: DEBUG oslo_vmware.api [None req-e823b6fb-d1df-4f97-ae73-0ab24098e488 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Waiting for the task: (returnval){ [ 1108.454339] env[61839]: value = "task-1315101" [ 1108.454339] env[61839]: _type = "Task" [ 1108.454339] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1108.463259] env[61839]: DEBUG oslo_vmware.api [None req-e823b6fb-d1df-4f97-ae73-0ab24098e488 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1315101, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1108.699432] env[61839]: DEBUG oslo_vmware.api [None req-8fcfcbf5-3f0a-491e-bfdb-c64d3d7aaef6 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Task: {'id': task-1315097, 'name': PowerOnVM_Task, 'duration_secs': 1.139322} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1108.699785] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-8fcfcbf5-3f0a-491e-bfdb-c64d3d7aaef6 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: a1defab7-8433-411d-b7e2-c31f6a34b8e0] Powered on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1108.745140] env[61839]: DEBUG oslo_vmware.api [None req-1467c3b1-fb8a-4a01-bc59-1f59e85b0a75 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': task-1315095, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1108.794963] env[61839]: DEBUG nova.compute.manager [None req-8fcfcbf5-3f0a-491e-bfdb-c64d3d7aaef6 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: a1defab7-8433-411d-b7e2-c31f6a34b8e0] Checking state {{(pid=61839) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1108.795831] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e49b7cb6-40cf-4501-a81e-3755933e09aa {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.903181] env[61839]: DEBUG oslo_vmware.api [None req-3ed0aaf0-7e70-4643-9dfe-5cbd0da38c9e tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1315100, 'name': ReconfigVM_Task, 'duration_secs': 0.167704} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1108.903449] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-3ed0aaf0-7e70-4643-9dfe-5cbd0da38c9e tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 625a8fc1-23fc-4035-855f-3d3a963cdcea] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-281450', 'volume_id': 'd4d58d24-9376-47ab-ae70-ee11dcb0e43c', 'name': 'volume-d4d58d24-9376-47ab-ae70-ee11dcb0e43c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attaching', 'instance': '625a8fc1-23fc-4035-855f-3d3a963cdcea', 'attached_at': '2024-10-18T17:02:39.000000', 'detached_at': '', 'volume_id': 'd4d58d24-9376-47ab-ae70-ee11dcb0e43c', 'serial': 'd4d58d24-9376-47ab-ae70-ee11dcb0e43c'} {{(pid=61839) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1108.903741] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-3ed0aaf0-7e70-4643-9dfe-5cbd0da38c9e tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 625a8fc1-23fc-4035-855f-3d3a963cdcea] Destroying instance {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1108.904536] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a53e1200-17cf-431a-9055-582cde39c3e6 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.911052] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-3ed0aaf0-7e70-4643-9dfe-5cbd0da38c9e tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 625a8fc1-23fc-4035-855f-3d3a963cdcea] Unregistering the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1108.911285] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-480de934-72f4-4f55-bcab-2ad1dca2fbd1 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.964669] env[61839]: DEBUG oslo_vmware.api [None req-e823b6fb-d1df-4f97-ae73-0ab24098e488 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1315101, 'name': ReconfigVM_Task, 'duration_secs': 0.138935} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1108.965113] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-e823b6fb-d1df-4f97-ae73-0ab24098e488 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 25c574c4-e39b-4009-a562-a4a5bf74a40c] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-281454', 'volume_id': 'bf363321-5f33-48dc-9ed5-40c5f2cf0d88', 'name': 'volume-bf363321-5f33-48dc-9ed5-40c5f2cf0d88', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '25c574c4-e39b-4009-a562-a4a5bf74a40c', 'attached_at': '', 'detached_at': '', 'volume_id': 'bf363321-5f33-48dc-9ed5-40c5f2cf0d88', 'serial': 'bf363321-5f33-48dc-9ed5-40c5f2cf0d88'} {{(pid=61839) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1108.969678] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-3ed0aaf0-7e70-4643-9dfe-5cbd0da38c9e tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 625a8fc1-23fc-4035-855f-3d3a963cdcea] Unregistered the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1108.969896] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-3ed0aaf0-7e70-4643-9dfe-5cbd0da38c9e tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 625a8fc1-23fc-4035-855f-3d3a963cdcea] Deleting contents of the VM from datastore datastore1 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1108.970110] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-3ed0aaf0-7e70-4643-9dfe-5cbd0da38c9e tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Deleting the datastore file [datastore1] 625a8fc1-23fc-4035-855f-3d3a963cdcea {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1108.970391] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-741ea34e-a55a-490a-bdb1-ed5781b9c80c {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.977150] env[61839]: DEBUG oslo_vmware.api [None req-3ed0aaf0-7e70-4643-9dfe-5cbd0da38c9e tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Waiting for the task: (returnval){ [ 1108.977150] env[61839]: value = "task-1315103" [ 1108.977150] env[61839]: _type = "Task" [ 1108.977150] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1108.984703] env[61839]: DEBUG oslo_vmware.api [None req-3ed0aaf0-7e70-4643-9dfe-5cbd0da38c9e tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1315103, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1109.246584] env[61839]: DEBUG oslo_vmware.api [None req-1467c3b1-fb8a-4a01-bc59-1f59e85b0a75 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': task-1315095, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1109.310758] env[61839]: DEBUG oslo_concurrency.lockutils [None req-8fcfcbf5-3f0a-491e-bfdb-c64d3d7aaef6 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Lock "a1defab7-8433-411d-b7e2-c31f6a34b8e0" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 22.543s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1109.486859] env[61839]: DEBUG oslo_vmware.api [None req-3ed0aaf0-7e70-4643-9dfe-5cbd0da38c9e tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1315103, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.178211} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1109.487132] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-3ed0aaf0-7e70-4643-9dfe-5cbd0da38c9e tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Deleted the datastore file {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1109.487350] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-3ed0aaf0-7e70-4643-9dfe-5cbd0da38c9e tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 625a8fc1-23fc-4035-855f-3d3a963cdcea] Deleted contents of the VM from datastore datastore1 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1109.487558] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-3ed0aaf0-7e70-4643-9dfe-5cbd0da38c9e tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 625a8fc1-23fc-4035-855f-3d3a963cdcea] Instance destroyed {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1109.487737] env[61839]: INFO nova.compute.manager [None req-3ed0aaf0-7e70-4643-9dfe-5cbd0da38c9e tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 625a8fc1-23fc-4035-855f-3d3a963cdcea] Took 3.30 seconds to destroy the instance on the hypervisor. [ 1109.487977] env[61839]: DEBUG oslo.service.loopingcall [None req-3ed0aaf0-7e70-4643-9dfe-5cbd0da38c9e tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1109.488184] env[61839]: DEBUG nova.compute.manager [-] [instance: 625a8fc1-23fc-4035-855f-3d3a963cdcea] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1109.488278] env[61839]: DEBUG nova.network.neutron [-] [instance: 625a8fc1-23fc-4035-855f-3d3a963cdcea] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1109.509888] env[61839]: DEBUG nova.objects.instance [None req-e823b6fb-d1df-4f97-ae73-0ab24098e488 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Lazy-loading 'flavor' on Instance uuid 25c574c4-e39b-4009-a562-a4a5bf74a40c {{(pid=61839) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1109.748096] env[61839]: DEBUG oslo_vmware.api [None req-1467c3b1-fb8a-4a01-bc59-1f59e85b0a75 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': task-1315095, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1109.905170] env[61839]: DEBUG nova.compute.manager [req-d6566a73-87fa-4479-91c4-852439650447 req-83aa74e7-b513-4b6c-b501-edca660124d5 service nova] [instance: 625a8fc1-23fc-4035-855f-3d3a963cdcea] Received event network-vif-deleted-da11baa5-354e-440b-a384-10cd83ff2715 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1109.905170] env[61839]: INFO nova.compute.manager [req-d6566a73-87fa-4479-91c4-852439650447 req-83aa74e7-b513-4b6c-b501-edca660124d5 service nova] [instance: 625a8fc1-23fc-4035-855f-3d3a963cdcea] Neutron deleted interface da11baa5-354e-440b-a384-10cd83ff2715; detaching it from the instance and deleting it from the info cache [ 1109.905443] env[61839]: DEBUG nova.network.neutron [req-d6566a73-87fa-4479-91c4-852439650447 req-83aa74e7-b513-4b6c-b501-edca660124d5 service nova] [instance: 625a8fc1-23fc-4035-855f-3d3a963cdcea] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1110.250923] env[61839]: DEBUG oslo_vmware.api [None req-1467c3b1-fb8a-4a01-bc59-1f59e85b0a75 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': task-1315095, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1110.384483] env[61839]: DEBUG nova.network.neutron [-] [instance: 625a8fc1-23fc-4035-855f-3d3a963cdcea] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1110.408152] env[61839]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-152211c6-0d16-424a-8241-6c2e5c8a430b {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.418973] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a771abad-7f7e-4137-8a25-42edf9b51bc1 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.444403] env[61839]: DEBUG nova.compute.manager [req-d6566a73-87fa-4479-91c4-852439650447 req-83aa74e7-b513-4b6c-b501-edca660124d5 service nova] [instance: 625a8fc1-23fc-4035-855f-3d3a963cdcea] Detach interface failed, port_id=da11baa5-354e-440b-a384-10cd83ff2715, reason: Instance 625a8fc1-23fc-4035-855f-3d3a963cdcea could not be found. {{(pid=61839) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1110.516404] env[61839]: DEBUG oslo_concurrency.lockutils [None req-e823b6fb-d1df-4f97-ae73-0ab24098e488 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Lock "25c574c4-e39b-4009-a562-a4a5bf74a40c" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.767s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1110.749595] env[61839]: DEBUG oslo_vmware.api [None req-1467c3b1-fb8a-4a01-bc59-1f59e85b0a75 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': task-1315095, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1110.887138] env[61839]: INFO nova.compute.manager [-] [instance: 625a8fc1-23fc-4035-855f-3d3a963cdcea] Took 1.40 seconds to deallocate network for instance. 
[ 1111.008028] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3a8f5f46-b611-4d7d-a8a8-5414f5deb339 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Acquiring lock "25c574c4-e39b-4009-a562-a4a5bf74a40c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1111.008028] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3a8f5f46-b611-4d7d-a8a8-5414f5deb339 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Lock "25c574c4-e39b-4009-a562-a4a5bf74a40c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1111.008028] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3a8f5f46-b611-4d7d-a8a8-5414f5deb339 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Acquiring lock "25c574c4-e39b-4009-a562-a4a5bf74a40c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1111.008028] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3a8f5f46-b611-4d7d-a8a8-5414f5deb339 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Lock "25c574c4-e39b-4009-a562-a4a5bf74a40c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1111.008028] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3a8f5f46-b611-4d7d-a8a8-5414f5deb339 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Lock "25c574c4-e39b-4009-a562-a4a5bf74a40c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1111.010057] env[61839]: INFO nova.compute.manager [None req-3a8f5f46-b611-4d7d-a8a8-5414f5deb339 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 25c574c4-e39b-4009-a562-a4a5bf74a40c] Terminating instance [ 1111.011798] env[61839]: DEBUG nova.compute.manager [None req-3a8f5f46-b611-4d7d-a8a8-5414f5deb339 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 25c574c4-e39b-4009-a562-a4a5bf74a40c] Start destroying the instance on the hypervisor. 
{{(pid=61839) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1111.011998] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-3a8f5f46-b611-4d7d-a8a8-5414f5deb339 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 25c574c4-e39b-4009-a562-a4a5bf74a40c] Destroying instance {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1111.012828] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e747c49-11d4-42ce-97a4-4db6cc53bfe9 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.021352] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-3a8f5f46-b611-4d7d-a8a8-5414f5deb339 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 25c574c4-e39b-4009-a562-a4a5bf74a40c] Powering off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1111.021581] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-646f59df-5785-4a58-9d11-4e77d2a340c4 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.027408] env[61839]: DEBUG oslo_vmware.api [None req-3a8f5f46-b611-4d7d-a8a8-5414f5deb339 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Waiting for the task: (returnval){ [ 1111.027408] env[61839]: value = "task-1315104" [ 1111.027408] env[61839]: _type = "Task" [ 1111.027408] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1111.035545] env[61839]: DEBUG oslo_vmware.api [None req-3a8f5f46-b611-4d7d-a8a8-5414f5deb339 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1315104, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1111.250726] env[61839]: DEBUG oslo_vmware.api [None req-1467c3b1-fb8a-4a01-bc59-1f59e85b0a75 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': task-1315095, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1111.429856] env[61839]: INFO nova.compute.manager [None req-3ed0aaf0-7e70-4643-9dfe-5cbd0da38c9e tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 625a8fc1-23fc-4035-855f-3d3a963cdcea] Took 0.54 seconds to detach 1 volumes for instance. [ 1111.536912] env[61839]: DEBUG oslo_vmware.api [None req-3a8f5f46-b611-4d7d-a8a8-5414f5deb339 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1315104, 'name': PowerOffVM_Task, 'duration_secs': 0.221863} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1111.537267] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-3a8f5f46-b611-4d7d-a8a8-5414f5deb339 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 25c574c4-e39b-4009-a562-a4a5bf74a40c] Powered off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1111.537491] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-3a8f5f46-b611-4d7d-a8a8-5414f5deb339 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 25c574c4-e39b-4009-a562-a4a5bf74a40c] Unregistering the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1111.537779] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f926ba24-829a-40fd-a205-7f8800d1cfd9 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.639829] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-3a8f5f46-b611-4d7d-a8a8-5414f5deb339 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 25c574c4-e39b-4009-a562-a4a5bf74a40c] Unregistered the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1111.640165] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-3a8f5f46-b611-4d7d-a8a8-5414f5deb339 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 25c574c4-e39b-4009-a562-a4a5bf74a40c] Deleting contents of the VM from datastore datastore2 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1111.640393] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-3a8f5f46-b611-4d7d-a8a8-5414f5deb339 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Deleting the datastore file [datastore2] 25c574c4-e39b-4009-a562-a4a5bf74a40c {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1111.640736] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fcbc10dc-3b20-409a-a674-d49862ec5803 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.646490] env[61839]: DEBUG oslo_vmware.api [None req-3a8f5f46-b611-4d7d-a8a8-5414f5deb339 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Waiting for the task: (returnval){ [ 1111.646490] env[61839]: value = "task-1315106" [ 1111.646490] env[61839]: _type = "Task" [ 1111.646490] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1111.655717] env[61839]: DEBUG oslo_vmware.api [None req-3a8f5f46-b611-4d7d-a8a8-5414f5deb339 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1315106, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1111.750792] env[61839]: DEBUG oslo_vmware.api [None req-1467c3b1-fb8a-4a01-bc59-1f59e85b0a75 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': task-1315095, 'name': ReconfigVM_Task} progress is 18%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1111.936306] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3ed0aaf0-7e70-4643-9dfe-5cbd0da38c9e tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1111.936617] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3ed0aaf0-7e70-4643-9dfe-5cbd0da38c9e tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1111.936849] env[61839]: DEBUG nova.objects.instance [None req-3ed0aaf0-7e70-4643-9dfe-5cbd0da38c9e tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Lazy-loading 'resources' on Instance uuid 625a8fc1-23fc-4035-855f-3d3a963cdcea {{(pid=61839) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1112.156296] env[61839]: DEBUG oslo_vmware.api [None req-3a8f5f46-b611-4d7d-a8a8-5414f5deb339 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1315106, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.128172} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1112.156588] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-3a8f5f46-b611-4d7d-a8a8-5414f5deb339 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Deleted the datastore file {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1112.156783] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-3a8f5f46-b611-4d7d-a8a8-5414f5deb339 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 25c574c4-e39b-4009-a562-a4a5bf74a40c] Deleted contents of the VM from datastore datastore2 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1112.156962] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-3a8f5f46-b611-4d7d-a8a8-5414f5deb339 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 25c574c4-e39b-4009-a562-a4a5bf74a40c] Instance destroyed {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1112.157158] env[61839]: INFO nova.compute.manager [None req-3a8f5f46-b611-4d7d-a8a8-5414f5deb339 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 25c574c4-e39b-4009-a562-a4a5bf74a40c] Took 1.15 seconds to destroy the instance on the hypervisor. 
[ 1112.157398] env[61839]: DEBUG oslo.service.loopingcall [None req-3a8f5f46-b611-4d7d-a8a8-5414f5deb339 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1112.157589] env[61839]: DEBUG nova.compute.manager [-] [instance: 25c574c4-e39b-4009-a562-a4a5bf74a40c] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1112.157680] env[61839]: DEBUG nova.network.neutron [-] [instance: 25c574c4-e39b-4009-a562-a4a5bf74a40c] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1112.251286] env[61839]: DEBUG oslo_vmware.api [None req-1467c3b1-fb8a-4a01-bc59-1f59e85b0a75 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': task-1315095, 'name': ReconfigVM_Task, 'duration_secs': 5.728018} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1112.251532] env[61839]: DEBUG oslo_concurrency.lockutils [None req-1467c3b1-fb8a-4a01-bc59-1f59e85b0a75 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Releasing lock "6b12ef55-b566-4a74-a794-b4e4c41debe1" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1112.251742] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-1467c3b1-fb8a-4a01-bc59-1f59e85b0a75 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: 6b12ef55-b566-4a74-a794-b4e4c41debe1] Reconfigured VM to detach interface {{(pid=61839) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1112.522108] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8b95fbc-4b8c-442a-9cfa-4faf6c02ec73 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.529856] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6359f96d-6f55-414e-800a-c32330fd49eb {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.561018] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ce734d2-2edb-4f39-a7a4-1054e676e786 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.566256] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b40e517-6446-4086-a6ed-e518000d2fae {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.579946] env[61839]: DEBUG nova.compute.provider_tree [None req-3ed0aaf0-7e70-4643-9dfe-5cbd0da38c9e tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1112.771520] env[61839]: DEBUG nova.compute.manager 
[req-99b2ee3e-aeeb-44cc-bd71-b80615e0008e req-05958d0b-2705-4aad-a824-efe86b556730 service nova] [instance: 25c574c4-e39b-4009-a562-a4a5bf74a40c] Received event network-vif-deleted-98c98e8e-abd7-4f81-9969-154f0e05908e {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1112.771783] env[61839]: INFO nova.compute.manager [req-99b2ee3e-aeeb-44cc-bd71-b80615e0008e req-05958d0b-2705-4aad-a824-efe86b556730 service nova] [instance: 25c574c4-e39b-4009-a562-a4a5bf74a40c] Neutron deleted interface 98c98e8e-abd7-4f81-9969-154f0e05908e; detaching it from the instance and deleting it from the info cache [ 1112.772545] env[61839]: DEBUG nova.network.neutron [req-99b2ee3e-aeeb-44cc-bd71-b80615e0008e req-05958d0b-2705-4aad-a824-efe86b556730 service nova] [instance: 25c574c4-e39b-4009-a562-a4a5bf74a40c] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1113.082733] env[61839]: DEBUG nova.scheduler.client.report [None req-3ed0aaf0-7e70-4643-9dfe-5cbd0da38c9e tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1113.249385] env[61839]: DEBUG nova.network.neutron [-] [instance: 25c574c4-e39b-4009-a562-a4a5bf74a40c] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1113.274864] env[61839]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d8799a4e-c91d-4d57-a8cf-57e060d9b5bd {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.284959] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6712ac8-5d4d-486f-a773-e0f968451b6c {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.310436] env[61839]: DEBUG nova.compute.manager [req-99b2ee3e-aeeb-44cc-bd71-b80615e0008e req-05958d0b-2705-4aad-a824-efe86b556730 service nova] [instance: 25c574c4-e39b-4009-a562-a4a5bf74a40c] Detach interface failed, port_id=98c98e8e-abd7-4f81-9969-154f0e05908e, reason: Instance 25c574c4-e39b-4009-a562-a4a5bf74a40c could not be found. 
{{(pid=61839) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1113.589074] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3ed0aaf0-7e70-4643-9dfe-5cbd0da38c9e tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.652s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1113.612372] env[61839]: INFO nova.scheduler.client.report [None req-3ed0aaf0-7e70-4643-9dfe-5cbd0da38c9e tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Deleted allocations for instance 625a8fc1-23fc-4035-855f-3d3a963cdcea [ 1113.727597] env[61839]: DEBUG oslo_concurrency.lockutils [None req-1467c3b1-fb8a-4a01-bc59-1f59e85b0a75 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Acquiring lock "refresh_cache-6b12ef55-b566-4a74-a794-b4e4c41debe1" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1113.728260] env[61839]: DEBUG oslo_concurrency.lockutils [None req-1467c3b1-fb8a-4a01-bc59-1f59e85b0a75 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Acquired lock "refresh_cache-6b12ef55-b566-4a74-a794-b4e4c41debe1" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1113.728416] env[61839]: DEBUG nova.network.neutron [None req-1467c3b1-fb8a-4a01-bc59-1f59e85b0a75 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: 6b12ef55-b566-4a74-a794-b4e4c41debe1] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1113.751161] env[61839]: INFO nova.compute.manager [-] [instance: 25c574c4-e39b-4009-a562-a4a5bf74a40c] Took 1.59 seconds to deallocate network for instance. 
[ 1114.049780] env[61839]: DEBUG oslo_concurrency.lockutils [None req-505bc837-5056-492d-b626-39ae48fcae44 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Acquiring lock "6b12ef55-b566-4a74-a794-b4e4c41debe1" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1114.050166] env[61839]: DEBUG oslo_concurrency.lockutils [None req-505bc837-5056-492d-b626-39ae48fcae44 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Lock "6b12ef55-b566-4a74-a794-b4e4c41debe1" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1114.050380] env[61839]: DEBUG oslo_concurrency.lockutils [None req-505bc837-5056-492d-b626-39ae48fcae44 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Acquiring lock "6b12ef55-b566-4a74-a794-b4e4c41debe1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1114.050626] env[61839]: DEBUG oslo_concurrency.lockutils [None req-505bc837-5056-492d-b626-39ae48fcae44 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Lock "6b12ef55-b566-4a74-a794-b4e4c41debe1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1114.050844] env[61839]: DEBUG oslo_concurrency.lockutils [None req-505bc837-5056-492d-b626-39ae48fcae44 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Lock "6b12ef55-b566-4a74-a794-b4e4c41debe1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1114.053016] env[61839]: INFO nova.compute.manager [None req-505bc837-5056-492d-b626-39ae48fcae44 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: 6b12ef55-b566-4a74-a794-b4e4c41debe1] Terminating instance [ 1114.054789] env[61839]: DEBUG nova.compute.manager [None req-505bc837-5056-492d-b626-39ae48fcae44 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: 6b12ef55-b566-4a74-a794-b4e4c41debe1] Start destroying the instance on the hypervisor.
{{(pid=61839) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1114.055116] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-505bc837-5056-492d-b626-39ae48fcae44 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: 6b12ef55-b566-4a74-a794-b4e4c41debe1] Destroying instance {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1114.056024] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-336bf5bf-bb59-49c5-bf91-3632ae5f6463 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.065582] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-505bc837-5056-492d-b626-39ae48fcae44 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: 6b12ef55-b566-4a74-a794-b4e4c41debe1] Powering off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1114.065813] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-400a40e9-08fd-40f8-8a91-50d8547aef87 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.071442] env[61839]: DEBUG oslo_vmware.api [None req-505bc837-5056-492d-b626-39ae48fcae44 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Waiting for the task: (returnval){ [ 1114.071442] env[61839]: value = "task-1315107" [ 1114.071442] env[61839]: _type = "Task" [ 1114.071442] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1114.079170] env[61839]: DEBUG oslo_vmware.api [None req-505bc837-5056-492d-b626-39ae48fcae44 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': task-1315107, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1114.121839] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3ed0aaf0-7e70-4643-9dfe-5cbd0da38c9e tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Lock "625a8fc1-23fc-4035-855f-3d3a963cdcea" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 7.943s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1114.258122] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3a8f5f46-b611-4d7d-a8a8-5414f5deb339 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1114.258408] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3a8f5f46-b611-4d7d-a8a8-5414f5deb339 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1114.258646] env[61839]: DEBUG nova.objects.instance [None req-3a8f5f46-b611-4d7d-a8a8-5414f5deb339 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Lazy-loading 'resources' on Instance uuid 25c574c4-e39b-4009-a562-a4a5bf74a40c {{(pid=61839) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1114.509268] env[61839]: INFO nova.network.neutron [None req-1467c3b1-fb8a-4a01-bc59-1f59e85b0a75 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: 6b12ef55-b566-4a74-a794-b4e4c41debe1] Port da45fc64-4aa9-4af8-b820-45a5fe4da356 from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache.
[ 1114.509640] env[61839]: DEBUG nova.network.neutron [None req-1467c3b1-fb8a-4a01-bc59-1f59e85b0a75 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: 6b12ef55-b566-4a74-a794-b4e4c41debe1] Updating instance_info_cache with network_info: [{"id": "df30d6b1-3fc6-465f-9b51-353f874ccd30", "address": "fa:16:3e:0c:09:8e", "network": {"id": "41c98894-de91-45eb-a390-6217e0f9dca5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1040806357-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.170", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7e03467b7fba46a9aac1562a1cb8368e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "572b7281-aad3-45fa-9cb2-fc1c70569948", "external-id": "nsx-vlan-transportzone-722", "segmentation_id": 722, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdf30d6b1-3f", "ovs_interfaceid": "df30d6b1-3fc6-465f-9b51-353f874ccd30", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1114.581568] env[61839]: DEBUG oslo_vmware.api [None req-505bc837-5056-492d-b626-39ae48fcae44 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': task-1315107, 'name': PowerOffVM_Task, 'duration_secs': 0.170414} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1114.581712] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-505bc837-5056-492d-b626-39ae48fcae44 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: 6b12ef55-b566-4a74-a794-b4e4c41debe1] Powered off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1114.581780] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-505bc837-5056-492d-b626-39ae48fcae44 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: 6b12ef55-b566-4a74-a794-b4e4c41debe1] Unregistering the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1114.582053] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-854a874e-0be8-454d-a4d7-971658490e8c {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.640372] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-505bc837-5056-492d-b626-39ae48fcae44 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: 6b12ef55-b566-4a74-a794-b4e4c41debe1] Unregistered the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1114.640535] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-505bc837-5056-492d-b626-39ae48fcae44 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: 6b12ef55-b566-4a74-a794-b4e4c41debe1] Deleting contents of the VM from datastore datastore1 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1114.640930] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-505bc837-5056-492d-b626-39ae48fcae44 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Deleting the datastore file [datastore1] 6b12ef55-b566-4a74-a794-b4e4c41debe1 {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1114.640993] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b66a3576-27df-4916-8a75-8e4d5daf2da5 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.648356] env[61839]: DEBUG oslo_vmware.api [None req-505bc837-5056-492d-b626-39ae48fcae44 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Waiting for the task: (returnval){ [ 1114.648356] env[61839]: value = "task-1315109" [ 1114.648356] env[61839]: _type = "Task" [ 1114.648356] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1114.659291] env[61839]: DEBUG oslo_vmware.api [None req-505bc837-5056-492d-b626-39ae48fcae44 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': task-1315109, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1114.840660] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-964aab4e-ea38-40fd-bf63-16f77d51acc7 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.847986] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5e39396-a213-46c2-9897-6a05fb7daaa0 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.877210] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a367b043-2758-49d6-8f47-8dab1e7ceabd {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.884646] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a07b78c-3906-40db-8484-8508190b88b0 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.899064] env[61839]: DEBUG nova.compute.provider_tree [None req-3a8f5f46-b611-4d7d-a8a8-5414f5deb339 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1115.012407] env[61839]: DEBUG oslo_concurrency.lockutils [None req-1467c3b1-fb8a-4a01-bc59-1f59e85b0a75 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Releasing lock "refresh_cache-6b12ef55-b566-4a74-a794-b4e4c41debe1" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1115.159188] env[61839]: DEBUG oslo_vmware.api [None req-505bc837-5056-492d-b626-39ae48fcae44 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': task-1315109, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.148596} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1115.159545] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-505bc837-5056-492d-b626-39ae48fcae44 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Deleted the datastore file {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1115.159690] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-505bc837-5056-492d-b626-39ae48fcae44 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: 6b12ef55-b566-4a74-a794-b4e4c41debe1] Deleted contents of the VM from datastore datastore1 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1115.159877] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-505bc837-5056-492d-b626-39ae48fcae44 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: 6b12ef55-b566-4a74-a794-b4e4c41debe1] Instance destroyed {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1115.160068] env[61839]: INFO nova.compute.manager [None req-505bc837-5056-492d-b626-39ae48fcae44 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: 6b12ef55-b566-4a74-a794-b4e4c41debe1] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1115.160319] env[61839]: DEBUG oslo.service.loopingcall [None req-505bc837-5056-492d-b626-39ae48fcae44 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.
{{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1115.160508] env[61839]: DEBUG nova.compute.manager [-] [instance: 6b12ef55-b566-4a74-a794-b4e4c41debe1] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1115.160603] env[61839]: DEBUG nova.network.neutron [-] [instance: 6b12ef55-b566-4a74-a794-b4e4c41debe1] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1115.402311] env[61839]: DEBUG nova.scheduler.client.report [None req-3a8f5f46-b611-4d7d-a8a8-5414f5deb339 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1115.518026] env[61839]: DEBUG oslo_concurrency.lockutils [None req-1467c3b1-fb8a-4a01-bc59-1f59e85b0a75 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Lock "interface-6b12ef55-b566-4a74-a794-b4e4c41debe1-da45fc64-4aa9-4af8-b820-45a5fe4da356" "released" by "nova.compute.manager.ComputeManager.detach_interface.<locals>.do_detach_interface" :: held 9.875s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1115.576455] env[61839]: DEBUG oslo_concurrency.lockutils [None req-fdade56a-c95a-426a-a307-4e69a36263c9 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Acquiring lock "62af556c-c9b1-4de6-bb07-532ba67fa367" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1115.576836] env[61839]: DEBUG oslo_concurrency.lockutils [None req-fdade56a-c95a-426a-a307-4e69a36263c9 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Lock "62af556c-c9b1-4de6-bb07-532ba67fa367" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1115.836474] env[61839]: DEBUG nova.compute.manager [req-4fd4d8ec-f385-4287-a701-de66d1d9c6ae req-078d0cad-0828-4253-a682-39594847cf17 service nova] [instance: 6b12ef55-b566-4a74-a794-b4e4c41debe1] Received event network-vif-deleted-df30d6b1-3fc6-465f-9b51-353f874ccd30 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1115.836816] env[61839]: INFO nova.compute.manager [req-4fd4d8ec-f385-4287-a701-de66d1d9c6ae req-078d0cad-0828-4253-a682-39594847cf17 service nova] [instance: 6b12ef55-b566-4a74-a794-b4e4c41debe1] Neutron deleted interface df30d6b1-3fc6-465f-9b51-353f874ccd30; detaching it from the instance and deleting it from the info cache [ 1115.837096] env[61839]: DEBUG nova.network.neutron [req-4fd4d8ec-f385-4287-a701-de66d1d9c6ae
req-078d0cad-0828-4253-a682-39594847cf17 service nova] [instance: 6b12ef55-b566-4a74-a794-b4e4c41debe1] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1115.906584] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3a8f5f46-b611-4d7d-a8a8-5414f5deb339 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.648s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1115.928346] env[61839]: INFO nova.scheduler.client.report [None req-3a8f5f46-b611-4d7d-a8a8-5414f5deb339 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Deleted allocations for instance 25c574c4-e39b-4009-a562-a4a5bf74a40c [ 1116.079601] env[61839]: DEBUG nova.compute.manager [None req-fdade56a-c95a-426a-a307-4e69a36263c9 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 62af556c-c9b1-4de6-bb07-532ba67fa367] Starting instance... {{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1116.313289] env[61839]: DEBUG nova.network.neutron [-] [instance: 6b12ef55-b566-4a74-a794-b4e4c41debe1] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1116.339897] env[61839]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-25ab7e57-b53f-4736-af36-2730e3fe9d91 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.351048] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ace5ab3-477e-4d50-a6cd-369eedc91052 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.376878] env[61839]: DEBUG nova.compute.manager [req-4fd4d8ec-f385-4287-a701-de66d1d9c6ae req-078d0cad-0828-4253-a682-39594847cf17 service nova] [instance: 6b12ef55-b566-4a74-a794-b4e4c41debe1] Detach interface failed, port_id=df30d6b1-3fc6-465f-9b51-353f874ccd30, reason: Instance 6b12ef55-b566-4a74-a794-b4e4c41debe1 could not be found. 
{{(pid=61839) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1116.435899] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3a8f5f46-b611-4d7d-a8a8-5414f5deb339 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Lock "25c574c4-e39b-4009-a562-a4a5bf74a40c" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 5.429s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1116.600431] env[61839]: DEBUG oslo_concurrency.lockutils [None req-fdade56a-c95a-426a-a307-4e69a36263c9 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1116.600679] env[61839]: DEBUG oslo_concurrency.lockutils [None req-fdade56a-c95a-426a-a307-4e69a36263c9 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1116.602680] env[61839]: INFO nova.compute.claims [None req-fdade56a-c95a-426a-a307-4e69a36263c9 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 62af556c-c9b1-4de6-bb07-532ba67fa367] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1116.815659] env[61839]: INFO nova.compute.manager [-] [instance: 6b12ef55-b566-4a74-a794-b4e4c41debe1] Took 1.65 seconds to deallocate network for instance.
[ 1117.322386] env[61839]: DEBUG oslo_concurrency.lockutils [None req-505bc837-5056-492d-b626-39ae48fcae44 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1117.677779] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba4d5c0b-7ef9-432f-9ddb-e19697c7bcee {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.685797] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e72dead9-8a22-48cb-a5d4-f3190404d532 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.714961] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d11e0b52-575f-4b93-95e7-0d12f0fff47b {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.721999] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c045f41a-92ac-48a5-aaf7-68b09e7f0f84 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.735953] env[61839]: DEBUG nova.compute.provider_tree [None req-fdade56a-c95a-426a-a307-4e69a36263c9 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1118.239374] env[61839]: DEBUG nova.scheduler.client.report [None req-fdade56a-c95a-426a-a307-4e69a36263c9 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1118.510339] env[61839]: DEBUG oslo_concurrency.lockutils [None req-2dbd4976-b3c9-4622-8bfc-b04658290a17 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Acquiring lock "3e153d8a-e069-443c-9db4-7614a6475971" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1118.510627] env[61839]: DEBUG oslo_concurrency.lockutils [None req-2dbd4976-b3c9-4622-8bfc-b04658290a17 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Lock "3e153d8a-e069-443c-9db4-7614a6475971" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61839) inner
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1118.744025] env[61839]: DEBUG oslo_concurrency.lockutils [None req-fdade56a-c95a-426a-a307-4e69a36263c9 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.143s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1118.744589] env[61839]: DEBUG nova.compute.manager [None req-fdade56a-c95a-426a-a307-4e69a36263c9 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 62af556c-c9b1-4de6-bb07-532ba67fa367] Start building networks asynchronously for instance. {{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1118.747314] env[61839]: DEBUG oslo_concurrency.lockutils [None req-505bc837-5056-492d-b626-39ae48fcae44 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.425s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1118.747888] env[61839]: DEBUG nova.objects.instance [None req-505bc837-5056-492d-b626-39ae48fcae44 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Lazy-loading 'resources' on Instance uuid 6b12ef55-b566-4a74-a794-b4e4c41debe1 {{(pid=61839) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1119.013164] env[61839]: DEBUG nova.compute.manager [None req-2dbd4976-b3c9-4622-8bfc-b04658290a17 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 3e153d8a-e069-443c-9db4-7614a6475971] Starting instance... {{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1119.250979] env[61839]: DEBUG nova.compute.utils [None req-fdade56a-c95a-426a-a307-4e69a36263c9 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Using /dev/sd instead of None {{(pid=61839) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1119.255068] env[61839]: DEBUG nova.compute.manager [None req-fdade56a-c95a-426a-a307-4e69a36263c9 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 62af556c-c9b1-4de6-bb07-532ba67fa367] Allocating IP information in the background. 
{{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1119.255241] env[61839]: DEBUG nova.network.neutron [None req-fdade56a-c95a-426a-a307-4e69a36263c9 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 62af556c-c9b1-4de6-bb07-532ba67fa367] allocate_for_instance() {{(pid=61839) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1119.306233] env[61839]: DEBUG nova.policy [None req-fdade56a-c95a-426a-a307-4e69a36263c9 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8abcff2ffe534da3983ec78c3671110d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5f789f3900a347b59c491e9d141fb9e7', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61839) authorize /opt/stack/nova/nova/policy.py:201}} [ 1119.333143] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c6536c8-e641-4d0d-a47f-453b99fef27e {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.342607] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46091ed9-3185-45fd-8f36-aa43a4a496ea {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.376402] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46786aef-3a13-4ac5-97fa-7814dd7eabe3 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.386146] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45cb520d-7b06-4a0c-818e-4ec9da5857e2 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.404101] env[61839]: DEBUG nova.compute.provider_tree [None req-505bc837-5056-492d-b626-39ae48fcae44 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1119.531469] env[61839]: DEBUG oslo_concurrency.lockutils [None req-2dbd4976-b3c9-4622-8bfc-b04658290a17 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1119.597328] env[61839]: DEBUG nova.network.neutron [None req-fdade56a-c95a-426a-a307-4e69a36263c9 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 62af556c-c9b1-4de6-bb07-532ba67fa367] Successfully created port: 539fa2ab-4763-4c46-ae0d-4db54ad64fa5 {{(pid=61839) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1119.755829] env[61839]: DEBUG nova.compute.manager [None 
req-fdade56a-c95a-426a-a307-4e69a36263c9 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 62af556c-c9b1-4de6-bb07-532ba67fa367] Start building block device mappings for instance. {{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1119.907838] env[61839]: DEBUG nova.scheduler.client.report [None req-505bc837-5056-492d-b626-39ae48fcae44 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1120.413085] env[61839]: DEBUG oslo_concurrency.lockutils [None req-505bc837-5056-492d-b626-39ae48fcae44 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.666s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1120.415494] env[61839]: DEBUG oslo_concurrency.lockutils [None req-2dbd4976-b3c9-4622-8bfc-b04658290a17 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.884s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1120.417210] env[61839]: INFO nova.compute.claims [None req-2dbd4976-b3c9-4622-8bfc-b04658290a17 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 3e153d8a-e069-443c-9db4-7614a6475971] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1120.434038] env[61839]: INFO nova.scheduler.client.report [None req-505bc837-5056-492d-b626-39ae48fcae44 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Deleted allocations for instance 6b12ef55-b566-4a74-a794-b4e4c41debe1 [ 1120.767052] env[61839]: DEBUG nova.compute.manager [None req-fdade56a-c95a-426a-a307-4e69a36263c9 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 62af556c-c9b1-4de6-bb07-532ba67fa367] Start spawning the instance on the hypervisor. 
{{(pid=61839) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1120.790405] env[61839]: DEBUG nova.virt.hardware [None req-fdade56a-c95a-426a-a307-4e69a36263c9 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-18T16:51:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-18T16:50:57Z,direct_url=,disk_format='vmdk',id=e497cc62-282a-4a70-9770-22d80d8a1013,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a98e034276e44534a9feace637762da7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-18T16:50:58Z,virtual_size=,visibility=), allow threads: False {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1120.790661] env[61839]: DEBUG nova.virt.hardware [None req-fdade56a-c95a-426a-a307-4e69a36263c9 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Flavor limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1120.790834] env[61839]: DEBUG nova.virt.hardware [None req-fdade56a-c95a-426a-a307-4e69a36263c9 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Image limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1120.791027] env[61839]: DEBUG nova.virt.hardware [None req-fdade56a-c95a-426a-a307-4e69a36263c9 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Flavor pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1120.791190] env[61839]: DEBUG nova.virt.hardware [None req-fdade56a-c95a-426a-a307-4e69a36263c9 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Image pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1120.791343] env[61839]: DEBUG nova.virt.hardware [None req-fdade56a-c95a-426a-a307-4e69a36263c9 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1120.791558] env[61839]: DEBUG nova.virt.hardware [None req-fdade56a-c95a-426a-a307-4e69a36263c9 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1120.791723] env[61839]: DEBUG nova.virt.hardware [None req-fdade56a-c95a-426a-a307-4e69a36263c9 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1120.791895] env[61839]: DEBUG nova.virt.hardware [None 
req-fdade56a-c95a-426a-a307-4e69a36263c9 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Got 1 possible topologies {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1120.792076] env[61839]: DEBUG nova.virt.hardware [None req-fdade56a-c95a-426a-a307-4e69a36263c9 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1120.792258] env[61839]: DEBUG nova.virt.hardware [None req-fdade56a-c95a-426a-a307-4e69a36263c9 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1120.793124] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71301fdd-0c08-4891-bc15-531c7b7d85d3 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.801128] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d2ccc86-0913-4679-9018-6db71ce33654 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.941345] env[61839]: DEBUG oslo_concurrency.lockutils [None req-505bc837-5056-492d-b626-39ae48fcae44 tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Lock "6b12ef55-b566-4a74-a794-b4e4c41debe1" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 6.891s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1121.190295] env[61839]: DEBUG nova.compute.manager [req-1de6fa68-9f58-4a03-8b69-b082fcecd329 req-afe6cfd3-d195-400b-b072-35aac37ac261 service nova] [instance: 62af556c-c9b1-4de6-bb07-532ba67fa367] Received event network-vif-plugged-539fa2ab-4763-4c46-ae0d-4db54ad64fa5 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1121.190530] env[61839]: DEBUG oslo_concurrency.lockutils [req-1de6fa68-9f58-4a03-8b69-b082fcecd329 req-afe6cfd3-d195-400b-b072-35aac37ac261 service nova] Acquiring lock "62af556c-c9b1-4de6-bb07-532ba67fa367-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1121.190748] env[61839]: DEBUG oslo_concurrency.lockutils [req-1de6fa68-9f58-4a03-8b69-b082fcecd329 req-afe6cfd3-d195-400b-b072-35aac37ac261 service nova] Lock "62af556c-c9b1-4de6-bb07-532ba67fa367-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1121.190924] env[61839]: DEBUG oslo_concurrency.lockutils [req-1de6fa68-9f58-4a03-8b69-b082fcecd329 req-afe6cfd3-d195-400b-b072-35aac37ac261 service nova] Lock "62af556c-c9b1-4de6-bb07-532ba67fa367-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61839) inner
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1121.191654] env[61839]: DEBUG nova.compute.manager [req-1de6fa68-9f58-4a03-8b69-b082fcecd329 req-afe6cfd3-d195-400b-b072-35aac37ac261 service nova] [instance: 62af556c-c9b1-4de6-bb07-532ba67fa367] No waiting events found dispatching network-vif-plugged-539fa2ab-4763-4c46-ae0d-4db54ad64fa5 {{(pid=61839) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1121.192146] env[61839]: WARNING nova.compute.manager [req-1de6fa68-9f58-4a03-8b69-b082fcecd329 req-afe6cfd3-d195-400b-b072-35aac37ac261 service nova] [instance: 62af556c-c9b1-4de6-bb07-532ba67fa367] Received unexpected event network-vif-plugged-539fa2ab-4763-4c46-ae0d-4db54ad64fa5 for instance with vm_state building and task_state spawning. [ 1121.275102] env[61839]: DEBUG nova.network.neutron [None req-fdade56a-c95a-426a-a307-4e69a36263c9 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 62af556c-c9b1-4de6-bb07-532ba67fa367] Successfully updated port: 539fa2ab-4763-4c46-ae0d-4db54ad64fa5 {{(pid=61839) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1121.495202] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d94a357b-4e7c-437a-91ab-7d0f94b365b0 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.502690] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9de5b94-e50d-473d-a6b7-8c71177f99d6 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.531289] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8cd943b-0986-479f-8160-53004bdc28ec {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.538419] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-944d5d80-1dd5-4d1d-85b8-e295b37df0c3 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.551340] env[61839]: DEBUG nova.compute.provider_tree [None req-2dbd4976-b3c9-4622-8bfc-b04658290a17 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1121.708591] env[61839]: DEBUG oslo_concurrency.lockutils [None req-6599d9be-6065-43e1-80ef-35d18146b7db tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Acquiring lock "dbd34858-9806-4d3f-b829-948651056da2" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1121.708814] env[61839]: DEBUG oslo_concurrency.lockutils [None req-6599d9be-6065-43e1-80ef-35d18146b7db tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Lock "dbd34858-9806-4d3f-b829-948651056da2" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=61839) inner
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1121.709031] env[61839]: DEBUG oslo_concurrency.lockutils [None req-6599d9be-6065-43e1-80ef-35d18146b7db tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Acquiring lock "dbd34858-9806-4d3f-b829-948651056da2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1121.709229] env[61839]: DEBUG oslo_concurrency.lockutils [None req-6599d9be-6065-43e1-80ef-35d18146b7db tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Lock "dbd34858-9806-4d3f-b829-948651056da2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1121.709403] env[61839]: DEBUG oslo_concurrency.lockutils [None req-6599d9be-6065-43e1-80ef-35d18146b7db tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Lock "dbd34858-9806-4d3f-b829-948651056da2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1121.711667] env[61839]: INFO nova.compute.manager [None req-6599d9be-6065-43e1-80ef-35d18146b7db tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: dbd34858-9806-4d3f-b829-948651056da2] Terminating instance [ 1121.713338] env[61839]: DEBUG nova.compute.manager [None req-6599d9be-6065-43e1-80ef-35d18146b7db tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: dbd34858-9806-4d3f-b829-948651056da2] Start destroying the instance on the hypervisor.
{{(pid=61839) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1121.713537] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-6599d9be-6065-43e1-80ef-35d18146b7db tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: dbd34858-9806-4d3f-b829-948651056da2] Destroying instance {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1121.714479] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acb799e6-c2b1-469d-aeea-1502ca216501 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.722316] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-6599d9be-6065-43e1-80ef-35d18146b7db tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: dbd34858-9806-4d3f-b829-948651056da2] Powering off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1121.722551] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7b10d2a3-2ca8-4a7a-aeb1-baa28d39170d {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.729739] env[61839]: DEBUG oslo_vmware.api [None req-6599d9be-6065-43e1-80ef-35d18146b7db tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Waiting for the task: (returnval){ [ 1121.729739] env[61839]: value = "task-1315110" [ 1121.729739] env[61839]: _type = "Task" [ 1121.729739] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1121.739177] env[61839]: DEBUG oslo_vmware.api [None req-6599d9be-6065-43e1-80ef-35d18146b7db tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': task-1315110, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1121.776976] env[61839]: DEBUG oslo_concurrency.lockutils [None req-fdade56a-c95a-426a-a307-4e69a36263c9 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Acquiring lock "refresh_cache-62af556c-c9b1-4de6-bb07-532ba67fa367" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1121.777293] env[61839]: DEBUG oslo_concurrency.lockutils [None req-fdade56a-c95a-426a-a307-4e69a36263c9 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Acquired lock "refresh_cache-62af556c-c9b1-4de6-bb07-532ba67fa367" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1121.777293] env[61839]: DEBUG nova.network.neutron [None req-fdade56a-c95a-426a-a307-4e69a36263c9 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 62af556c-c9b1-4de6-bb07-532ba67fa367] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1122.054259] env[61839]: DEBUG nova.scheduler.client.report [None req-2dbd4976-b3c9-4622-8bfc-b04658290a17 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1122.239394] env[61839]: DEBUG oslo_vmware.api [None req-6599d9be-6065-43e1-80ef-35d18146b7db tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': task-1315110, 'name': PowerOffVM_Task, 'duration_secs': 0.196791} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1122.239683] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-6599d9be-6065-43e1-80ef-35d18146b7db tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: dbd34858-9806-4d3f-b829-948651056da2] Powered off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1122.239861] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-6599d9be-6065-43e1-80ef-35d18146b7db tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: dbd34858-9806-4d3f-b829-948651056da2] Unregistering the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1122.240144] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1dddfa0e-b861-44a7-bc0b-d0ce1c802843 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.301798] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-6599d9be-6065-43e1-80ef-35d18146b7db tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: dbd34858-9806-4d3f-b829-948651056da2] Unregistered the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1122.301798] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-6599d9be-6065-43e1-80ef-35d18146b7db tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: dbd34858-9806-4d3f-b829-948651056da2] Deleting contents of the VM from datastore datastore2 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1122.301798] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-6599d9be-6065-43e1-80ef-35d18146b7db tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Deleting the datastore file [datastore2] dbd34858-9806-4d3f-b829-948651056da2 {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1122.302063] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-98cbe640-8725-4d3d-bb82-5c81b0616b85 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.308698] env[61839]: DEBUG oslo_vmware.api [None req-6599d9be-6065-43e1-80ef-35d18146b7db tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Waiting for the task: (returnval){ [ 1122.308698] env[61839]: value = "task-1315112" [ 1122.308698] env[61839]: _type = "Task" [ 1122.308698] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1122.312241] env[61839]: DEBUG nova.network.neutron [None req-fdade56a-c95a-426a-a307-4e69a36263c9 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 62af556c-c9b1-4de6-bb07-532ba67fa367] Instance cache missing network info. 
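
The unregister-then-delete sequence above maps onto two vSphere calls. A sketch assuming the `session` and `vm_ref` from the earlier sketch plus a `dc_ref` Datacenter managed-object reference (also a placeholder):

```python
# Teardown sketch for the UnregisterVM / DeleteDatastoreFile records
# above; `session`, `vm_ref` and `dc_ref` are assumed placeholders.
file_manager = session.vim.service_content.fileManager

# UnregisterVM drops the VM from vCenter inventory but leaves its files,
# which is why a separate datastore delete follows in the log.
session.invoke_api(session.vim, 'UnregisterVM', vm_ref)

# DeleteDatastoreFile_Task recursively removes the instance directory.
task = session.invoke_api(
    session.vim, 'DeleteDatastoreFile_Task', file_manager,
    name='[datastore2] dbd34858-9806-4d3f-b829-948651056da2',
    datacenter=dc_ref)
session.wait_for_task(task)
```
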
{{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1122.318253] env[61839]: DEBUG oslo_vmware.api [None req-6599d9be-6065-43e1-80ef-35d18146b7db tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': task-1315112, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1122.492969] env[61839]: DEBUG nova.network.neutron [None req-fdade56a-c95a-426a-a307-4e69a36263c9 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 62af556c-c9b1-4de6-bb07-532ba67fa367] Updating instance_info_cache with network_info: [{"id": "539fa2ab-4763-4c46-ae0d-4db54ad64fa5", "address": "fa:16:3e:3b:e7:ff", "network": {"id": "54f10e74-ee7b-499c-a6b6-a27d337719cd", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-79581761-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5f789f3900a347b59c491e9d141fb9e7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe20ef0e-0991-44d7-887d-08dddac0b56b", "external-id": "nsx-vlan-transportzone-991", "segmentation_id": 991, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap539fa2ab-47", "ovs_interfaceid": "539fa2ab-4763-4c46-ae0d-4db54ad64fa5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1122.559679] env[61839]: DEBUG oslo_concurrency.lockutils [None req-2dbd4976-b3c9-4622-8bfc-b04658290a17 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.144s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1122.560223] env[61839]: DEBUG nova.compute.manager [None req-2dbd4976-b3c9-4622-8bfc-b04658290a17 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 3e153d8a-e069-443c-9db4-7614a6475971] Start building networks asynchronously for instance. {{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1122.818506] env[61839]: DEBUG oslo_vmware.api [None req-6599d9be-6065-43e1-80ef-35d18146b7db tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Task: {'id': task-1315112, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.157775} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1122.818862] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-6599d9be-6065-43e1-80ef-35d18146b7db tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Deleted the datastore file {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1122.818904] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-6599d9be-6065-43e1-80ef-35d18146b7db tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: dbd34858-9806-4d3f-b829-948651056da2] Deleted contents of the VM from datastore datastore2 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1122.819134] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-6599d9be-6065-43e1-80ef-35d18146b7db tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: dbd34858-9806-4d3f-b829-948651056da2] Instance destroyed {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1122.819315] env[61839]: INFO nova.compute.manager [None req-6599d9be-6065-43e1-80ef-35d18146b7db tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [instance: dbd34858-9806-4d3f-b829-948651056da2] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1122.819560] env[61839]: DEBUG oslo.service.loopingcall [None req-6599d9be-6065-43e1-80ef-35d18146b7db tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.
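
The "Waiting for function ... to return." records (loopingcall.py:435) are emitted by oslo.service's RetryDecorator, which re-invokes the wrapped callable via a looping call, with increasing sleeps, while it raises the whitelisted exceptions. A toy example; the retry parameters and exception type are assumptions for illustration, not Nova's actual settings:

```python
# Toy RetryDecorator example; parameters and the exception type are
# illustrative assumptions, not Nova's configuration.
from oslo_service import loopingcall


class TransientError(Exception):
    pass


@loopingcall.RetryDecorator(max_retry_count=3, inc_sleep_time=1,
                            max_sleep_time=10,
                            exceptions=(TransientError,))
def deallocate_network_with_retries():
    # Nova's real wrapped function calls out to Neutron here and raises
    # on transient failures so the decorator re-invokes it.
    return 'done'


print(deallocate_network_with_retries())
```
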
{{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1122.819763] env[61839]: DEBUG nova.compute.manager [-] [instance: dbd34858-9806-4d3f-b829-948651056da2] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1122.819856] env[61839]: DEBUG nova.network.neutron [-] [instance: dbd34858-9806-4d3f-b829-948651056da2] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1122.995386] env[61839]: DEBUG oslo_concurrency.lockutils [None req-fdade56a-c95a-426a-a307-4e69a36263c9 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Releasing lock "refresh_cache-62af556c-c9b1-4de6-bb07-532ba67fa367" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1122.995740] env[61839]: DEBUG nova.compute.manager [None req-fdade56a-c95a-426a-a307-4e69a36263c9 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 62af556c-c9b1-4de6-bb07-532ba67fa367] Instance network_info: |[{"id": "539fa2ab-4763-4c46-ae0d-4db54ad64fa5", "address": "fa:16:3e:3b:e7:ff", "network": {"id": "54f10e74-ee7b-499c-a6b6-a27d337719cd", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-79581761-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5f789f3900a347b59c491e9d141fb9e7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe20ef0e-0991-44d7-887d-08dddac0b56b", "external-id": "nsx-vlan-transportzone-991", "segmentation_id": 991, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap539fa2ab-47", "ovs_interfaceid": "539fa2ab-4763-4c46-ae0d-4db54ad64fa5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1122.996208] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-fdade56a-c95a-426a-a307-4e69a36263c9 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 62af556c-c9b1-4de6-bb07-532ba67fa367] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3b:e7:ff', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'fe20ef0e-0991-44d7-887d-08dddac0b56b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '539fa2ab-4763-4c46-ae0d-4db54ad64fa5', 'vif_model': 'vmxnet3'}] {{(pid=61839) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1123.003706] env[61839]: DEBUG oslo.service.loopingcall [None req-fdade56a-c95a-426a-a307-4e69a36263c9 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1123.003946] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 62af556c-c9b1-4de6-bb07-532ba67fa367] Creating VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1123.004288] env[61839]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2eec1624-5aa1-458a-9108-fb48b690717f {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.025971] env[61839]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1123.025971] env[61839]: value = "task-1315113" [ 1123.025971] env[61839]: _type = "Task" [ 1123.025971] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1123.034368] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1315113, 'name': CreateVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1123.064396] env[61839]: DEBUG nova.compute.utils [None req-2dbd4976-b3c9-4622-8bfc-b04658290a17 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Using /dev/sd instead of None {{(pid=61839) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1123.066015] env[61839]: DEBUG nova.compute.manager [None req-2dbd4976-b3c9-4622-8bfc-b04658290a17 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 3e153d8a-e069-443c-9db4-7614a6475971] Allocating IP information in the background. {{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1123.066207] env[61839]: DEBUG nova.network.neutron [None req-2dbd4976-b3c9-4622-8bfc-b04658290a17 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 3e153d8a-e069-443c-9db4-7614a6475971] allocate_for_instance() {{(pid=61839) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1123.104163] env[61839]: DEBUG nova.policy [None req-2dbd4976-b3c9-4622-8bfc-b04658290a17 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'cd83e8a2f63d4ae38c5989c1e3824e3e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '48d8c406ff504d71bba5fb74caf11c14', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61839) authorize /opt/stack/nova/nova/policy.py:201}} [ 1123.225108] env[61839]: DEBUG nova.compute.manager [req-839b0d8f-789a-48b8-8091-4774e155be68 req-0fa0240a-d3f6-47a4-9618-2c2ece331850 service nova] [instance: 62af556c-c9b1-4de6-bb07-532ba67fa367] Received event network-changed-539fa2ab-4763-4c46-ae0d-4db54ad64fa5 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1123.225216] env[61839]: DEBUG nova.compute.manager [req-839b0d8f-789a-48b8-8091-4774e155be68 req-0fa0240a-d3f6-47a4-9618-2c2ece331850 service nova] [instance: 62af556c-c9b1-4de6-bb07-532ba67fa367] Refreshing instance network info cache due to event 
network-changed-539fa2ab-4763-4c46-ae0d-4db54ad64fa5. {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1123.225463] env[61839]: DEBUG oslo_concurrency.lockutils [req-839b0d8f-789a-48b8-8091-4774e155be68 req-0fa0240a-d3f6-47a4-9618-2c2ece331850 service nova] Acquiring lock "refresh_cache-62af556c-c9b1-4de6-bb07-532ba67fa367" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1123.225584] env[61839]: DEBUG oslo_concurrency.lockutils [req-839b0d8f-789a-48b8-8091-4774e155be68 req-0fa0240a-d3f6-47a4-9618-2c2ece331850 service nova] Acquired lock "refresh_cache-62af556c-c9b1-4de6-bb07-532ba67fa367" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1123.225777] env[61839]: DEBUG nova.network.neutron [req-839b0d8f-789a-48b8-8091-4774e155be68 req-0fa0240a-d3f6-47a4-9618-2c2ece331850 service nova] [instance: 62af556c-c9b1-4de6-bb07-532ba67fa367] Refreshing network info cache for port 539fa2ab-4763-4c46-ae0d-4db54ad64fa5 {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1123.361651] env[61839]: DEBUG nova.network.neutron [None req-2dbd4976-b3c9-4622-8bfc-b04658290a17 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 3e153d8a-e069-443c-9db4-7614a6475971] Successfully created port: 3e08c400-de17-4651-a33f-716a238d9cff {{(pid=61839) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1123.522680] env[61839]: DEBUG nova.network.neutron [-] [instance: dbd34858-9806-4d3f-b829-948651056da2] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1123.536836] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1315113, 'name': CreateVM_Task, 'duration_secs': 0.319929} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1123.536836] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 62af556c-c9b1-4de6-bb07-532ba67fa367] Created VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1123.537141] env[61839]: DEBUG oslo_concurrency.lockutils [None req-fdade56a-c95a-426a-a307-4e69a36263c9 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1123.537356] env[61839]: DEBUG oslo_concurrency.lockutils [None req-fdade56a-c95a-426a-a307-4e69a36263c9 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1123.537736] env[61839]: DEBUG oslo_concurrency.lockutils [None req-fdade56a-c95a-426a-a307-4e69a36263c9 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1123.538242] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f08a152a-9d40-409e-88f8-8f25803cd385 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.542897] env[61839]: DEBUG oslo_vmware.api [None req-fdade56a-c95a-426a-a307-4e69a36263c9 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Waiting for the task: (returnval){ [ 1123.542897] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52b19058-1656-db4f-db5d-caf2ac467251" [ 1123.542897] env[61839]: _type = "Task" [ 1123.542897] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1123.552577] env[61839]: DEBUG oslo_vmware.api [None req-fdade56a-c95a-426a-a307-4e69a36263c9 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52b19058-1656-db4f-db5d-caf2ac467251, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1123.571423] env[61839]: DEBUG nova.compute.manager [None req-2dbd4976-b3c9-4622-8bfc-b04658290a17 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 3e153d8a-e069-443c-9db4-7614a6475971] Start building block device mappings for instance. {{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1123.908139] env[61839]: DEBUG nova.network.neutron [req-839b0d8f-789a-48b8-8091-4774e155be68 req-0fa0240a-d3f6-47a4-9618-2c2ece331850 service nova] [instance: 62af556c-c9b1-4de6-bb07-532ba67fa367] Updated VIF entry in instance network info cache for port 539fa2ab-4763-4c46-ae0d-4db54ad64fa5. 
{{(pid=61839) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1123.908755] env[61839]: DEBUG nova.network.neutron [req-839b0d8f-789a-48b8-8091-4774e155be68 req-0fa0240a-d3f6-47a4-9618-2c2ece331850 service nova] [instance: 62af556c-c9b1-4de6-bb07-532ba67fa367] Updating instance_info_cache with network_info: [{"id": "539fa2ab-4763-4c46-ae0d-4db54ad64fa5", "address": "fa:16:3e:3b:e7:ff", "network": {"id": "54f10e74-ee7b-499c-a6b6-a27d337719cd", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-79581761-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5f789f3900a347b59c491e9d141fb9e7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe20ef0e-0991-44d7-887d-08dddac0b56b", "external-id": "nsx-vlan-transportzone-991", "segmentation_id": 991, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap539fa2ab-47", "ovs_interfaceid": "539fa2ab-4763-4c46-ae0d-4db54ad64fa5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1124.024996] env[61839]: INFO nova.compute.manager [-] [instance: dbd34858-9806-4d3f-b829-948651056da2] Took 1.20 seconds to deallocate network for instance. [ 1124.054102] env[61839]: DEBUG oslo_vmware.api [None req-fdade56a-c95a-426a-a307-4e69a36263c9 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52b19058-1656-db4f-db5d-caf2ac467251, 'name': SearchDatastore_Task, 'duration_secs': 0.009694} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1124.054482] env[61839]: DEBUG oslo_concurrency.lockutils [None req-fdade56a-c95a-426a-a307-4e69a36263c9 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1124.054763] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-fdade56a-c95a-426a-a307-4e69a36263c9 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 62af556c-c9b1-4de6-bb07-532ba67fa367] Processing image e497cc62-282a-4a70-9770-22d80d8a1013 {{(pid=61839) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1124.055022] env[61839]: DEBUG oslo_concurrency.lockutils [None req-fdade56a-c95a-426a-a307-4e69a36263c9 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1124.055365] env[61839]: DEBUG oslo_concurrency.lockutils [None req-fdade56a-c95a-426a-a307-4e69a36263c9 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1124.055586] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-fdade56a-c95a-426a-a307-4e69a36263c9 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1124.055854] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4ec3f281-607a-4042-9c89-63420ea49d44 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.063494] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-fdade56a-c95a-426a-a307-4e69a36263c9 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1124.063663] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-fdade56a-c95a-426a-a307-4e69a36263c9 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Folder [datastore1] devstack-image-cache_base created. 
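
The mkdir above is vSphere's FileManager.MakeDirectory with createParentDirectories set; Nova tolerates a FileAlreadyExists fault at this point, which is why a pre-existing image-cache folder is still logged as success. Sketch, with `session` and `dc_ref` assumed from the earlier sketches:

```python
# Sketch of the ds_util.mkdir call above; `session` and `dc_ref` are the
# assumed placeholders from the earlier sketches.
file_manager = session.vim.service_content.fileManager
session.invoke_api(
    session.vim, 'MakeDirectory', file_manager,
    name='[datastore1] devstack-image-cache_base',
    datacenter=dc_ref,
    createParentDirectories=True)  # missing parents are created too
```
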
{{(pid=61839) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1124.064361] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-58a7c85c-890d-43f5-95db-deed81b110ce {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.069898] env[61839]: DEBUG oslo_vmware.api [None req-fdade56a-c95a-426a-a307-4e69a36263c9 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Waiting for the task: (returnval){ [ 1124.069898] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]525b322a-bd8e-9c61-a85d-3785f4d37576" [ 1124.069898] env[61839]: _type = "Task" [ 1124.069898] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1124.080839] env[61839]: DEBUG oslo_vmware.api [None req-fdade56a-c95a-426a-a307-4e69a36263c9 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]525b322a-bd8e-9c61-a85d-3785f4d37576, 'name': SearchDatastore_Task, 'duration_secs': 0.007661} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1124.081524] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-12a8e6a7-3c73-4ecf-b3be-a50be5696f4d {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.086215] env[61839]: DEBUG oslo_vmware.api [None req-fdade56a-c95a-426a-a307-4e69a36263c9 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Waiting for the task: (returnval){ [ 1124.086215] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52b8ec95-4cc5-f9e0-817e-215ebdb4c7db" [ 1124.086215] env[61839]: _type = "Task" [ 1124.086215] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1124.093590] env[61839]: DEBUG oslo_vmware.api [None req-fdade56a-c95a-426a-a307-4e69a36263c9 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52b8ec95-4cc5-f9e0-817e-215ebdb4c7db, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1124.411861] env[61839]: DEBUG oslo_concurrency.lockutils [req-839b0d8f-789a-48b8-8091-4774e155be68 req-0fa0240a-d3f6-47a4-9618-2c2ece331850 service nova] Releasing lock "refresh_cache-62af556c-c9b1-4de6-bb07-532ba67fa367" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1124.412148] env[61839]: DEBUG nova.compute.manager [req-839b0d8f-789a-48b8-8091-4774e155be68 req-0fa0240a-d3f6-47a4-9618-2c2ece331850 service nova] [instance: dbd34858-9806-4d3f-b829-948651056da2] Received event network-vif-deleted-7ee0f326-ead2-4849-823d-9d652c5c339b {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1124.412340] env[61839]: INFO nova.compute.manager [req-839b0d8f-789a-48b8-8091-4774e155be68 req-0fa0240a-d3f6-47a4-9618-2c2ece331850 service nova] [instance: dbd34858-9806-4d3f-b829-948651056da2] Neutron deleted interface 7ee0f326-ead2-4849-823d-9d652c5c339b; detaching it from the instance and deleting it from the info cache [ 1124.412518] env[61839]: DEBUG nova.network.neutron [req-839b0d8f-789a-48b8-8091-4774e155be68 req-0fa0240a-d3f6-47a4-9618-2c2ece331850 service nova] [instance: dbd34858-9806-4d3f-b829-948651056da2] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1124.531404] env[61839]: DEBUG oslo_concurrency.lockutils [None req-6599d9be-6065-43e1-80ef-35d18146b7db tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1124.531699] env[61839]: DEBUG oslo_concurrency.lockutils [None req-6599d9be-6065-43e1-80ef-35d18146b7db tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1124.531934] env[61839]: DEBUG nova.objects.instance [None req-6599d9be-6065-43e1-80ef-35d18146b7db tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Lazy-loading 'resources' on Instance uuid dbd34858-9806-4d3f-b829-948651056da2 {{(pid=61839) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1124.578397] env[61839]: DEBUG nova.compute.manager [None req-2dbd4976-b3c9-4622-8bfc-b04658290a17 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 3e153d8a-e069-443c-9db4-7614a6475971] Start spawning the instance on the hypervisor. {{(pid=61839) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1124.597130] env[61839]: DEBUG oslo_vmware.api [None req-fdade56a-c95a-426a-a307-4e69a36263c9 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52b8ec95-4cc5-f9e0-817e-215ebdb4c7db, 'name': SearchDatastore_Task, 'duration_secs': 0.007596} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1124.598993] env[61839]: DEBUG oslo_concurrency.lockutils [None req-fdade56a-c95a-426a-a307-4e69a36263c9 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1124.599287] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-fdade56a-c95a-426a-a307-4e69a36263c9 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk to [datastore1] 62af556c-c9b1-4de6-bb07-532ba67fa367/62af556c-c9b1-4de6-bb07-532ba67fa367.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1124.599727] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e9d3ec0f-0027-45e4-8599-eba29a8eaad7 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.605943] env[61839]: DEBUG nova.virt.hardware [None req-2dbd4976-b3c9-4622-8bfc-b04658290a17 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-18T16:51:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-18T16:50:57Z,direct_url=,disk_format='vmdk',id=e497cc62-282a-4a70-9770-22d80d8a1013,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a98e034276e44534a9feace637762da7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-18T16:50:58Z,virtual_size=,visibility=), allow threads: False {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1124.606202] env[61839]: DEBUG nova.virt.hardware [None req-2dbd4976-b3c9-4622-8bfc-b04658290a17 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Flavor limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1124.606366] env[61839]: DEBUG nova.virt.hardware [None req-2dbd4976-b3c9-4622-8bfc-b04658290a17 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Image limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1124.606579] env[61839]: DEBUG nova.virt.hardware [None req-2dbd4976-b3c9-4622-8bfc-b04658290a17 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Flavor pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1124.606744] env[61839]: DEBUG nova.virt.hardware [None req-2dbd4976-b3c9-4622-8bfc-b04658290a17 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Image 
pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1124.606900] env[61839]: DEBUG nova.virt.hardware [None req-2dbd4976-b3c9-4622-8bfc-b04658290a17 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1124.607127] env[61839]: DEBUG nova.virt.hardware [None req-2dbd4976-b3c9-4622-8bfc-b04658290a17 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1124.607297] env[61839]: DEBUG nova.virt.hardware [None req-2dbd4976-b3c9-4622-8bfc-b04658290a17 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1124.607470] env[61839]: DEBUG nova.virt.hardware [None req-2dbd4976-b3c9-4622-8bfc-b04658290a17 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Got 1 possible topologies {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1124.607641] env[61839]: DEBUG nova.virt.hardware [None req-2dbd4976-b3c9-4622-8bfc-b04658290a17 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1124.607819] env[61839]: DEBUG nova.virt.hardware [None req-2dbd4976-b3c9-4622-8bfc-b04658290a17 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1124.608902] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46aa854d-0d73-44b0-8ead-56134b1293dd {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.612574] env[61839]: DEBUG oslo_vmware.api [None req-fdade56a-c95a-426a-a307-4e69a36263c9 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Waiting for the task: (returnval){ [ 1124.612574] env[61839]: value = "task-1315114" [ 1124.612574] env[61839]: _type = "Task" [ 1124.612574] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1124.618841] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e13c699-3ea2-48a0-9858-ad944e7753f4 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.625217] env[61839]: DEBUG oslo_vmware.api [None req-fdade56a-c95a-426a-a307-4e69a36263c9 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1315114, 'name': CopyVirtualDisk_Task} progress is 0%. 
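
For the CPU-topology records a few lines above: with no flavor or image constraints, the limits default to 65536 sockets/cores/threads, so the possible topologies are just the factorizations of the vCPU count, and 1 vCPU yields only (1, 1, 1). A toy reproduction of that enumeration (not Nova's exact algorithm):

```python
# Toy enumeration of (sockets, cores, threads) triples whose product is
# the vCPU count, mirroring the "Got 1 possible topologies" record above.
import itertools


def possible_topologies(vcpus, limit=65536):
    bound = min(vcpus, limit)
    for s, c, t in itertools.product(range(1, bound + 1), repeat=3):
        if s * c * t == vcpus:
            yield (s, c, t)


print(list(possible_topologies(1)))  # [(1, 1, 1)]
```
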
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1124.911057] env[61839]: DEBUG nova.network.neutron [None req-2dbd4976-b3c9-4622-8bfc-b04658290a17 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 3e153d8a-e069-443c-9db4-7614a6475971] Successfully updated port: 3e08c400-de17-4651-a33f-716a238d9cff {{(pid=61839) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1124.915271] env[61839]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3cc63645-9722-46d8-8071-2abbe134dad3 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.925410] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08aa3f7d-4f3b-42e0-9c52-2aabc2e19c2f {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.953216] env[61839]: DEBUG nova.compute.manager [req-839b0d8f-789a-48b8-8091-4774e155be68 req-0fa0240a-d3f6-47a4-9618-2c2ece331850 service nova] [instance: dbd34858-9806-4d3f-b829-948651056da2] Detach interface failed, port_id=7ee0f326-ead2-4849-823d-9d652c5c339b, reason: Instance dbd34858-9806-4d3f-b829-948651056da2 could not be found. {{(pid=61839) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1125.107179] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8e066ee-76e1-407c-b976-16dfcb456fce {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.117391] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6f6a685-0e4a-4909-890c-1671eb4317a0 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.125353] env[61839]: DEBUG oslo_vmware.api [None req-fdade56a-c95a-426a-a307-4e69a36263c9 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1315114, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.445789} completed successfully. 
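
The completed CopyVirtualDisk_Task above materializes the cached base image as the instance's root disk. A sketch of the underlying VirtualDiskManager call; `session` and `dc_ref` remain the assumed placeholders, while the datastore paths are copied from the log:

```python
# Sketch of the CopyVirtualDisk_Task call above; `session` and `dc_ref`
# are the assumed placeholders from earlier sketches.
disk_manager = session.vim.service_content.virtualDiskManager
base = ('[datastore1] devstack-image-cache_base/'
        'e497cc62-282a-4a70-9770-22d80d8a1013/'
        'e497cc62-282a-4a70-9770-22d80d8a1013.vmdk')
dest = ('[datastore1] 62af556c-c9b1-4de6-bb07-532ba67fa367/'
        '62af556c-c9b1-4de6-bb07-532ba67fa367.vmdk')
task = session.invoke_api(session.vim, 'CopyVirtualDisk_Task', disk_manager,
                          sourceName=base, sourceDatacenter=dc_ref,
                          destName=dest, destDatacenter=dc_ref)
session.wait_for_task(task)
```
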
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1125.148147] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-fdade56a-c95a-426a-a307-4e69a36263c9 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk to [datastore1] 62af556c-c9b1-4de6-bb07-532ba67fa367/62af556c-c9b1-4de6-bb07-532ba67fa367.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1125.148466] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-fdade56a-c95a-426a-a307-4e69a36263c9 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 62af556c-c9b1-4de6-bb07-532ba67fa367] Extending root virtual disk to 1048576 {{(pid=61839) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1125.148928] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-53e5cfdd-400e-480e-a213-df63b0cefc6d {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.151199] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06d4681c-f00d-4286-b3f5-a54be1307ce2 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.159520] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13b71da8-b877-42d2-8b75-b3f102bf2e52 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.163107] env[61839]: DEBUG oslo_vmware.api [None req-fdade56a-c95a-426a-a307-4e69a36263c9 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Waiting for the task: (returnval){ [ 1125.163107] env[61839]: value = "task-1315115" [ 1125.163107] env[61839]: _type = "Task" [ 1125.163107] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1125.174591] env[61839]: DEBUG nova.compute.provider_tree [None req-6599d9be-6065-43e1-80ef-35d18146b7db tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Updating inventory in ProviderTree for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1125.180114] env[61839]: DEBUG oslo_vmware.api [None req-fdade56a-c95a-426a-a307-4e69a36263c9 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1315115, 'name': ExtendVirtualDisk_Task} progress is 0%. 
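
"Extending root virtual disk to 1048576" is in KiB, i.e. the 1 GiB root disk of the m1.nano flavor seen earlier. A sketch of the extend call, under the same assumptions as the previous sketches:

```python
# Sketch of the ExtendVirtualDisk_Task call above; 1048576 KiB = 1 GiB,
# matching root_gb=1 of the m1.nano flavor in this log.
disk_manager = session.vim.service_content.virtualDiskManager
task = session.invoke_api(
    session.vim, 'ExtendVirtualDisk_Task', disk_manager,
    name='[datastore1] 62af556c-c9b1-4de6-bb07-532ba67fa367/'
         '62af556c-c9b1-4de6-bb07-532ba67fa367.vmdk',
    datacenter=dc_ref,
    newCapacityKb=1048576,
    eagerZero=False)
session.wait_for_task(task)
```
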
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1125.250897] env[61839]: DEBUG nova.compute.manager [req-2575b6ad-54f5-4848-a374-a13995eec12f req-d6681dae-0a57-410d-952e-6c2da4b4acb5 service nova] [instance: 3e153d8a-e069-443c-9db4-7614a6475971] Received event network-vif-plugged-3e08c400-de17-4651-a33f-716a238d9cff {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1125.251202] env[61839]: DEBUG oslo_concurrency.lockutils [req-2575b6ad-54f5-4848-a374-a13995eec12f req-d6681dae-0a57-410d-952e-6c2da4b4acb5 service nova] Acquiring lock "3e153d8a-e069-443c-9db4-7614a6475971-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1125.251485] env[61839]: DEBUG oslo_concurrency.lockutils [req-2575b6ad-54f5-4848-a374-a13995eec12f req-d6681dae-0a57-410d-952e-6c2da4b4acb5 service nova] Lock "3e153d8a-e069-443c-9db4-7614a6475971-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1125.251764] env[61839]: DEBUG oslo_concurrency.lockutils [req-2575b6ad-54f5-4848-a374-a13995eec12f req-d6681dae-0a57-410d-952e-6c2da4b4acb5 service nova] Lock "3e153d8a-e069-443c-9db4-7614a6475971-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1125.252018] env[61839]: DEBUG nova.compute.manager [req-2575b6ad-54f5-4848-a374-a13995eec12f req-d6681dae-0a57-410d-952e-6c2da4b4acb5 service nova] [instance: 3e153d8a-e069-443c-9db4-7614a6475971] No waiting events found dispatching network-vif-plugged-3e08c400-de17-4651-a33f-716a238d9cff {{(pid=61839) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1125.252268] env[61839]: WARNING nova.compute.manager [req-2575b6ad-54f5-4848-a374-a13995eec12f req-d6681dae-0a57-410d-952e-6c2da4b4acb5 service nova] [instance: 3e153d8a-e069-443c-9db4-7614a6475971] Received unexpected event network-vif-plugged-3e08c400-de17-4651-a33f-716a238d9cff for instance with vm_state building and task_state spawning. [ 1125.252513] env[61839]: DEBUG nova.compute.manager [req-2575b6ad-54f5-4848-a374-a13995eec12f req-d6681dae-0a57-410d-952e-6c2da4b4acb5 service nova] [instance: 3e153d8a-e069-443c-9db4-7614a6475971] Received event network-changed-3e08c400-de17-4651-a33f-716a238d9cff {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1125.252752] env[61839]: DEBUG nova.compute.manager [req-2575b6ad-54f5-4848-a374-a13995eec12f req-d6681dae-0a57-410d-952e-6c2da4b4acb5 service nova] [instance: 3e153d8a-e069-443c-9db4-7614a6475971] Refreshing instance network info cache due to event network-changed-3e08c400-de17-4651-a33f-716a238d9cff.
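
The "<uuid>-events" lock and "No waiting events found" records above describe Nova's external-event plumbing: a spawning thread may register a waiter for network-vif-plugged, and the Neutron-driven notification pops and signals it; with no registered waiter the event is logged as unexpected. A deliberately simplified model of that dispatch, not Nova's actual code (which lives in nova.compute.manager.InstanceEvents):

```python
# Simplified, illustrative model of per-instance event dispatch.
import threading

_events = {}              # (instance_uuid, event_name) -> threading.Event
_events_lock = threading.Lock()   # plays the role of "<uuid>-events"


def prepare_event(instance_uuid, name):
    """Register a waiter before triggering the external operation."""
    with _events_lock:
        _events[(instance_uuid, name)] = threading.Event()


def pop_event(instance_uuid, name):
    """Dispatch an incoming external event to its waiter, if any."""
    with _events_lock:
        ev = _events.pop((instance_uuid, name), None)
    if ev is None:
        print('No waiting events found dispatching %s' % name)
    else:
        ev.set()          # wakes the thread blocked on ev.wait()
```
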
{{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1125.253019] env[61839]: DEBUG oslo_concurrency.lockutils [req-2575b6ad-54f5-4848-a374-a13995eec12f req-d6681dae-0a57-410d-952e-6c2da4b4acb5 service nova] Acquiring lock "refresh_cache-3e153d8a-e069-443c-9db4-7614a6475971" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1125.253241] env[61839]: DEBUG oslo_concurrency.lockutils [req-2575b6ad-54f5-4848-a374-a13995eec12f req-d6681dae-0a57-410d-952e-6c2da4b4acb5 service nova] Acquired lock "refresh_cache-3e153d8a-e069-443c-9db4-7614a6475971" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1125.253471] env[61839]: DEBUG nova.network.neutron [req-2575b6ad-54f5-4848-a374-a13995eec12f req-d6681dae-0a57-410d-952e-6c2da4b4acb5 service nova] [instance: 3e153d8a-e069-443c-9db4-7614a6475971] Refreshing network info cache for port 3e08c400-de17-4651-a33f-716a238d9cff {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1125.413678] env[61839]: DEBUG oslo_concurrency.lockutils [None req-2dbd4976-b3c9-4622-8bfc-b04658290a17 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Acquiring lock "refresh_cache-3e153d8a-e069-443c-9db4-7614a6475971" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1125.672401] env[61839]: DEBUG oslo_vmware.api [None req-fdade56a-c95a-426a-a307-4e69a36263c9 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1315115, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066385} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1125.672654] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-fdade56a-c95a-426a-a307-4e69a36263c9 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 62af556c-c9b1-4de6-bb07-532ba67fa367] Extended root virtual disk {{(pid=61839) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1125.673407] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79d9011c-4524-471b-a3b7-47d205c7bc5f {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.696183] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-fdade56a-c95a-426a-a307-4e69a36263c9 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 62af556c-c9b1-4de6-bb07-532ba67fa367] Reconfiguring VM instance instance-0000006d to attach disk [datastore1] 62af556c-c9b1-4de6-bb07-532ba67fa367/62af556c-c9b1-4de6-bb07-532ba67fa367.vmdk or device None with type sparse {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1125.696441] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-00a28e17-24e2-46e4-80fa-e1adb023db97 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.710700] env[61839]: ERROR nova.scheduler.client.report [None req-6599d9be-6065-43e1-80ef-35d18146b7db tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] [req-4fa22e4d-0e20-490c-8493-75ccf9d79e67] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID cef329e6-1ccd-42a8-bbc4-109a06d1c908. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-4fa22e4d-0e20-490c-8493-75ccf9d79e67"}]} [ 1125.718117] env[61839]: DEBUG oslo_vmware.api [None req-fdade56a-c95a-426a-a307-4e69a36263c9 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Waiting for the task: (returnval){ [ 1125.718117] env[61839]: value = "task-1315116" [ 1125.718117] env[61839]: _type = "Task" [ 1125.718117] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1125.725718] env[61839]: DEBUG oslo_vmware.api [None req-fdade56a-c95a-426a-a307-4e69a36263c9 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1315116, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1125.726568] env[61839]: DEBUG nova.scheduler.client.report [None req-6599d9be-6065-43e1-80ef-35d18146b7db tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Refreshing inventories for resource provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1125.741328] env[61839]: DEBUG nova.scheduler.client.report [None req-6599d9be-6065-43e1-80ef-35d18146b7db tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Updating ProviderTree inventory for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1125.741554] env[61839]: DEBUG nova.compute.provider_tree [None req-6599d9be-6065-43e1-80ef-35d18146b7db tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Updating inventory in ProviderTree for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1125.752894] env[61839]: DEBUG nova.scheduler.client.report [None req-6599d9be-6065-43e1-80ef-35d18146b7db tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Refreshing aggregate associations for resource provider cef329e6-1ccd-42a8-bbc4-109a06d1c908, aggregates: None {{(pid=61839) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1125.772955] env[61839]: DEBUG nova.scheduler.client.report [None req-6599d9be-6065-43e1-80ef-35d18146b7db tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Refreshing trait associations for resource provider cef329e6-1ccd-42a8-bbc4-109a06d1c908, traits: COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=61839) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1125.790808] env[61839]: DEBUG nova.network.neutron [req-2575b6ad-54f5-4848-a374-a13995eec12f req-d6681dae-0a57-410d-952e-6c2da4b4acb5 service nova] [instance: 3e153d8a-e069-443c-9db4-7614a6475971] Instance cache missing network info. 
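
The 409 a few lines above is Placement's optimistic concurrency control: every inventory PUT carries the resource-provider generation it was computed against, and a concurrent writer (here, the 62af... spawn racing the dbd3... teardown) invalidates it, forcing the refresh-and-retry visible in the surrounding records. A hedged HTTP sketch of that loop; the endpoint, token and microversion header are placeholders, and Nova's real client lives in nova/scheduler/client/report.py:

```python
# Hedged sketch of Placement's generation-based retry; endpoint and token
# are placeholders, and error handling is reduced to the 409 case.
import requests

PLACEMENT = 'http://placement.example.org'
HEADERS = {'X-Auth-Token': 'ADMIN_TOKEN',
           'OpenStack-API-Version': 'placement 1.26'}
RP_UUID = 'cef329e6-1ccd-42a8-bbc4-109a06d1c908'


def put_inventories(inventories, attempts=3):
    for _ in range(attempts):
        # Refresh the provider's current generation...
        rp = requests.get('%s/resource_providers/%s' % (PLACEMENT, RP_UUID),
                          headers=HEADERS).json()
        # ...and PUT the inventories against that generation.
        resp = requests.put(
            '%s/resource_providers/%s/inventories' % (PLACEMENT, RP_UUID),
            headers=HEADERS,
            json={'resource_provider_generation': rp['generation'],
                  'inventories': inventories})
        if resp.status_code != 409:
            return resp
        # 409 "placement.concurrent_update": another writer bumped the
        # generation first; loop to refresh and retry, as the log does.
    raise RuntimeError('placement generation conflict persisted')
```
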
{{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1125.840998] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a5d2a37-66a4-4689-9079-32657ee8c718 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.848956] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8900b29e-b91d-46f1-89b3-8fcc87bef217 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.879244] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cd721c4-26f5-4bdc-a044-8658254a3dd2 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.886093] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d19359be-98d7-4be8-b7e9-a54703e5fdd4 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.900307] env[61839]: DEBUG nova.compute.provider_tree [None req-6599d9be-6065-43e1-80ef-35d18146b7db tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Updating inventory in ProviderTree for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1125.932332] env[61839]: DEBUG nova.network.neutron [req-2575b6ad-54f5-4848-a374-a13995eec12f req-d6681dae-0a57-410d-952e-6c2da4b4acb5 service nova] [instance: 3e153d8a-e069-443c-9db4-7614a6475971] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1126.228123] env[61839]: DEBUG oslo_vmware.api [None req-fdade56a-c95a-426a-a307-4e69a36263c9 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1315116, 'name': ReconfigVM_Task, 'duration_secs': 0.275547} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1126.228472] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-fdade56a-c95a-426a-a307-4e69a36263c9 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 62af556c-c9b1-4de6-bb07-532ba67fa367] Reconfigured VM instance instance-0000006d to attach disk [datastore1] 62af556c-c9b1-4de6-bb07-532ba67fa367/62af556c-c9b1-4de6-bb07-532ba67fa367.vmdk or device None with type sparse {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1126.229107] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2469d410-d629-40a5-99f4-fd2249a745cc {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.235731] env[61839]: DEBUG oslo_vmware.api [None req-fdade56a-c95a-426a-a307-4e69a36263c9 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Waiting for the task: (returnval){ [ 1126.235731] env[61839]: value = "task-1315117" [ 1126.235731] env[61839]: _type = "Task" [ 1126.235731] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1126.242806] env[61839]: DEBUG oslo_vmware.api [None req-fdade56a-c95a-426a-a307-4e69a36263c9 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1315117, 'name': Rename_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1126.430584] env[61839]: DEBUG nova.scheduler.client.report [None req-6599d9be-6065-43e1-80ef-35d18146b7db tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Updated inventory for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 with generation 139 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1126.430863] env[61839]: DEBUG nova.compute.provider_tree [None req-6599d9be-6065-43e1-80ef-35d18146b7db tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Updating resource provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 generation from 139 to 140 during operation: update_inventory {{(pid=61839) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1126.431063] env[61839]: DEBUG nova.compute.provider_tree [None req-6599d9be-6065-43e1-80ef-35d18146b7db tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Updating inventory in ProviderTree for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 
'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1126.434374] env[61839]: DEBUG oslo_concurrency.lockutils [req-2575b6ad-54f5-4848-a374-a13995eec12f req-d6681dae-0a57-410d-952e-6c2da4b4acb5 service nova] Releasing lock "refresh_cache-3e153d8a-e069-443c-9db4-7614a6475971" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1126.434902] env[61839]: DEBUG oslo_concurrency.lockutils [None req-2dbd4976-b3c9-4622-8bfc-b04658290a17 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Acquired lock "refresh_cache-3e153d8a-e069-443c-9db4-7614a6475971" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1126.435070] env[61839]: DEBUG nova.network.neutron [None req-2dbd4976-b3c9-4622-8bfc-b04658290a17 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 3e153d8a-e069-443c-9db4-7614a6475971] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1126.745430] env[61839]: DEBUG oslo_vmware.api [None req-fdade56a-c95a-426a-a307-4e69a36263c9 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1315117, 'name': Rename_Task, 'duration_secs': 0.149726} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1126.745672] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-fdade56a-c95a-426a-a307-4e69a36263c9 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 62af556c-c9b1-4de6-bb07-532ba67fa367] Powering on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1126.745926] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a484ce7f-a6b4-441b-9fdd-2d10ebbe89eb {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.751584] env[61839]: DEBUG oslo_vmware.api [None req-fdade56a-c95a-426a-a307-4e69a36263c9 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Waiting for the task: (returnval){ [ 1126.751584] env[61839]: value = "task-1315118" [ 1126.751584] env[61839]: _type = "Task" [ 1126.751584] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1126.758447] env[61839]: DEBUG oslo_vmware.api [None req-fdade56a-c95a-426a-a307-4e69a36263c9 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1315118, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1126.936822] env[61839]: DEBUG oslo_concurrency.lockutils [None req-6599d9be-6065-43e1-80ef-35d18146b7db tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.404s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1126.956979] env[61839]: INFO nova.scheduler.client.report [None req-6599d9be-6065-43e1-80ef-35d18146b7db tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Deleted allocations for instance dbd34858-9806-4d3f-b829-948651056da2 [ 1126.982888] env[61839]: DEBUG nova.network.neutron [None req-2dbd4976-b3c9-4622-8bfc-b04658290a17 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 3e153d8a-e069-443c-9db4-7614a6475971] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1127.141885] env[61839]: DEBUG nova.network.neutron [None req-2dbd4976-b3c9-4622-8bfc-b04658290a17 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 3e153d8a-e069-443c-9db4-7614a6475971] Updating instance_info_cache with network_info: [{"id": "3e08c400-de17-4651-a33f-716a238d9cff", "address": "fa:16:3e:2c:ae:b6", "network": {"id": "90aa5d36-d3ac-4a62-9f9d-7578cd9ac466", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-2104030849-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "48d8c406ff504d71bba5fb74caf11c14", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eed34ae1-5f7f-4deb-9db8-85eaa1e60c29", "external-id": "nsx-vlan-transportzone-780", "segmentation_id": 780, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3e08c400-de", "ovs_interfaceid": "3e08c400-de17-4651-a33f-716a238d9cff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1127.262216] env[61839]: DEBUG oslo_vmware.api [None req-fdade56a-c95a-426a-a307-4e69a36263c9 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1315118, 'name': PowerOnVM_Task, 'duration_secs': 0.457811} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1127.262434] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-fdade56a-c95a-426a-a307-4e69a36263c9 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 62af556c-c9b1-4de6-bb07-532ba67fa367] Powered on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1127.262633] env[61839]: INFO nova.compute.manager [None req-fdade56a-c95a-426a-a307-4e69a36263c9 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 62af556c-c9b1-4de6-bb07-532ba67fa367] Took 6.50 seconds to spawn the instance on the hypervisor. [ 1127.262817] env[61839]: DEBUG nova.compute.manager [None req-fdade56a-c95a-426a-a307-4e69a36263c9 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 62af556c-c9b1-4de6-bb07-532ba67fa367] Checking state {{(pid=61839) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1127.263574] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e68dec0f-f86a-4352-a6c6-69d59d653ecc {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.465361] env[61839]: DEBUG oslo_concurrency.lockutils [None req-6599d9be-6065-43e1-80ef-35d18146b7db tempest-AttachInterfacesTestJSON-1805184072 tempest-AttachInterfacesTestJSON-1805184072-project-member] Lock "dbd34858-9806-4d3f-b829-948651056da2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.756s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1127.645098] env[61839]: DEBUG oslo_concurrency.lockutils [None req-2dbd4976-b3c9-4622-8bfc-b04658290a17 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Releasing lock "refresh_cache-3e153d8a-e069-443c-9db4-7614a6475971" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1127.645440] env[61839]: DEBUG nova.compute.manager [None req-2dbd4976-b3c9-4622-8bfc-b04658290a17 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 3e153d8a-e069-443c-9db4-7614a6475971] Instance network_info: |[{"id": "3e08c400-de17-4651-a33f-716a238d9cff", "address": "fa:16:3e:2c:ae:b6", "network": {"id": "90aa5d36-d3ac-4a62-9f9d-7578cd9ac466", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-2104030849-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "48d8c406ff504d71bba5fb74caf11c14", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eed34ae1-5f7f-4deb-9db8-85eaa1e60c29", "external-id": "nsx-vlan-transportzone-780", "segmentation_id": 780, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3e08c400-de", "ovs_interfaceid": "3e08c400-de17-4651-a33f-716a238d9cff", "qbh_params": null, 
"qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1127.645898] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-2dbd4976-b3c9-4622-8bfc-b04658290a17 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 3e153d8a-e069-443c-9db4-7614a6475971] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2c:ae:b6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'eed34ae1-5f7f-4deb-9db8-85eaa1e60c29', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3e08c400-de17-4651-a33f-716a238d9cff', 'vif_model': 'vmxnet3'}] {{(pid=61839) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1127.653289] env[61839]: DEBUG oslo.service.loopingcall [None req-2dbd4976-b3c9-4622-8bfc-b04658290a17 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1127.653517] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3e153d8a-e069-443c-9db4-7614a6475971] Creating VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1127.653750] env[61839]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-26cc1d58-6252-4ecf-aab0-7d1c4b9d3d2b {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.674115] env[61839]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1127.674115] env[61839]: value = "task-1315119" [ 1127.674115] env[61839]: _type = "Task" [ 1127.674115] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1127.683363] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1315119, 'name': CreateVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1127.779903] env[61839]: INFO nova.compute.manager [None req-fdade56a-c95a-426a-a307-4e69a36263c9 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 62af556c-c9b1-4de6-bb07-532ba67fa367] Took 11.19 seconds to build instance. [ 1128.185920] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1315119, 'name': CreateVM_Task, 'duration_secs': 0.486935} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1128.185920] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3e153d8a-e069-443c-9db4-7614a6475971] Created VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1128.185920] env[61839]: DEBUG oslo_concurrency.lockutils [None req-2dbd4976-b3c9-4622-8bfc-b04658290a17 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1128.185920] env[61839]: DEBUG oslo_concurrency.lockutils [None req-2dbd4976-b3c9-4622-8bfc-b04658290a17 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1128.185920] env[61839]: DEBUG oslo_concurrency.lockutils [None req-2dbd4976-b3c9-4622-8bfc-b04658290a17 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1128.186384] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ba1324b7-699d-408e-af91-508d0b8cb9ac {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.190888] env[61839]: DEBUG oslo_vmware.api [None req-2dbd4976-b3c9-4622-8bfc-b04658290a17 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Waiting for the task: (returnval){ [ 1128.190888] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52820ecb-0a06-dd43-0e5c-7d186acf4932" [ 1128.190888] env[61839]: _type = "Task" [ 1128.190888] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1128.203673] env[61839]: DEBUG oslo_vmware.api [None req-2dbd4976-b3c9-4622-8bfc-b04658290a17 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52820ecb-0a06-dd43-0e5c-7d186acf4932, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1128.281887] env[61839]: DEBUG oslo_concurrency.lockutils [None req-fdade56a-c95a-426a-a307-4e69a36263c9 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Lock "62af556c-c9b1-4de6-bb07-532ba67fa367" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 12.705s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1128.702333] env[61839]: DEBUG oslo_vmware.api [None req-2dbd4976-b3c9-4622-8bfc-b04658290a17 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52820ecb-0a06-dd43-0e5c-7d186acf4932, 'name': SearchDatastore_Task, 'duration_secs': 0.011777} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1128.705250] env[61839]: DEBUG oslo_concurrency.lockutils [None req-2dbd4976-b3c9-4622-8bfc-b04658290a17 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1128.705250] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-2dbd4976-b3c9-4622-8bfc-b04658290a17 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 3e153d8a-e069-443c-9db4-7614a6475971] Processing image e497cc62-282a-4a70-9770-22d80d8a1013 {{(pid=61839) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1128.705250] env[61839]: DEBUG oslo_concurrency.lockutils [None req-2dbd4976-b3c9-4622-8bfc-b04658290a17 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1128.705250] env[61839]: DEBUG oslo_concurrency.lockutils [None req-2dbd4976-b3c9-4622-8bfc-b04658290a17 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1128.705250] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-2dbd4976-b3c9-4622-8bfc-b04658290a17 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1128.705250] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-41a334d7-7de6-492b-9398-3bc034235f26 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.714022] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-2dbd4976-b3c9-4622-8bfc-b04658290a17 tempest-ServerActionsTestOtherA-731553444 
tempest-ServerActionsTestOtherA-731553444-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1128.714022] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-2dbd4976-b3c9-4622-8bfc-b04658290a17 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61839) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1128.714022] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5d22106e-6f86-493c-aed0-889964e61f97 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.718960] env[61839]: DEBUG oslo_vmware.api [None req-2dbd4976-b3c9-4622-8bfc-b04658290a17 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Waiting for the task: (returnval){ [ 1128.718960] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52578043-bd50-72d7-5ed9-29dd6c4463c7" [ 1128.718960] env[61839]: _type = "Task" [ 1128.718960] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1128.728376] env[61839]: DEBUG nova.compute.manager [req-26092cb4-475e-4106-a81e-1f88f2bc6ac1 req-d42a2508-fc97-467a-add4-f9993be3c146 service nova] [instance: 62af556c-c9b1-4de6-bb07-532ba67fa367] Received event network-changed-539fa2ab-4763-4c46-ae0d-4db54ad64fa5 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1128.728376] env[61839]: DEBUG nova.compute.manager [req-26092cb4-475e-4106-a81e-1f88f2bc6ac1 req-d42a2508-fc97-467a-add4-f9993be3c146 service nova] [instance: 62af556c-c9b1-4de6-bb07-532ba67fa367] Refreshing instance network info cache due to event network-changed-539fa2ab-4763-4c46-ae0d-4db54ad64fa5. {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1128.728376] env[61839]: DEBUG oslo_concurrency.lockutils [req-26092cb4-475e-4106-a81e-1f88f2bc6ac1 req-d42a2508-fc97-467a-add4-f9993be3c146 service nova] Acquiring lock "refresh_cache-62af556c-c9b1-4de6-bb07-532ba67fa367" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1128.728376] env[61839]: DEBUG oslo_concurrency.lockutils [req-26092cb4-475e-4106-a81e-1f88f2bc6ac1 req-d42a2508-fc97-467a-add4-f9993be3c146 service nova] Acquired lock "refresh_cache-62af556c-c9b1-4de6-bb07-532ba67fa367" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1128.728376] env[61839]: DEBUG nova.network.neutron [req-26092cb4-475e-4106-a81e-1f88f2bc6ac1 req-d42a2508-fc97-467a-add4-f9993be3c146 service nova] [instance: 62af556c-c9b1-4de6-bb07-532ba67fa367] Refreshing network info cache for port 539fa2ab-4763-4c46-ae0d-4db54ad64fa5 {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1128.736781] env[61839]: DEBUG oslo_vmware.api [None req-2dbd4976-b3c9-4622-8bfc-b04658290a17 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52578043-bd50-72d7-5ed9-29dd6c4463c7, 'name': SearchDatastore_Task, 'duration_secs': 0.008618} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1128.737919] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-da00a481-e216-4d9c-8c81-441b9659aaf4 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.746247] env[61839]: DEBUG oslo_vmware.api [None req-2dbd4976-b3c9-4622-8bfc-b04658290a17 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Waiting for the task: (returnval){ [ 1128.746247] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]5291823d-7983-0ea8-62fb-3a377a04911e" [ 1128.746247] env[61839]: _type = "Task" [ 1128.746247] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1128.755228] env[61839]: DEBUG oslo_vmware.api [None req-2dbd4976-b3c9-4622-8bfc-b04658290a17 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]5291823d-7983-0ea8-62fb-3a377a04911e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1129.260025] env[61839]: DEBUG oslo_vmware.api [None req-2dbd4976-b3c9-4622-8bfc-b04658290a17 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]5291823d-7983-0ea8-62fb-3a377a04911e, 'name': SearchDatastore_Task, 'duration_secs': 0.008695} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1129.260025] env[61839]: DEBUG oslo_concurrency.lockutils [None req-2dbd4976-b3c9-4622-8bfc-b04658290a17 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1129.260025] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-2dbd4976-b3c9-4622-8bfc-b04658290a17 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk to [datastore2] 3e153d8a-e069-443c-9db4-7614a6475971/3e153d8a-e069-443c-9db4-7614a6475971.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1129.260025] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4250f02a-33a4-429e-9cfc-d330ed8fc4c0 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.267599] env[61839]: DEBUG oslo_vmware.api [None req-2dbd4976-b3c9-4622-8bfc-b04658290a17 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Waiting for the task: (returnval){ [ 1129.267599] env[61839]: value = "task-1315120" [ 1129.267599] env[61839]: _type = "Task" [ 1129.267599] env[61839]: } to complete. 
{{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1129.278436] env[61839]: DEBUG oslo_vmware.api [None req-2dbd4976-b3c9-4622-8bfc-b04658290a17 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1315120, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1129.465924] env[61839]: DEBUG nova.network.neutron [req-26092cb4-475e-4106-a81e-1f88f2bc6ac1 req-d42a2508-fc97-467a-add4-f9993be3c146 service nova] [instance: 62af556c-c9b1-4de6-bb07-532ba67fa367] Updated VIF entry in instance network info cache for port 539fa2ab-4763-4c46-ae0d-4db54ad64fa5. {{(pid=61839) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1129.466220] env[61839]: DEBUG nova.network.neutron [req-26092cb4-475e-4106-a81e-1f88f2bc6ac1 req-d42a2508-fc97-467a-add4-f9993be3c146 service nova] [instance: 62af556c-c9b1-4de6-bb07-532ba67fa367] Updating instance_info_cache with network_info: [{"id": "539fa2ab-4763-4c46-ae0d-4db54ad64fa5", "address": "fa:16:3e:3b:e7:ff", "network": {"id": "54f10e74-ee7b-499c-a6b6-a27d337719cd", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-79581761-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.143", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5f789f3900a347b59c491e9d141fb9e7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe20ef0e-0991-44d7-887d-08dddac0b56b", "external-id": "nsx-vlan-transportzone-991", "segmentation_id": 991, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap539fa2ab-47", "ovs_interfaceid": "539fa2ab-4763-4c46-ae0d-4db54ad64fa5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1129.479531] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager._sync_power_states {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1129.778162] env[61839]: DEBUG oslo_vmware.api [None req-2dbd4976-b3c9-4622-8bfc-b04658290a17 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1315120, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1129.972199] env[61839]: DEBUG oslo_concurrency.lockutils [req-26092cb4-475e-4106-a81e-1f88f2bc6ac1 req-d42a2508-fc97-467a-add4-f9993be3c146 service nova] Releasing lock "refresh_cache-62af556c-c9b1-4de6-bb07-532ba67fa367" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1129.982988] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Getting list of instances from cluster (obj){ [ 1129.982988] env[61839]: value = "domain-c8" [ 1129.982988] env[61839]: _type = "ClusterComputeResource" [ 1129.982988] env[61839]: } {{(pid=61839) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 1129.984071] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc3d2385-301d-48f6-b69f-bf0efda928b0 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.999632] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Got total of 4 instances {{(pid=61839) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 1129.999817] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Triggering sync for uuid 7f7b3f51-3e96-49f1-a84a-81ae649e6938 {{(pid=61839) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10338}} [ 1130.000032] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Triggering sync for uuid a1defab7-8433-411d-b7e2-c31f6a34b8e0 {{(pid=61839) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10338}} [ 1130.000209] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Triggering sync for uuid 62af556c-c9b1-4de6-bb07-532ba67fa367 {{(pid=61839) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10338}} [ 1130.000366] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Triggering sync for uuid 3e153d8a-e069-443c-9db4-7614a6475971 {{(pid=61839) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10338}} [ 1130.000708] env[61839]: DEBUG oslo_concurrency.lockutils [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Acquiring lock "7f7b3f51-3e96-49f1-a84a-81ae649e6938" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1130.000938] env[61839]: DEBUG oslo_concurrency.lockutils [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Lock "7f7b3f51-3e96-49f1-a84a-81ae649e6938" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1130.001222] env[61839]: DEBUG oslo_concurrency.lockutils [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Acquiring lock "a1defab7-8433-411d-b7e2-c31f6a34b8e0" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1130.001406] env[61839]: DEBUG oslo_concurrency.lockutils [None 
req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Lock "a1defab7-8433-411d-b7e2-c31f6a34b8e0" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1130.001632] env[61839]: DEBUG oslo_concurrency.lockutils [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Acquiring lock "62af556c-c9b1-4de6-bb07-532ba67fa367" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1130.001821] env[61839]: DEBUG oslo_concurrency.lockutils [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Lock "62af556c-c9b1-4de6-bb07-532ba67fa367" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1130.002100] env[61839]: DEBUG oslo_concurrency.lockutils [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Acquiring lock "3e153d8a-e069-443c-9db4-7614a6475971" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1130.002910] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-307c9d55-aad9-42cf-8a71-fd539fb6558b {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.006020] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74b26467-03a0-443c-9ecc-2ffc4d9fc006 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.009067] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d0d0e9c-23eb-46db-b02d-9baa9ebdcc96 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.011579] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1130.279910] env[61839]: DEBUG oslo_vmware.api [None req-2dbd4976-b3c9-4622-8bfc-b04658290a17 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1315120, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.517125} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1130.280293] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-2dbd4976-b3c9-4622-8bfc-b04658290a17 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk to [datastore2] 3e153d8a-e069-443c-9db4-7614a6475971/3e153d8a-e069-443c-9db4-7614a6475971.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1130.280422] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-2dbd4976-b3c9-4622-8bfc-b04658290a17 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 3e153d8a-e069-443c-9db4-7614a6475971] Extending root virtual disk to 1048576 {{(pid=61839) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1130.280684] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c7bc2698-bba6-440b-8052-122e15bfe775 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.286951] env[61839]: DEBUG oslo_vmware.api [None req-2dbd4976-b3c9-4622-8bfc-b04658290a17 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Waiting for the task: (returnval){ [ 1130.286951] env[61839]: value = "task-1315121" [ 1130.286951] env[61839]: _type = "Task" [ 1130.286951] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1130.294433] env[61839]: DEBUG oslo_vmware.api [None req-2dbd4976-b3c9-4622-8bfc-b04658290a17 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1315121, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1130.518036] env[61839]: DEBUG oslo_concurrency.lockutils [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Lock "7f7b3f51-3e96-49f1-a84a-81ae649e6938" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.517s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1130.523687] env[61839]: DEBUG oslo_concurrency.lockutils [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Lock "62af556c-c9b1-4de6-bb07-532ba67fa367" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.522s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1130.526173] env[61839]: DEBUG oslo_concurrency.lockutils [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Lock "a1defab7-8433-411d-b7e2-c31f6a34b8e0" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.525s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1130.798811] env[61839]: DEBUG oslo_vmware.api [None req-2dbd4976-b3c9-4622-8bfc-b04658290a17 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1315121, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.262155} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1130.799480] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-2dbd4976-b3c9-4622-8bfc-b04658290a17 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 3e153d8a-e069-443c-9db4-7614a6475971] Extended root virtual disk {{(pid=61839) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1130.801027] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cecd6ea-5977-4819-b043-48c34db08e76 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.826739] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-2dbd4976-b3c9-4622-8bfc-b04658290a17 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 3e153d8a-e069-443c-9db4-7614a6475971] Reconfiguring VM instance instance-0000006e to attach disk [datastore2] 3e153d8a-e069-443c-9db4-7614a6475971/3e153d8a-e069-443c-9db4-7614a6475971.vmdk or device None with type sparse {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1130.827199] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7321594b-5f45-4cb2-bfad-6fb2b67c5707 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.846949] env[61839]: DEBUG oslo_vmware.api [None req-2dbd4976-b3c9-4622-8bfc-b04658290a17 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Waiting for the task: (returnval){ [ 1130.846949] env[61839]: value = "task-1315122" [ 1130.846949] env[61839]: _type = "Task" [ 1130.846949] env[61839]: } to complete. 
{{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1130.855780] env[61839]: DEBUG oslo_vmware.api [None req-2dbd4976-b3c9-4622-8bfc-b04658290a17 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1315122, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1131.357986] env[61839]: DEBUG oslo_vmware.api [None req-2dbd4976-b3c9-4622-8bfc-b04658290a17 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1315122, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1131.490101] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1131.858403] env[61839]: DEBUG oslo_vmware.api [None req-2dbd4976-b3c9-4622-8bfc-b04658290a17 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1315122, 'name': ReconfigVM_Task, 'duration_secs': 0.991713} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1131.858694] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-2dbd4976-b3c9-4622-8bfc-b04658290a17 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 3e153d8a-e069-443c-9db4-7614a6475971] Reconfigured VM instance instance-0000006e to attach disk [datastore2] 3e153d8a-e069-443c-9db4-7614a6475971/3e153d8a-e069-443c-9db4-7614a6475971.vmdk or device None with type sparse {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1131.859346] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4f85d8ec-4e09-493f-a61b-e7d7e24dca95 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.865425] env[61839]: DEBUG oslo_vmware.api [None req-2dbd4976-b3c9-4622-8bfc-b04658290a17 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Waiting for the task: (returnval){ [ 1131.865425] env[61839]: value = "task-1315123" [ 1131.865425] env[61839]: _type = "Task" [ 1131.865425] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1131.872933] env[61839]: DEBUG oslo_vmware.api [None req-2dbd4976-b3c9-4622-8bfc-b04658290a17 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1315123, 'name': Rename_Task} progress is 5%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1132.375182] env[61839]: DEBUG oslo_vmware.api [None req-2dbd4976-b3c9-4622-8bfc-b04658290a17 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1315123, 'name': Rename_Task, 'duration_secs': 0.126496} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1132.375542] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-2dbd4976-b3c9-4622-8bfc-b04658290a17 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 3e153d8a-e069-443c-9db4-7614a6475971] Powering on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1132.375759] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-93df3683-27fd-4c91-9a97-2e662b17c119 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.382246] env[61839]: DEBUG oslo_vmware.api [None req-2dbd4976-b3c9-4622-8bfc-b04658290a17 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Waiting for the task: (returnval){ [ 1132.382246] env[61839]: value = "task-1315124" [ 1132.382246] env[61839]: _type = "Task" [ 1132.382246] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1132.390755] env[61839]: DEBUG oslo_vmware.api [None req-2dbd4976-b3c9-4622-8bfc-b04658290a17 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1315124, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1132.489531] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1132.489710] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Starting heal instance info cache {{(pid=61839) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 1132.489829] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Rebuilding the list of instances to heal {{(pid=61839) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1132.892185] env[61839]: DEBUG oslo_vmware.api [None req-2dbd4976-b3c9-4622-8bfc-b04658290a17 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1315124, 'name': PowerOnVM_Task, 'duration_secs': 0.463832} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1132.892502] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-2dbd4976-b3c9-4622-8bfc-b04658290a17 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 3e153d8a-e069-443c-9db4-7614a6475971] Powered on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1132.892759] env[61839]: INFO nova.compute.manager [None req-2dbd4976-b3c9-4622-8bfc-b04658290a17 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 3e153d8a-e069-443c-9db4-7614a6475971] Took 8.31 seconds to spawn the instance on the hypervisor. 
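Note: the PowerOnVM_Task exchange above (invoke, receive a "task-1315124" style reference, then "progress is 0%" ... "completed successfully") is the same invoke-then-poll pattern behind every vCenter task in this log: Rename_Task, ReconfigVM_Task, CopyVirtualDisk_Task and CreateVM_Task all go through oslo.vmware's wait_for_task, which emits the _poll_task progress lines. A minimal sketch of that pattern, assuming placeholder vCenter credentials and a pre-resolved VM managed-object reference (neither is taken from this log):

    from oslo_vmware import api

    # Session setup mirrors what the driver does at startup; host, user
    # and password here are placeholders, not values from this deployment.
    session = api.VMwareAPISession('vc.example.test', 'user', 'secret',
                                   api_retry_count=10,
                                   task_poll_interval=0.5)

    vm_ref = ...  # managed-object reference of the target VM (assumed given)

    # invoke_api starts the vCenter task and returns its reference;
    # wait_for_task polls it, logging "progress is N%" until success, then
    # returns the task info (the source of the duration_secs values above).
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    task_info = session.wait_for_task(task)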
[ 1132.892997] env[61839]: DEBUG nova.compute.manager [None req-2dbd4976-b3c9-4622-8bfc-b04658290a17 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 3e153d8a-e069-443c-9db4-7614a6475971] Checking state {{(pid=61839) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1132.893777] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f01f91a3-e5fc-4d22-984c-42965e54800d {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.995059] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] [instance: 3e153d8a-e069-443c-9db4-7614a6475971] Skipping network cache update for instance because it is Building. {{(pid=61839) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 1133.022679] env[61839]: DEBUG oslo_concurrency.lockutils [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Acquiring lock "refresh_cache-7f7b3f51-3e96-49f1-a84a-81ae649e6938" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1133.022864] env[61839]: DEBUG oslo_concurrency.lockutils [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Acquired lock "refresh_cache-7f7b3f51-3e96-49f1-a84a-81ae649e6938" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1133.023029] env[61839]: DEBUG nova.network.neutron [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] [instance: 7f7b3f51-3e96-49f1-a84a-81ae649e6938] Forcefully refreshing network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 1133.023192] env[61839]: DEBUG nova.objects.instance [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Lazy-loading 'info_cache' on Instance uuid 7f7b3f51-3e96-49f1-a84a-81ae649e6938 {{(pid=61839) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1133.274304] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0b9a5533-16d5-44b5-91a3-657fb6e57c75 tempest-ServerTagsTestJSON-1057645175 tempest-ServerTagsTestJSON-1057645175-project-member] Acquiring lock "fa0cfb44-5ebf-4472-af93-8f8c518714fd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1133.274690] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0b9a5533-16d5-44b5-91a3-657fb6e57c75 tempest-ServerTagsTestJSON-1057645175 tempest-ServerTagsTestJSON-1057645175-project-member] Lock "fa0cfb44-5ebf-4472-af93-8f8c518714fd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1133.412953] env[61839]: INFO nova.compute.manager [None req-2dbd4976-b3c9-4622-8bfc-b04658290a17 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 3e153d8a-e069-443c-9db4-7614a6475971] Took 13.90 seconds to build instance. 
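Note: the lock lines that bracket this build ("Acquiring lock ... by ...", "acquired ... :: waited 0.000s", '"released" ... :: held 15.404s' just below) are emitted by oslo.concurrency. The waited/held variants with the "inner" source marker come from the synchronized decorator, while the plain Acquiring/Acquired/Releasing lines around the refresh_cache-* locks come from the lock() context manager. A minimal sketch of both producing patterns (lock names copied from the log, bodies illustrative):

    from oslo_concurrency import lockutils

    # Decorator form: logs acquire with waited time on entry and
    # 'released ... held N.NNNs' on exit, attributed to the wrapped callable.
    @lockutils.synchronized('compute_resources')
    def instance_claim():
        pass  # critical section

    # Context-manager form: logs Acquiring/Acquired/Releasing without timings.
    with lockutils.lock('refresh_cache-3e153d8a-e069-443c-9db4-7614a6475971'):
        pass  # e.g. rebuilding the instance network info cache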
[ 1133.777202] env[61839]: DEBUG nova.compute.manager [None req-0b9a5533-16d5-44b5-91a3-657fb6e57c75 tempest-ServerTagsTestJSON-1057645175 tempest-ServerTagsTestJSON-1057645175-project-member] [instance: fa0cfb44-5ebf-4472-af93-8f8c518714fd] Starting instance... {{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1133.915133] env[61839]: DEBUG oslo_concurrency.lockutils [None req-2dbd4976-b3c9-4622-8bfc-b04658290a17 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Lock "3e153d8a-e069-443c-9db4-7614a6475971" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.404s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1133.915950] env[61839]: DEBUG oslo_concurrency.lockutils [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Lock "3e153d8a-e069-443c-9db4-7614a6475971" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 3.914s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1133.916272] env[61839]: INFO nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] [instance: 3e153d8a-e069-443c-9db4-7614a6475971] During sync_power_state the instance has a pending task (spawning). Skip. [ 1133.916547] env[61839]: DEBUG oslo_concurrency.lockutils [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Lock "3e153d8a-e069-443c-9db4-7614a6475971" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1134.212508] env[61839]: DEBUG nova.compute.manager [req-2216c996-5d82-48cc-9fd7-759ff001db3b req-d7d9a721-cbc0-4ac2-957f-6b78c4879cca service nova] [instance: 3e153d8a-e069-443c-9db4-7614a6475971] Received event network-changed-3e08c400-de17-4651-a33f-716a238d9cff {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1134.212508] env[61839]: DEBUG nova.compute.manager [req-2216c996-5d82-48cc-9fd7-759ff001db3b req-d7d9a721-cbc0-4ac2-957f-6b78c4879cca service nova] [instance: 3e153d8a-e069-443c-9db4-7614a6475971] Refreshing instance network info cache due to event network-changed-3e08c400-de17-4651-a33f-716a238d9cff. 
{{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1134.212508] env[61839]: DEBUG oslo_concurrency.lockutils [req-2216c996-5d82-48cc-9fd7-759ff001db3b req-d7d9a721-cbc0-4ac2-957f-6b78c4879cca service nova] Acquiring lock "refresh_cache-3e153d8a-e069-443c-9db4-7614a6475971" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1134.212718] env[61839]: DEBUG oslo_concurrency.lockutils [req-2216c996-5d82-48cc-9fd7-759ff001db3b req-d7d9a721-cbc0-4ac2-957f-6b78c4879cca service nova] Acquired lock "refresh_cache-3e153d8a-e069-443c-9db4-7614a6475971" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1134.212819] env[61839]: DEBUG nova.network.neutron [req-2216c996-5d82-48cc-9fd7-759ff001db3b req-d7d9a721-cbc0-4ac2-957f-6b78c4879cca service nova] [instance: 3e153d8a-e069-443c-9db4-7614a6475971] Refreshing network info cache for port 3e08c400-de17-4651-a33f-716a238d9cff {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1134.302416] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0b9a5533-16d5-44b5-91a3-657fb6e57c75 tempest-ServerTagsTestJSON-1057645175 tempest-ServerTagsTestJSON-1057645175-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1134.302682] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0b9a5533-16d5-44b5-91a3-657fb6e57c75 tempest-ServerTagsTestJSON-1057645175 tempest-ServerTagsTestJSON-1057645175-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1134.304123] env[61839]: INFO nova.compute.claims [None req-0b9a5533-16d5-44b5-91a3-657fb6e57c75 tempest-ServerTagsTestJSON-1057645175 tempest-ServerTagsTestJSON-1057645175-project-member] [instance: fa0cfb44-5ebf-4472-af93-8f8c518714fd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1134.865791] env[61839]: DEBUG nova.network.neutron [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] [instance: 7f7b3f51-3e96-49f1-a84a-81ae649e6938] Updating instance_info_cache with network_info: [{"id": "6b6f5c89-f3fe-4e29-82b8-e9e8f2658bfd", "address": "fa:16:3e:17:50:4a", "network": {"id": "90aa5d36-d3ac-4a62-9f9d-7578cd9ac466", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-2104030849-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.144", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "48d8c406ff504d71bba5fb74caf11c14", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eed34ae1-5f7f-4deb-9db8-85eaa1e60c29", "external-id": "nsx-vlan-transportzone-780", "segmentation_id": 780, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6b6f5c89-f3", "ovs_interfaceid": "6b6f5c89-f3fe-4e29-82b8-e9e8f2658bfd", 
"qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1134.990755] env[61839]: DEBUG nova.network.neutron [req-2216c996-5d82-48cc-9fd7-759ff001db3b req-d7d9a721-cbc0-4ac2-957f-6b78c4879cca service nova] [instance: 3e153d8a-e069-443c-9db4-7614a6475971] Updated VIF entry in instance network info cache for port 3e08c400-de17-4651-a33f-716a238d9cff. {{(pid=61839) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1134.991187] env[61839]: DEBUG nova.network.neutron [req-2216c996-5d82-48cc-9fd7-759ff001db3b req-d7d9a721-cbc0-4ac2-957f-6b78c4879cca service nova] [instance: 3e153d8a-e069-443c-9db4-7614a6475971] Updating instance_info_cache with network_info: [{"id": "3e08c400-de17-4651-a33f-716a238d9cff", "address": "fa:16:3e:2c:ae:b6", "network": {"id": "90aa5d36-d3ac-4a62-9f9d-7578cd9ac466", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-2104030849-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "48d8c406ff504d71bba5fb74caf11c14", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eed34ae1-5f7f-4deb-9db8-85eaa1e60c29", "external-id": "nsx-vlan-transportzone-780", "segmentation_id": 780, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3e08c400-de", "ovs_interfaceid": "3e08c400-de17-4651-a33f-716a238d9cff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1135.368301] env[61839]: DEBUG oslo_concurrency.lockutils [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Releasing lock "refresh_cache-7f7b3f51-3e96-49f1-a84a-81ae649e6938" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1135.368493] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] [instance: 7f7b3f51-3e96-49f1-a84a-81ae649e6938] Updated the network info_cache for instance {{(pid=61839) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9999}} [ 1135.368718] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1135.368902] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1135.394011] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1d1fc5e-f7ac-4978-8fdc-443fe019ade4 {{(pid=61839) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.402319] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1029115f-e8f5-4644-9a5d-5b3b67152233 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.432915] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4130031f-b8e8-4860-9f87-678ee85ad82e {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.440433] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6364774a-3724-4b57-bd47-0260486b78f6 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.453633] env[61839]: DEBUG nova.compute.provider_tree [None req-0b9a5533-16d5-44b5-91a3-657fb6e57c75 tempest-ServerTagsTestJSON-1057645175 tempest-ServerTagsTestJSON-1057645175-project-member] Updating inventory in ProviderTree for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1135.494019] env[61839]: DEBUG oslo_concurrency.lockutils [req-2216c996-5d82-48cc-9fd7-759ff001db3b req-d7d9a721-cbc0-4ac2-957f-6b78c4879cca service nova] Releasing lock "refresh_cache-3e153d8a-e069-443c-9db4-7614a6475971" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1135.872440] env[61839]: DEBUG oslo_concurrency.lockutils [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1135.973232] env[61839]: ERROR nova.scheduler.client.report [None req-0b9a5533-16d5-44b5-91a3-657fb6e57c75 tempest-ServerTagsTestJSON-1057645175 tempest-ServerTagsTestJSON-1057645175-project-member] [req-9f28ed6f-8aca-4c53-b654-49c08879f60b] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID cef329e6-1ccd-42a8-bbc4-109a06d1c908. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-9f28ed6f-8aca-4c53-b654-49c08879f60b"}]} [ 1135.989360] env[61839]: DEBUG nova.scheduler.client.report [None req-0b9a5533-16d5-44b5-91a3-657fb6e57c75 tempest-ServerTagsTestJSON-1057645175 tempest-ServerTagsTestJSON-1057645175-project-member] Refreshing inventories for resource provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1136.002189] env[61839]: DEBUG nova.scheduler.client.report [None req-0b9a5533-16d5-44b5-91a3-657fb6e57c75 tempest-ServerTagsTestJSON-1057645175 tempest-ServerTagsTestJSON-1057645175-project-member] Updating ProviderTree inventory for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1136.002415] env[61839]: DEBUG nova.compute.provider_tree [None req-0b9a5533-16d5-44b5-91a3-657fb6e57c75 tempest-ServerTagsTestJSON-1057645175 tempest-ServerTagsTestJSON-1057645175-project-member] Updating inventory in ProviderTree for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1136.012230] env[61839]: DEBUG nova.scheduler.client.report [None req-0b9a5533-16d5-44b5-91a3-657fb6e57c75 tempest-ServerTagsTestJSON-1057645175 tempest-ServerTagsTestJSON-1057645175-project-member] Refreshing aggregate associations for resource provider cef329e6-1ccd-42a8-bbc4-109a06d1c908, aggregates: None {{(pid=61839) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1136.028586] env[61839]: DEBUG nova.scheduler.client.report [None req-0b9a5533-16d5-44b5-91a3-657fb6e57c75 tempest-ServerTagsTestJSON-1057645175 tempest-ServerTagsTestJSON-1057645175-project-member] Refreshing trait associations for resource provider cef329e6-1ccd-42a8-bbc4-109a06d1c908, traits: COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=61839) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1136.089986] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-519c5569-7777-432d-a023-d330d2006cf6 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.097532] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-679f0829-d6ac-4306-863a-bc4fe0a51519 {{(pid=61839) request_handler 
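Note: the 409 above is Placement's optimistic-concurrency guard: every inventory PUT carries a resource_provider_generation, a stale one is rejected with code placement.concurrent_update, and the report client then refreshes its view and retries, which is exactly the sequence of records around this point (refresh, re-PUT, generation bumped from 141 to 142). A rough sketch of that loop against the Placement REST API; the endpoint shape matches the API, while the session object and the retry bound are assumptions:

    def set_inventory(session, base_url, rp_uuid, inventories, max_retries=3):
        """PUT inventory, re-reading the provider generation on 409 conflicts.

        `session` is assumed to be a requests-compatible, authenticated client.
        """
        url = f"{base_url}/resource_providers/{rp_uuid}/inventories"
        for _ in range(max_retries):
            # Fetch the current view; the body carries the provider generation.
            current = session.get(url).json()
            payload = {
                "resource_provider_generation": current["resource_provider_generation"],
                "inventories": inventories,
            }
            resp = session.put(url, json=payload)
            if resp.status_code != 409:
                resp.raise_for_status()
                return resp.json()
            # placement.concurrent_update: another writer bumped the generation
            # between our GET and PUT; loop and try again with the fresh value.
        raise RuntimeError(f"generation conflict persisted for {rp_uuid}")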
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.127399] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b4ff609-ca18-4c89-873a-20bed2caab8d {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.135277] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5f044ae-0867-41ee-bb7b-32e0950cf28d {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.147787] env[61839]: DEBUG nova.compute.provider_tree [None req-0b9a5533-16d5-44b5-91a3-657fb6e57c75 tempest-ServerTagsTestJSON-1057645175 tempest-ServerTagsTestJSON-1057645175-project-member] Updating inventory in ProviderTree for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1136.676864] env[61839]: DEBUG nova.scheduler.client.report [None req-0b9a5533-16d5-44b5-91a3-657fb6e57c75 tempest-ServerTagsTestJSON-1057645175 tempest-ServerTagsTestJSON-1057645175-project-member] Updated inventory for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 with generation 141 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1136.677154] env[61839]: DEBUG nova.compute.provider_tree [None req-0b9a5533-16d5-44b5-91a3-657fb6e57c75 tempest-ServerTagsTestJSON-1057645175 tempest-ServerTagsTestJSON-1057645175-project-member] Updating resource provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 generation from 141 to 142 during operation: update_inventory {{(pid=61839) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1136.677356] env[61839]: DEBUG nova.compute.provider_tree [None req-0b9a5533-16d5-44b5-91a3-657fb6e57c75 tempest-ServerTagsTestJSON-1057645175 tempest-ServerTagsTestJSON-1057645175-project-member] Updating inventory in ProviderTree for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1137.182696] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0b9a5533-16d5-44b5-91a3-657fb6e57c75 tempest-ServerTagsTestJSON-1057645175 tempest-ServerTagsTestJSON-1057645175-project-member] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.880s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1137.183243] env[61839]: DEBUG nova.compute.manager [None req-0b9a5533-16d5-44b5-91a3-657fb6e57c75 tempest-ServerTagsTestJSON-1057645175 tempest-ServerTagsTestJSON-1057645175-project-member] [instance: fa0cfb44-5ebf-4472-af93-8f8c518714fd] Start building networks asynchronously for instance. {{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1137.186247] env[61839]: DEBUG oslo_concurrency.lockutils [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 1.314s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1137.186994] env[61839]: DEBUG oslo_concurrency.lockutils [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1137.186994] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61839) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1137.187841] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9075c87-ebd0-467e-9ddb-fc442a012559 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.196323] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c70654e-b3e5-418c-affc-6c0cf00f74c3 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.210304] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45718cd2-d95c-43fc-b4db-8f1174c4e9b9 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.216879] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26f07ed8-9b09-48fa-a623-c1352a4aafe4 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.247127] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180710MB free_disk=134GB free_vcpus=48 pci_devices=None {{(pid=61839) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1137.247291] env[61839]: DEBUG oslo_concurrency.lockutils [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1137.247478] env[61839]: DEBUG oslo_concurrency.lockutils [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Lock "compute_resources" acquired by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1137.688948] env[61839]: DEBUG nova.compute.utils [None req-0b9a5533-16d5-44b5-91a3-657fb6e57c75 tempest-ServerTagsTestJSON-1057645175 tempest-ServerTagsTestJSON-1057645175-project-member] Using /dev/sd instead of None {{(pid=61839) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1137.690439] env[61839]: DEBUG nova.compute.manager [None req-0b9a5533-16d5-44b5-91a3-657fb6e57c75 tempest-ServerTagsTestJSON-1057645175 tempest-ServerTagsTestJSON-1057645175-project-member] [instance: fa0cfb44-5ebf-4472-af93-8f8c518714fd] Allocating IP information in the background. {{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1137.690615] env[61839]: DEBUG nova.network.neutron [None req-0b9a5533-16d5-44b5-91a3-657fb6e57c75 tempest-ServerTagsTestJSON-1057645175 tempest-ServerTagsTestJSON-1057645175-project-member] [instance: fa0cfb44-5ebf-4472-af93-8f8c518714fd] allocate_for_instance() {{(pid=61839) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1137.736852] env[61839]: DEBUG nova.policy [None req-0b9a5533-16d5-44b5-91a3-657fb6e57c75 tempest-ServerTagsTestJSON-1057645175 tempest-ServerTagsTestJSON-1057645175-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '414be4749afe4b689ee00fa2ed0b4e4b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '660bd736c7754d568f5f5b76090c4a27', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61839) authorize /opt/stack/nova/nova/policy.py:201}} [ 1138.019373] env[61839]: DEBUG nova.network.neutron [None req-0b9a5533-16d5-44b5-91a3-657fb6e57c75 tempest-ServerTagsTestJSON-1057645175 tempest-ServerTagsTestJSON-1057645175-project-member] [instance: fa0cfb44-5ebf-4472-af93-8f8c518714fd] Successfully created port: 6dd4bd1e-9764-46b3-920b-d6e253d24314 {{(pid=61839) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1138.195475] env[61839]: DEBUG nova.compute.manager [None req-0b9a5533-16d5-44b5-91a3-657fb6e57c75 tempest-ServerTagsTestJSON-1057645175 tempest-ServerTagsTestJSON-1057645175-project-member] [instance: fa0cfb44-5ebf-4472-af93-8f8c518714fd] Start building block device mappings for instance. {{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1138.273835] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance 7f7b3f51-3e96-49f1-a84a-81ae649e6938 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1138.274009] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance a1defab7-8433-411d-b7e2-c31f6a34b8e0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1138.274143] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance 62af556c-c9b1-4de6-bb07-532ba67fa367 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1138.274267] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance 3e153d8a-e069-443c-9db4-7614a6475971 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1138.274386] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance fa0cfb44-5ebf-4472-af93-8f8c518714fd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1138.274567] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Total usable vcpus: 48, total allocated vcpus: 5 {{(pid=61839) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1138.274793] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1472MB phys_disk=200GB used_disk=5GB total_vcpus=48 used_vcpus=5 pci_stats=[] {{(pid=61839) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1138.350910] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0299e931-e64b-42df-9346-07719d680092 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.358728] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97977fae-8a5c-4cb0-ac6d-3c88f6273f12 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.390298] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8524e973-3b21-499e-9781-27e93ab7a815 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.399028] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88a2ec5c-4798-4304-ab82-279a26e06aa3 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.412654] env[61839]: DEBUG nova.compute.provider_tree [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1138.915833] env[61839]: DEBUG nova.scheduler.client.report [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Inventory has not changed for provider 
cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1139.204081] env[61839]: DEBUG nova.compute.manager [None req-0b9a5533-16d5-44b5-91a3-657fb6e57c75 tempest-ServerTagsTestJSON-1057645175 tempest-ServerTagsTestJSON-1057645175-project-member] [instance: fa0cfb44-5ebf-4472-af93-8f8c518714fd] Start spawning the instance on the hypervisor. {{(pid=61839) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1139.229143] env[61839]: DEBUG nova.virt.hardware [None req-0b9a5533-16d5-44b5-91a3-657fb6e57c75 tempest-ServerTagsTestJSON-1057645175 tempest-ServerTagsTestJSON-1057645175-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-18T16:51:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-18T16:50:57Z,direct_url=,disk_format='vmdk',id=e497cc62-282a-4a70-9770-22d80d8a1013,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a98e034276e44534a9feace637762da7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-18T16:50:58Z,virtual_size=,visibility=), allow threads: False {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1139.229445] env[61839]: DEBUG nova.virt.hardware [None req-0b9a5533-16d5-44b5-91a3-657fb6e57c75 tempest-ServerTagsTestJSON-1057645175 tempest-ServerTagsTestJSON-1057645175-project-member] Flavor limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1139.229619] env[61839]: DEBUG nova.virt.hardware [None req-0b9a5533-16d5-44b5-91a3-657fb6e57c75 tempest-ServerTagsTestJSON-1057645175 tempest-ServerTagsTestJSON-1057645175-project-member] Image limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1139.229814] env[61839]: DEBUG nova.virt.hardware [None req-0b9a5533-16d5-44b5-91a3-657fb6e57c75 tempest-ServerTagsTestJSON-1057645175 tempest-ServerTagsTestJSON-1057645175-project-member] Flavor pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1139.229963] env[61839]: DEBUG nova.virt.hardware [None req-0b9a5533-16d5-44b5-91a3-657fb6e57c75 tempest-ServerTagsTestJSON-1057645175 tempest-ServerTagsTestJSON-1057645175-project-member] Image pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1139.230133] env[61839]: DEBUG nova.virt.hardware [None req-0b9a5533-16d5-44b5-91a3-657fb6e57c75 tempest-ServerTagsTestJSON-1057645175 tempest-ServerTagsTestJSON-1057645175-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1139.230362] env[61839]: DEBUG 
nova.virt.hardware [None req-0b9a5533-16d5-44b5-91a3-657fb6e57c75 tempest-ServerTagsTestJSON-1057645175 tempest-ServerTagsTestJSON-1057645175-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1139.230569] env[61839]: DEBUG nova.virt.hardware [None req-0b9a5533-16d5-44b5-91a3-657fb6e57c75 tempest-ServerTagsTestJSON-1057645175 tempest-ServerTagsTestJSON-1057645175-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1139.230781] env[61839]: DEBUG nova.virt.hardware [None req-0b9a5533-16d5-44b5-91a3-657fb6e57c75 tempest-ServerTagsTestJSON-1057645175 tempest-ServerTagsTestJSON-1057645175-project-member] Got 1 possible topologies {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1139.230998] env[61839]: DEBUG nova.virt.hardware [None req-0b9a5533-16d5-44b5-91a3-657fb6e57c75 tempest-ServerTagsTestJSON-1057645175 tempest-ServerTagsTestJSON-1057645175-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1139.231205] env[61839]: DEBUG nova.virt.hardware [None req-0b9a5533-16d5-44b5-91a3-657fb6e57c75 tempest-ServerTagsTestJSON-1057645175 tempest-ServerTagsTestJSON-1057645175-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1139.232079] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6464ecb-38ae-4cc2-8d06-f8740d0d9db1 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.240217] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6577698c-3691-4aff-9771-af50fdbf7a94 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.421466] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61839) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1139.421687] env[61839]: DEBUG oslo_concurrency.lockutils [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.174s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1139.476011] env[61839]: DEBUG nova.compute.manager [req-d45b613d-f05c-4fed-8784-3ed9ff2099b7 req-f287c13b-bc85-473d-a94b-af4514a08c14 service nova] [instance: fa0cfb44-5ebf-4472-af93-8f8c518714fd] Received event network-vif-plugged-6dd4bd1e-9764-46b3-920b-d6e253d24314 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1139.476245] env[61839]: DEBUG oslo_concurrency.lockutils [req-d45b613d-f05c-4fed-8784-3ed9ff2099b7 req-f287c13b-bc85-473d-a94b-af4514a08c14 service nova] Acquiring lock "fa0cfb44-5ebf-4472-af93-8f8c518714fd-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" 
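Note: the nova.virt.hardware records above show the whole CPU-topology derivation for m1.nano: neither the flavor nor the image constrains sockets/cores/threads ("limits 0:0:0", "pref 0:0:0"), so the limits default to 65536 each and the only factorisation of 1 vCPU is sockets=1, cores=1, threads=1. A toy re-derivation of just the enumeration step (Nova's real code also weighs preferences and NUMA; this is only the factorisation):

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
        """All (sockets, cores, threads) with s * c * t == vcpus within limits."""
        found = []
        for s in range(1, min(vcpus, max_sockets) + 1):
            for c in range(1, min(vcpus, max_cores) + 1):
                for t in range(1, min(vcpus, max_threads) + 1):
                    if s * c * t == vcpus:
                        found.append((s, c, t))
        return found

    print(possible_topologies(1))  # -> [(1, 1, 1)], the single topology in the log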
{{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1139.476461] env[61839]: DEBUG oslo_concurrency.lockutils [req-d45b613d-f05c-4fed-8784-3ed9ff2099b7 req-f287c13b-bc85-473d-a94b-af4514a08c14 service nova] Lock "fa0cfb44-5ebf-4472-af93-8f8c518714fd-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1139.476635] env[61839]: DEBUG oslo_concurrency.lockutils [req-d45b613d-f05c-4fed-8784-3ed9ff2099b7 req-f287c13b-bc85-473d-a94b-af4514a08c14 service nova] Lock "fa0cfb44-5ebf-4472-af93-8f8c518714fd-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1139.476888] env[61839]: DEBUG nova.compute.manager [req-d45b613d-f05c-4fed-8784-3ed9ff2099b7 req-f287c13b-bc85-473d-a94b-af4514a08c14 service nova] [instance: fa0cfb44-5ebf-4472-af93-8f8c518714fd] No waiting events found dispatching network-vif-plugged-6dd4bd1e-9764-46b3-920b-d6e253d24314 {{(pid=61839) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1139.477076] env[61839]: WARNING nova.compute.manager [req-d45b613d-f05c-4fed-8784-3ed9ff2099b7 req-f287c13b-bc85-473d-a94b-af4514a08c14 service nova] [instance: fa0cfb44-5ebf-4472-af93-8f8c518714fd] Received unexpected event network-vif-plugged-6dd4bd1e-9764-46b3-920b-d6e253d24314 for instance with vm_state building and task_state spawning. [ 1139.557227] env[61839]: DEBUG nova.network.neutron [None req-0b9a5533-16d5-44b5-91a3-657fb6e57c75 tempest-ServerTagsTestJSON-1057645175 tempest-ServerTagsTestJSON-1057645175-project-member] [instance: fa0cfb44-5ebf-4472-af93-8f8c518714fd] Successfully updated port: 6dd4bd1e-9764-46b3-920b-d6e253d24314 {{(pid=61839) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1140.059687] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0b9a5533-16d5-44b5-91a3-657fb6e57c75 tempest-ServerTagsTestJSON-1057645175 tempest-ServerTagsTestJSON-1057645175-project-member] Acquiring lock "refresh_cache-fa0cfb44-5ebf-4472-af93-8f8c518714fd" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1140.059823] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0b9a5533-16d5-44b5-91a3-657fb6e57c75 tempest-ServerTagsTestJSON-1057645175 tempest-ServerTagsTestJSON-1057645175-project-member] Acquired lock "refresh_cache-fa0cfb44-5ebf-4472-af93-8f8c518714fd" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1140.059945] env[61839]: DEBUG nova.network.neutron [None req-0b9a5533-16d5-44b5-91a3-657fb6e57c75 tempest-ServerTagsTestJSON-1057645175 tempest-ServerTagsTestJSON-1057645175-project-member] [instance: fa0cfb44-5ebf-4472-af93-8f8c518714fd] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1140.542186] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1140.542523] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] 
Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1140.542606] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1140.542752] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1140.542898] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61839) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 1140.591526] env[61839]: DEBUG nova.network.neutron [None req-0b9a5533-16d5-44b5-91a3-657fb6e57c75 tempest-ServerTagsTestJSON-1057645175 tempest-ServerTagsTestJSON-1057645175-project-member] [instance: fa0cfb44-5ebf-4472-af93-8f8c518714fd] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1140.713714] env[61839]: DEBUG nova.network.neutron [None req-0b9a5533-16d5-44b5-91a3-657fb6e57c75 tempest-ServerTagsTestJSON-1057645175 tempest-ServerTagsTestJSON-1057645175-project-member] [instance: fa0cfb44-5ebf-4472-af93-8f8c518714fd] Updating instance_info_cache with network_info: [{"id": "6dd4bd1e-9764-46b3-920b-d6e253d24314", "address": "fa:16:3e:de:4e:e1", "network": {"id": "270912f2-8700-45bd-81b8-bbbb4a527ce4", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-638376756-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "660bd736c7754d568f5f5b76090c4a27", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69076131-87ac-46dd-9d5d-8d1b4ea7dec6", "external-id": "nsx-vlan-transportzone-327", "segmentation_id": 327, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6dd4bd1e-97", "ovs_interfaceid": "6dd4bd1e-9764-46b3-920b-d6e253d24314", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1141.216570] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0b9a5533-16d5-44b5-91a3-657fb6e57c75 tempest-ServerTagsTestJSON-1057645175 tempest-ServerTagsTestJSON-1057645175-project-member] Releasing lock "refresh_cache-fa0cfb44-5ebf-4472-af93-8f8c518714fd" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1141.216984] env[61839]: DEBUG nova.compute.manager [None req-0b9a5533-16d5-44b5-91a3-657fb6e57c75 tempest-ServerTagsTestJSON-1057645175 tempest-ServerTagsTestJSON-1057645175-project-member] [instance: 
fa0cfb44-5ebf-4472-af93-8f8c518714fd] Instance network_info: |[{"id": "6dd4bd1e-9764-46b3-920b-d6e253d24314", "address": "fa:16:3e:de:4e:e1", "network": {"id": "270912f2-8700-45bd-81b8-bbbb4a527ce4", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-638376756-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "660bd736c7754d568f5f5b76090c4a27", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69076131-87ac-46dd-9d5d-8d1b4ea7dec6", "external-id": "nsx-vlan-transportzone-327", "segmentation_id": 327, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6dd4bd1e-97", "ovs_interfaceid": "6dd4bd1e-9764-46b3-920b-d6e253d24314", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1141.217456] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-0b9a5533-16d5-44b5-91a3-657fb6e57c75 tempest-ServerTagsTestJSON-1057645175 tempest-ServerTagsTestJSON-1057645175-project-member] [instance: fa0cfb44-5ebf-4472-af93-8f8c518714fd] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:de:4e:e1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '69076131-87ac-46dd-9d5d-8d1b4ea7dec6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6dd4bd1e-9764-46b3-920b-d6e253d24314', 'vif_model': 'vmxnet3'}] {{(pid=61839) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1141.224964] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b9a5533-16d5-44b5-91a3-657fb6e57c75 tempest-ServerTagsTestJSON-1057645175 tempest-ServerTagsTestJSON-1057645175-project-member] Creating folder: Project (660bd736c7754d568f5f5b76090c4a27). Parent ref: group-v281288. {{(pid=61839) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1141.225287] env[61839]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-22dd7864-cfd9-4a87-911c-5d93470b905f {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.236469] env[61839]: INFO nova.virt.vmwareapi.vm_util [None req-0b9a5533-16d5-44b5-91a3-657fb6e57c75 tempest-ServerTagsTestJSON-1057645175 tempest-ServerTagsTestJSON-1057645175-project-member] Created folder: Project (660bd736c7754d568f5f5b76090c4a27) in parent group-v281288. [ 1141.236662] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b9a5533-16d5-44b5-91a3-657fb6e57c75 tempest-ServerTagsTestJSON-1057645175 tempest-ServerTagsTestJSON-1057645175-project-member] Creating folder: Instances. Parent ref: group-v281461. 
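Note: the "Instance VIF info" dict just logged is a mechanical projection of the neutron network_info entry shown a few records earlier: the integration bridge becomes network_name, the port's nsx-logical-switch-id binding detail becomes an OpaqueNetwork reference, and the vmxnet3 model implied by the image is carried through. A sketch of that mapping, with field names taken from the log (the function itself is illustrative):

    def vif_info_from_network_info(vif, vif_model="vmxnet3"):
        """Map one neutron network_info entry to the VMware VIF-info dict."""
        details = vif["details"]
        return {
            "network_name": vif["network"]["bridge"],   # 'br-int'
            "mac_address": vif["address"],              # 'fa:16:3e:de:4e:e1'
            "network_ref": {
                "type": "OpaqueNetwork",
                "network-id": details["nsx-logical-switch-id"],
                "network-type": "nsx.LogicalSwitch",
                "use-external-id": True,
            },
            "iface_id": vif["id"],
            "vif_model": vif_model,
        }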
{{(pid=61839) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1141.236946] env[61839]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-20059066-44e9-4321-8b98-9a1e3a074e72 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.246372] env[61839]: INFO nova.virt.vmwareapi.vm_util [None req-0b9a5533-16d5-44b5-91a3-657fb6e57c75 tempest-ServerTagsTestJSON-1057645175 tempest-ServerTagsTestJSON-1057645175-project-member] Created folder: Instances in parent group-v281461. [ 1141.246600] env[61839]: DEBUG oslo.service.loopingcall [None req-0b9a5533-16d5-44b5-91a3-657fb6e57c75 tempest-ServerTagsTestJSON-1057645175 tempest-ServerTagsTestJSON-1057645175-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1141.246820] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fa0cfb44-5ebf-4472-af93-8f8c518714fd] Creating VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1141.247037] env[61839]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d646d04c-41e4-4b61-8995-1f37033e9508 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.266316] env[61839]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1141.266316] env[61839]: value = "task-1315130" [ 1141.266316] env[61839]: _type = "Task" [ 1141.266316] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1141.273530] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1315130, 'name': CreateVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1141.500174] env[61839]: DEBUG nova.compute.manager [req-739c3f23-a46b-40d1-9b11-8d54fdae99f6 req-7922a285-76cd-4ae0-9148-c14d11ca35ec service nova] [instance: fa0cfb44-5ebf-4472-af93-8f8c518714fd] Received event network-changed-6dd4bd1e-9764-46b3-920b-d6e253d24314 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1141.500384] env[61839]: DEBUG nova.compute.manager [req-739c3f23-a46b-40d1-9b11-8d54fdae99f6 req-7922a285-76cd-4ae0-9148-c14d11ca35ec service nova] [instance: fa0cfb44-5ebf-4472-af93-8f8c518714fd] Refreshing instance network info cache due to event network-changed-6dd4bd1e-9764-46b3-920b-d6e253d24314. 
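Note: this is the second notification round trip for port 6dd4bd1e: neutron emits network-changed, the event reaches the compute manager, and the handler re-reads the port under the instance's refresh_cache-* lock so the info cache (re-logged below with "Updated VIF entry") cannot race a concurrent writer. Condensed to a sketch, with the lock helper and neutron accessor both assumed:

    def handle_network_changed(instance_uuid, port_id, cache_lock, get_port, cache):
        """Refresh one instance's network info cache after a port update."""
        with cache_lock(f"refresh_cache-{instance_uuid}"):
            fresh = get_port(port_id)            # current view from neutron
            for entry in cache[instance_uuid]:
                if entry["id"] == port_id:       # "Updated VIF entry ..." in the log
                    entry.update(fresh)
        # lock released here: 'Releasing lock "refresh_cache-<uuid>"'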
{{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1141.500606] env[61839]: DEBUG oslo_concurrency.lockutils [req-739c3f23-a46b-40d1-9b11-8d54fdae99f6 req-7922a285-76cd-4ae0-9148-c14d11ca35ec service nova] Acquiring lock "refresh_cache-fa0cfb44-5ebf-4472-af93-8f8c518714fd" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1141.500750] env[61839]: DEBUG oslo_concurrency.lockutils [req-739c3f23-a46b-40d1-9b11-8d54fdae99f6 req-7922a285-76cd-4ae0-9148-c14d11ca35ec service nova] Acquired lock "refresh_cache-fa0cfb44-5ebf-4472-af93-8f8c518714fd" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1141.500919] env[61839]: DEBUG nova.network.neutron [req-739c3f23-a46b-40d1-9b11-8d54fdae99f6 req-7922a285-76cd-4ae0-9148-c14d11ca35ec service nova] [instance: fa0cfb44-5ebf-4472-af93-8f8c518714fd] Refreshing network info cache for port 6dd4bd1e-9764-46b3-920b-d6e253d24314 {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1141.776607] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1315130, 'name': CreateVM_Task, 'duration_secs': 0.422496} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1141.776993] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fa0cfb44-5ebf-4472-af93-8f8c518714fd] Created VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1141.777486] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0b9a5533-16d5-44b5-91a3-657fb6e57c75 tempest-ServerTagsTestJSON-1057645175 tempest-ServerTagsTestJSON-1057645175-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1141.777657] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0b9a5533-16d5-44b5-91a3-657fb6e57c75 tempest-ServerTagsTestJSON-1057645175 tempest-ServerTagsTestJSON-1057645175-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1141.778035] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0b9a5533-16d5-44b5-91a3-657fb6e57c75 tempest-ServerTagsTestJSON-1057645175 tempest-ServerTagsTestJSON-1057645175-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1141.778298] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4b8f2d7c-fb04-4e98-92ef-f94b3c384651 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.782363] env[61839]: DEBUG oslo_vmware.api [None req-0b9a5533-16d5-44b5-91a3-657fb6e57c75 tempest-ServerTagsTestJSON-1057645175 tempest-ServerTagsTestJSON-1057645175-project-member] Waiting for the task: (returnval){ [ 1141.782363] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]5257811d-c598-d4cb-e72e-9155d037532d" [ 1141.782363] env[61839]: _type = "Task" [ 1141.782363] env[61839]: } to complete. 
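Note: every "Waiting for the task ... to complete" / "progress is N%" / "completed successfully" triplet in this log is oslo.vmware's task poller at work, here for a SearchDatastore_Task keyed to the session. The shape of the loop, reduced to a sketch (fetch_task_state stands in for the real PropertyCollector read):

    import time

    def wait_for_task(fetch_task_state, task_ref, interval=0.5):
        """Poll a vCenter task until it leaves the queued/running states."""
        while True:
            state, progress, error = fetch_task_state(task_ref)
            if state == "success":
                return
            if state == "error":
                raise RuntimeError(f"Task {task_ref} failed: {error}")
            # still queued or running: report progress and poll again
            print(f"Task: {task_ref} progress is {progress or 0}%.")
            time.sleep(interval)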
{{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1141.789460] env[61839]: DEBUG oslo_vmware.api [None req-0b9a5533-16d5-44b5-91a3-657fb6e57c75 tempest-ServerTagsTestJSON-1057645175 tempest-ServerTagsTestJSON-1057645175-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]5257811d-c598-d4cb-e72e-9155d037532d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1142.189390] env[61839]: DEBUG nova.network.neutron [req-739c3f23-a46b-40d1-9b11-8d54fdae99f6 req-7922a285-76cd-4ae0-9148-c14d11ca35ec service nova] [instance: fa0cfb44-5ebf-4472-af93-8f8c518714fd] Updated VIF entry in instance network info cache for port 6dd4bd1e-9764-46b3-920b-d6e253d24314. {{(pid=61839) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1142.189824] env[61839]: DEBUG nova.network.neutron [req-739c3f23-a46b-40d1-9b11-8d54fdae99f6 req-7922a285-76cd-4ae0-9148-c14d11ca35ec service nova] [instance: fa0cfb44-5ebf-4472-af93-8f8c518714fd] Updating instance_info_cache with network_info: [{"id": "6dd4bd1e-9764-46b3-920b-d6e253d24314", "address": "fa:16:3e:de:4e:e1", "network": {"id": "270912f2-8700-45bd-81b8-bbbb4a527ce4", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-638376756-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "660bd736c7754d568f5f5b76090c4a27", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69076131-87ac-46dd-9d5d-8d1b4ea7dec6", "external-id": "nsx-vlan-transportzone-327", "segmentation_id": 327, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6dd4bd1e-97", "ovs_interfaceid": "6dd4bd1e-9764-46b3-920b-d6e253d24314", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1142.292891] env[61839]: DEBUG oslo_vmware.api [None req-0b9a5533-16d5-44b5-91a3-657fb6e57c75 tempest-ServerTagsTestJSON-1057645175 tempest-ServerTagsTestJSON-1057645175-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]5257811d-c598-d4cb-e72e-9155d037532d, 'name': SearchDatastore_Task, 'duration_secs': 0.009333} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1142.293192] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0b9a5533-16d5-44b5-91a3-657fb6e57c75 tempest-ServerTagsTestJSON-1057645175 tempest-ServerTagsTestJSON-1057645175-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1142.293442] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-0b9a5533-16d5-44b5-91a3-657fb6e57c75 tempest-ServerTagsTestJSON-1057645175 tempest-ServerTagsTestJSON-1057645175-project-member] [instance: fa0cfb44-5ebf-4472-af93-8f8c518714fd] Processing image e497cc62-282a-4a70-9770-22d80d8a1013 {{(pid=61839) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1142.293715] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0b9a5533-16d5-44b5-91a3-657fb6e57c75 tempest-ServerTagsTestJSON-1057645175 tempest-ServerTagsTestJSON-1057645175-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1142.293973] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0b9a5533-16d5-44b5-91a3-657fb6e57c75 tempest-ServerTagsTestJSON-1057645175 tempest-ServerTagsTestJSON-1057645175-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1142.294216] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-0b9a5533-16d5-44b5-91a3-657fb6e57c75 tempest-ServerTagsTestJSON-1057645175 tempest-ServerTagsTestJSON-1057645175-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1142.294491] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-93cc94af-9cb3-4ccb-9eca-2b7f0efd5ba5 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.302861] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-0b9a5533-16d5-44b5-91a3-657fb6e57c75 tempest-ServerTagsTestJSON-1057645175 tempest-ServerTagsTestJSON-1057645175-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1142.303050] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-0b9a5533-16d5-44b5-91a3-657fb6e57c75 tempest-ServerTagsTestJSON-1057645175 tempest-ServerTagsTestJSON-1057645175-project-member] Folder [datastore1] devstack-image-cache_base created. 
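Note: the records around this point are the image-cache fast path: the cached vmdk's datastore path doubles as a lock name, a SearchDatastore_Task checks whether devstack-image-cache_base already holds image e497cc62, and since it does, spawn skips the download and goes straight to a per-instance copy (next records). The control flow, roughly, with every helper a placeholder:

    def fetch_image_if_missing(lock, cache_vmdk, exists_on_datastore, download,
                               copy_to_instance_dir):
        """Serialise cache population per image; reuse the cache when present."""
        with lock:  # named after the cached path, e.g.
                    # "[datastore1] devstack-image-cache_base/<image>/<image>.vmdk"
            if not exists_on_datastore(cache_vmdk):
                # Cache miss: one worker downloads while the rest wait on the lock.
                download(cache_vmdk)
        # Cache hit (this log) or freshly populated: copy the sparse vmdk
        # into the instance directory and resize it there.
        copy_to_instance_dir(cache_vmdk)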
{{(pid=61839) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1142.303770] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f486eb6a-681f-4fc5-9e92-013850731da0 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.309313] env[61839]: DEBUG oslo_vmware.api [None req-0b9a5533-16d5-44b5-91a3-657fb6e57c75 tempest-ServerTagsTestJSON-1057645175 tempest-ServerTagsTestJSON-1057645175-project-member] Waiting for the task: (returnval){ [ 1142.309313] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]521fa5ac-deda-762b-4d8e-8bb09447d08d" [ 1142.309313] env[61839]: _type = "Task" [ 1142.309313] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1142.316614] env[61839]: DEBUG oslo_vmware.api [None req-0b9a5533-16d5-44b5-91a3-657fb6e57c75 tempest-ServerTagsTestJSON-1057645175 tempest-ServerTagsTestJSON-1057645175-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]521fa5ac-deda-762b-4d8e-8bb09447d08d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1142.692594] env[61839]: DEBUG oslo_concurrency.lockutils [req-739c3f23-a46b-40d1-9b11-8d54fdae99f6 req-7922a285-76cd-4ae0-9148-c14d11ca35ec service nova] Releasing lock "refresh_cache-fa0cfb44-5ebf-4472-af93-8f8c518714fd" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1142.820383] env[61839]: DEBUG oslo_vmware.api [None req-0b9a5533-16d5-44b5-91a3-657fb6e57c75 tempest-ServerTagsTestJSON-1057645175 tempest-ServerTagsTestJSON-1057645175-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]521fa5ac-deda-762b-4d8e-8bb09447d08d, 'name': SearchDatastore_Task, 'duration_secs': 0.013134} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1142.821120] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6170a07f-d7ae-4828-a3bf-a871e490dc83 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.826313] env[61839]: DEBUG oslo_vmware.api [None req-0b9a5533-16d5-44b5-91a3-657fb6e57c75 tempest-ServerTagsTestJSON-1057645175 tempest-ServerTagsTestJSON-1057645175-project-member] Waiting for the task: (returnval){ [ 1142.826313] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52dae9b8-1fc1-4af6-1de1-542d33adfe01" [ 1142.826313] env[61839]: _type = "Task" [ 1142.826313] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1142.833737] env[61839]: DEBUG oslo_vmware.api [None req-0b9a5533-16d5-44b5-91a3-657fb6e57c75 tempest-ServerTagsTestJSON-1057645175 tempest-ServerTagsTestJSON-1057645175-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52dae9b8-1fc1-4af6-1de1-542d33adfe01, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1143.336645] env[61839]: DEBUG oslo_vmware.api [None req-0b9a5533-16d5-44b5-91a3-657fb6e57c75 tempest-ServerTagsTestJSON-1057645175 tempest-ServerTagsTestJSON-1057645175-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52dae9b8-1fc1-4af6-1de1-542d33adfe01, 'name': SearchDatastore_Task, 'duration_secs': 0.008869} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1143.336956] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0b9a5533-16d5-44b5-91a3-657fb6e57c75 tempest-ServerTagsTestJSON-1057645175 tempest-ServerTagsTestJSON-1057645175-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1143.337240] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b9a5533-16d5-44b5-91a3-657fb6e57c75 tempest-ServerTagsTestJSON-1057645175 tempest-ServerTagsTestJSON-1057645175-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk to [datastore1] fa0cfb44-5ebf-4472-af93-8f8c518714fd/fa0cfb44-5ebf-4472-af93-8f8c518714fd.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1143.337505] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-50c564c0-0651-44ad-a406-1a453a1fc201 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.343860] env[61839]: DEBUG oslo_vmware.api [None req-0b9a5533-16d5-44b5-91a3-657fb6e57c75 tempest-ServerTagsTestJSON-1057645175 tempest-ServerTagsTestJSON-1057645175-project-member] Waiting for the task: (returnval){ [ 1143.343860] env[61839]: value = "task-1315132" [ 1143.343860] env[61839]: _type = "Task" [ 1143.343860] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1143.350911] env[61839]: DEBUG oslo_vmware.api [None req-0b9a5533-16d5-44b5-91a3-657fb6e57c75 tempest-ServerTagsTestJSON-1057645175 tempest-ServerTagsTestJSON-1057645175-project-member] Task: {'id': task-1315132, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1143.853049] env[61839]: DEBUG oslo_vmware.api [None req-0b9a5533-16d5-44b5-91a3-657fb6e57c75 tempest-ServerTagsTestJSON-1057645175 tempest-ServerTagsTestJSON-1057645175-project-member] Task: {'id': task-1315132, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.44736} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1143.853912] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b9a5533-16d5-44b5-91a3-657fb6e57c75 tempest-ServerTagsTestJSON-1057645175 tempest-ServerTagsTestJSON-1057645175-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk to [datastore1] fa0cfb44-5ebf-4472-af93-8f8c518714fd/fa0cfb44-5ebf-4472-af93-8f8c518714fd.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1143.854055] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-0b9a5533-16d5-44b5-91a3-657fb6e57c75 tempest-ServerTagsTestJSON-1057645175 tempest-ServerTagsTestJSON-1057645175-project-member] [instance: fa0cfb44-5ebf-4472-af93-8f8c518714fd] Extending root virtual disk to 1048576 {{(pid=61839) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1143.854282] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-faf071da-47e0-47bb-ad41-b11af2c8f855 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.860777] env[61839]: DEBUG oslo_vmware.api [None req-0b9a5533-16d5-44b5-91a3-657fb6e57c75 tempest-ServerTagsTestJSON-1057645175 tempest-ServerTagsTestJSON-1057645175-project-member] Waiting for the task: (returnval){ [ 1143.860777] env[61839]: value = "task-1315133" [ 1143.860777] env[61839]: _type = "Task" [ 1143.860777] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1143.868050] env[61839]: DEBUG oslo_vmware.api [None req-0b9a5533-16d5-44b5-91a3-657fb6e57c75 tempest-ServerTagsTestJSON-1057645175 tempest-ServerTagsTestJSON-1057645175-project-member] Task: {'id': task-1315133, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1144.374453] env[61839]: DEBUG oslo_vmware.api [None req-0b9a5533-16d5-44b5-91a3-657fb6e57c75 tempest-ServerTagsTestJSON-1057645175 tempest-ServerTagsTestJSON-1057645175-project-member] Task: {'id': task-1315133, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.05652} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1144.374833] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-0b9a5533-16d5-44b5-91a3-657fb6e57c75 tempest-ServerTagsTestJSON-1057645175 tempest-ServerTagsTestJSON-1057645175-project-member] [instance: fa0cfb44-5ebf-4472-af93-8f8c518714fd] Extended root virtual disk {{(pid=61839) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1144.375929] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac4e8a45-b377-406f-931e-2272b82b2de4 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.404631] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-0b9a5533-16d5-44b5-91a3-657fb6e57c75 tempest-ServerTagsTestJSON-1057645175 tempest-ServerTagsTestJSON-1057645175-project-member] [instance: fa0cfb44-5ebf-4472-af93-8f8c518714fd] Reconfiguring VM instance instance-0000006f to attach disk [datastore1] fa0cfb44-5ebf-4472-af93-8f8c518714fd/fa0cfb44-5ebf-4472-af93-8f8c518714fd.vmdk or device None with type sparse {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1144.404841] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bba52fa0-ca12-462e-a446-905a42c05a85 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.424024] env[61839]: DEBUG oslo_vmware.api [None req-0b9a5533-16d5-44b5-91a3-657fb6e57c75 tempest-ServerTagsTestJSON-1057645175 tempest-ServerTagsTestJSON-1057645175-project-member] Waiting for the task: (returnval){ [ 1144.424024] env[61839]: value = "task-1315135" [ 1144.424024] env[61839]: _type = "Task" [ 1144.424024] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1144.433525] env[61839]: DEBUG oslo_vmware.api [None req-0b9a5533-16d5-44b5-91a3-657fb6e57c75 tempest-ServerTagsTestJSON-1057645175 tempest-ServerTagsTestJSON-1057645175-project-member] Task: {'id': task-1315135, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1144.937841] env[61839]: DEBUG oslo_vmware.api [None req-0b9a5533-16d5-44b5-91a3-657fb6e57c75 tempest-ServerTagsTestJSON-1057645175 tempest-ServerTagsTestJSON-1057645175-project-member] Task: {'id': task-1315135, 'name': ReconfigVM_Task, 'duration_secs': 0.266342} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1144.938386] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-0b9a5533-16d5-44b5-91a3-657fb6e57c75 tempest-ServerTagsTestJSON-1057645175 tempest-ServerTagsTestJSON-1057645175-project-member] [instance: fa0cfb44-5ebf-4472-af93-8f8c518714fd] Reconfigured VM instance instance-0000006f to attach disk [datastore1] fa0cfb44-5ebf-4472-af93-8f8c518714fd/fa0cfb44-5ebf-4472-af93-8f8c518714fd.vmdk or device None with type sparse {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1144.939037] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-aa9d5aab-0631-406a-833f-ec9d9244c9dd {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.947902] env[61839]: DEBUG oslo_vmware.api [None req-0b9a5533-16d5-44b5-91a3-657fb6e57c75 tempest-ServerTagsTestJSON-1057645175 tempest-ServerTagsTestJSON-1057645175-project-member] Waiting for the task: (returnval){ [ 1144.947902] env[61839]: value = "task-1315136" [ 1144.947902] env[61839]: _type = "Task" [ 1144.947902] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1144.961219] env[61839]: DEBUG oslo_vmware.api [None req-0b9a5533-16d5-44b5-91a3-657fb6e57c75 tempest-ServerTagsTestJSON-1057645175 tempest-ServerTagsTestJSON-1057645175-project-member] Task: {'id': task-1315136, 'name': Rename_Task} progress is 6%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1145.439362] env[61839]: DEBUG oslo_concurrency.lockutils [None req-30d08ea0-0408-44c6-8b48-7ee2e427afc2 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Acquiring lock "a1defab7-8433-411d-b7e2-c31f6a34b8e0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1145.439638] env[61839]: DEBUG oslo_concurrency.lockutils [None req-30d08ea0-0408-44c6-8b48-7ee2e427afc2 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Lock "a1defab7-8433-411d-b7e2-c31f6a34b8e0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1145.439863] env[61839]: DEBUG oslo_concurrency.lockutils [None req-30d08ea0-0408-44c6-8b48-7ee2e427afc2 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Acquiring lock "a1defab7-8433-411d-b7e2-c31f6a34b8e0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1145.440064] env[61839]: DEBUG oslo_concurrency.lockutils [None req-30d08ea0-0408-44c6-8b48-7ee2e427afc2 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Lock "a1defab7-8433-411d-b7e2-c31f6a34b8e0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61839) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1145.440251] env[61839]: DEBUG oslo_concurrency.lockutils [None req-30d08ea0-0408-44c6-8b48-7ee2e427afc2 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Lock "a1defab7-8433-411d-b7e2-c31f6a34b8e0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1145.443520] env[61839]: INFO nova.compute.manager [None req-30d08ea0-0408-44c6-8b48-7ee2e427afc2 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: a1defab7-8433-411d-b7e2-c31f6a34b8e0] Terminating instance [ 1145.445292] env[61839]: DEBUG nova.compute.manager [None req-30d08ea0-0408-44c6-8b48-7ee2e427afc2 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: a1defab7-8433-411d-b7e2-c31f6a34b8e0] Start destroying the instance on the hypervisor. {{(pid=61839) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1145.445489] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-30d08ea0-0408-44c6-8b48-7ee2e427afc2 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: a1defab7-8433-411d-b7e2-c31f6a34b8e0] Destroying instance {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1145.446331] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9859b82b-de83-4c9a-8919-03250711a8da {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.457237] env[61839]: DEBUG oslo_vmware.api [None req-0b9a5533-16d5-44b5-91a3-657fb6e57c75 tempest-ServerTagsTestJSON-1057645175 tempest-ServerTagsTestJSON-1057645175-project-member] Task: {'id': task-1315136, 'name': Rename_Task, 'duration_secs': 0.15054} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1145.459232] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b9a5533-16d5-44b5-91a3-657fb6e57c75 tempest-ServerTagsTestJSON-1057645175 tempest-ServerTagsTestJSON-1057645175-project-member] [instance: fa0cfb44-5ebf-4472-af93-8f8c518714fd] Powering on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1145.459503] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-30d08ea0-0408-44c6-8b48-7ee2e427afc2 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: a1defab7-8433-411d-b7e2-c31f6a34b8e0] Powering off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1145.459707] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e5f41966-cf5f-4ab0-9a34-75e5552808c7 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.461030] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-21a72c52-5833-487f-8950-0adc0c45b7bd {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.467451] env[61839]: DEBUG oslo_vmware.api [None req-0b9a5533-16d5-44b5-91a3-657fb6e57c75 tempest-ServerTagsTestJSON-1057645175 tempest-ServerTagsTestJSON-1057645175-project-member] Waiting for the task: (returnval){ [ 1145.467451] env[61839]: value = "task-1315137" [ 1145.467451] env[61839]: _type = "Task" [ 1145.467451] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1145.468598] env[61839]: DEBUG oslo_vmware.api [None req-30d08ea0-0408-44c6-8b48-7ee2e427afc2 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Waiting for the task: (returnval){ [ 1145.468598] env[61839]: value = "task-1315138" [ 1145.468598] env[61839]: _type = "Task" [ 1145.468598] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1145.478983] env[61839]: DEBUG oslo_vmware.api [None req-30d08ea0-0408-44c6-8b48-7ee2e427afc2 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Task: {'id': task-1315138, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1145.481543] env[61839]: DEBUG oslo_vmware.api [None req-0b9a5533-16d5-44b5-91a3-657fb6e57c75 tempest-ServerTagsTestJSON-1057645175 tempest-ServerTagsTestJSON-1057645175-project-member] Task: {'id': task-1315137, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1145.981104] env[61839]: DEBUG oslo_vmware.api [None req-0b9a5533-16d5-44b5-91a3-657fb6e57c75 tempest-ServerTagsTestJSON-1057645175 tempest-ServerTagsTestJSON-1057645175-project-member] Task: {'id': task-1315137, 'name': PowerOnVM_Task, 'duration_secs': 0.463242} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1145.984027] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b9a5533-16d5-44b5-91a3-657fb6e57c75 tempest-ServerTagsTestJSON-1057645175 tempest-ServerTagsTestJSON-1057645175-project-member] [instance: fa0cfb44-5ebf-4472-af93-8f8c518714fd] Powered on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1145.984249] env[61839]: INFO nova.compute.manager [None req-0b9a5533-16d5-44b5-91a3-657fb6e57c75 tempest-ServerTagsTestJSON-1057645175 tempest-ServerTagsTestJSON-1057645175-project-member] [instance: fa0cfb44-5ebf-4472-af93-8f8c518714fd] Took 6.78 seconds to spawn the instance on the hypervisor. [ 1145.984435] env[61839]: DEBUG nova.compute.manager [None req-0b9a5533-16d5-44b5-91a3-657fb6e57c75 tempest-ServerTagsTestJSON-1057645175 tempest-ServerTagsTestJSON-1057645175-project-member] [instance: fa0cfb44-5ebf-4472-af93-8f8c518714fd] Checking state {{(pid=61839) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1145.984720] env[61839]: DEBUG oslo_vmware.api [None req-30d08ea0-0408-44c6-8b48-7ee2e427afc2 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Task: {'id': task-1315138, 'name': PowerOffVM_Task, 'duration_secs': 0.189135} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1145.985401] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f44a575c-2497-4976-ad9b-a5a46a76c68c {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.987882] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-30d08ea0-0408-44c6-8b48-7ee2e427afc2 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: a1defab7-8433-411d-b7e2-c31f6a34b8e0] Powered off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1145.988103] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-30d08ea0-0408-44c6-8b48-7ee2e427afc2 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: a1defab7-8433-411d-b7e2-c31f6a34b8e0] Unregistering the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1145.988371] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c7576422-4c94-42e9-a2d8-0c5d7b0e430e {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.047023] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-30d08ea0-0408-44c6-8b48-7ee2e427afc2 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: a1defab7-8433-411d-b7e2-c31f6a34b8e0] Unregistered the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1146.047258] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-30d08ea0-0408-44c6-8b48-7ee2e427afc2 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: a1defab7-8433-411d-b7e2-c31f6a34b8e0] Deleting contents of the VM from datastore datastore2 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1146.047446] 
env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-30d08ea0-0408-44c6-8b48-7ee2e427afc2 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Deleting the datastore file [datastore2] a1defab7-8433-411d-b7e2-c31f6a34b8e0 {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1146.047744] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e14fc4cd-d841-4289-8e7f-b7ff46008745 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.053353] env[61839]: DEBUG oslo_vmware.api [None req-30d08ea0-0408-44c6-8b48-7ee2e427afc2 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Waiting for the task: (returnval){ [ 1146.053353] env[61839]: value = "task-1315140" [ 1146.053353] env[61839]: _type = "Task" [ 1146.053353] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1146.061055] env[61839]: DEBUG oslo_vmware.api [None req-30d08ea0-0408-44c6-8b48-7ee2e427afc2 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Task: {'id': task-1315140, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1146.503318] env[61839]: INFO nova.compute.manager [None req-0b9a5533-16d5-44b5-91a3-657fb6e57c75 tempest-ServerTagsTestJSON-1057645175 tempest-ServerTagsTestJSON-1057645175-project-member] [instance: fa0cfb44-5ebf-4472-af93-8f8c518714fd] Took 12.22 seconds to build instance. [ 1146.563880] env[61839]: DEBUG oslo_vmware.api [None req-30d08ea0-0408-44c6-8b48-7ee2e427afc2 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Task: {'id': task-1315140, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.429584} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1146.563880] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-30d08ea0-0408-44c6-8b48-7ee2e427afc2 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Deleted the datastore file {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1146.563880] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-30d08ea0-0408-44c6-8b48-7ee2e427afc2 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: a1defab7-8433-411d-b7e2-c31f6a34b8e0] Deleted contents of the VM from datastore datastore2 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1146.563880] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-30d08ea0-0408-44c6-8b48-7ee2e427afc2 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: a1defab7-8433-411d-b7e2-c31f6a34b8e0] Instance destroyed {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1146.563880] env[61839]: INFO nova.compute.manager [None req-30d08ea0-0408-44c6-8b48-7ee2e427afc2 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] [instance: a1defab7-8433-411d-b7e2-c31f6a34b8e0] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1146.564243] env[61839]: DEBUG oslo.service.loopingcall [None req-30d08ea0-0408-44c6-8b48-7ee2e427afc2 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1146.564346] env[61839]: DEBUG nova.compute.manager [-] [instance: a1defab7-8433-411d-b7e2-c31f6a34b8e0] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1146.564460] env[61839]: DEBUG nova.network.neutron [-] [instance: a1defab7-8433-411d-b7e2-c31f6a34b8e0] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1146.968326] env[61839]: DEBUG nova.compute.manager [req-ed116279-8384-46dc-af44-a07622d006ce req-122153c1-f77f-48ce-a1b5-15fe8fe82272 service nova] [instance: a1defab7-8433-411d-b7e2-c31f6a34b8e0] Received event network-vif-deleted-8b7a9c37-1009-4d15-b75d-68cbeb5d2fc7 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1146.968326] env[61839]: INFO nova.compute.manager [req-ed116279-8384-46dc-af44-a07622d006ce req-122153c1-f77f-48ce-a1b5-15fe8fe82272 service nova] [instance: a1defab7-8433-411d-b7e2-c31f6a34b8e0] Neutron deleted interface 8b7a9c37-1009-4d15-b75d-68cbeb5d2fc7; detaching it from the instance and deleting it from the info cache [ 1146.968452] env[61839]: DEBUG nova.network.neutron [req-ed116279-8384-46dc-af44-a07622d006ce req-122153c1-f77f-48ce-a1b5-15fe8fe82272 service nova] [instance: a1defab7-8433-411d-b7e2-c31f6a34b8e0] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1147.005117] env[61839]: DEBUG oslo_concurrency.lockutils [None req-0b9a5533-16d5-44b5-91a3-657fb6e57c75 tempest-ServerTagsTestJSON-1057645175 tempest-ServerTagsTestJSON-1057645175-project-member] Lock "fa0cfb44-5ebf-4472-af93-8f8c518714fd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.730s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1147.448456] env[61839]: DEBUG nova.network.neutron [-] [instance: a1defab7-8433-411d-b7e2-c31f6a34b8e0] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1147.471696] env[61839]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7fcbf28f-644a-41f7-a743-6392281ce9cd {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.481861] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cec1f53e-698e-4116-be0c-e2f728b80bff {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.508394] env[61839]: DEBUG nova.compute.manager [req-ed116279-8384-46dc-af44-a07622d006ce req-122153c1-f77f-48ce-a1b5-15fe8fe82272 service nova] [instance: a1defab7-8433-411d-b7e2-c31f6a34b8e0] Detach interface failed, port_id=8b7a9c37-1009-4d15-b75d-68cbeb5d2fc7, reason: Instance a1defab7-8433-411d-b7e2-c31f6a34b8e0 could not be found. {{(pid=61839) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1147.952572] env[61839]: INFO nova.compute.manager [-] [instance: a1defab7-8433-411d-b7e2-c31f6a34b8e0] Took 1.39 seconds to deallocate network for instance. 
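The "Waiting for the task ... progress is 0% ... completed successfully" triplets above come from oslo.vmware's task poll loop (wait_for_task / _poll_task in oslo_vmware/api.py, per the file:line markers on each record): the driver issues a vCenter task (SearchDatastore_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task, PowerOffVM_Task, DeleteDatastoreFile_Task), then polls the task's TaskInfo until it reaches a terminal state. A minimal illustrative sketch of that pattern follows; get_task_info is a hypothetical helper standing in for the PropertyCollector read, and this is not the library's actual code:

    import time

    POLL_INTERVAL = 0.5  # seconds between TaskInfo reads, like oslo.vmware's task_poll_interval

    class TaskFailed(Exception):
        """Raised when the vCenter task ends in the 'error' state."""

    def wait_for_task(get_task_info, task_ref):
        # Poll the task's TaskInfo until it leaves the transient
        # 'queued'/'running' states, logging progress each round --
        # the same shape as the DEBUG records above.
        while True:
            info = get_task_info(task_ref)
            if info.state == 'success':
                return info.result          # logged as 'completed successfully'
            if info.state == 'error':
                raise TaskFailed(info.error)
            print("Task %s progress is %d%%." % (task_ref, info.progress or 0))
            time.sleep(POLL_INTERVAL)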
[ 1148.324540] env[61839]: DEBUG oslo_concurrency.lockutils [None req-1585d1e5-2a28-458c-8776-28252c7f0cb6 tempest-ServerTagsTestJSON-1057645175 tempest-ServerTagsTestJSON-1057645175-project-member] Acquiring lock "fa0cfb44-5ebf-4472-af93-8f8c518714fd" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1148.324870] env[61839]: DEBUG oslo_concurrency.lockutils [None req-1585d1e5-2a28-458c-8776-28252c7f0cb6 tempest-ServerTagsTestJSON-1057645175 tempest-ServerTagsTestJSON-1057645175-project-member] Lock "fa0cfb44-5ebf-4472-af93-8f8c518714fd" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1148.325102] env[61839]: DEBUG oslo_concurrency.lockutils [None req-1585d1e5-2a28-458c-8776-28252c7f0cb6 tempest-ServerTagsTestJSON-1057645175 tempest-ServerTagsTestJSON-1057645175-project-member] Acquiring lock "fa0cfb44-5ebf-4472-af93-8f8c518714fd-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1148.325312] env[61839]: DEBUG oslo_concurrency.lockutils [None req-1585d1e5-2a28-458c-8776-28252c7f0cb6 tempest-ServerTagsTestJSON-1057645175 tempest-ServerTagsTestJSON-1057645175-project-member] Lock "fa0cfb44-5ebf-4472-af93-8f8c518714fd-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1148.325489] env[61839]: DEBUG oslo_concurrency.lockutils [None req-1585d1e5-2a28-458c-8776-28252c7f0cb6 tempest-ServerTagsTestJSON-1057645175 tempest-ServerTagsTestJSON-1057645175-project-member] Lock "fa0cfb44-5ebf-4472-af93-8f8c518714fd-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1148.327728] env[61839]: INFO nova.compute.manager [None req-1585d1e5-2a28-458c-8776-28252c7f0cb6 tempest-ServerTagsTestJSON-1057645175 tempest-ServerTagsTestJSON-1057645175-project-member] [instance: fa0cfb44-5ebf-4472-af93-8f8c518714fd] Terminating instance [ 1148.329631] env[61839]: DEBUG nova.compute.manager [None req-1585d1e5-2a28-458c-8776-28252c7f0cb6 tempest-ServerTagsTestJSON-1057645175 tempest-ServerTagsTestJSON-1057645175-project-member] [instance: fa0cfb44-5ebf-4472-af93-8f8c518714fd] Start destroying the instance on the hypervisor. 
{{(pid=61839) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1148.329830] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-1585d1e5-2a28-458c-8776-28252c7f0cb6 tempest-ServerTagsTestJSON-1057645175 tempest-ServerTagsTestJSON-1057645175-project-member] [instance: fa0cfb44-5ebf-4472-af93-8f8c518714fd] Destroying instance {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1148.330686] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-796d692e-28a8-4673-872e-63418c372fb5 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.338253] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-1585d1e5-2a28-458c-8776-28252c7f0cb6 tempest-ServerTagsTestJSON-1057645175 tempest-ServerTagsTestJSON-1057645175-project-member] [instance: fa0cfb44-5ebf-4472-af93-8f8c518714fd] Powering off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1148.338486] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e50da3b8-03e4-42c5-8f95-9c1913e6d9b7 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.345112] env[61839]: DEBUG oslo_vmware.api [None req-1585d1e5-2a28-458c-8776-28252c7f0cb6 tempest-ServerTagsTestJSON-1057645175 tempest-ServerTagsTestJSON-1057645175-project-member] Waiting for the task: (returnval){ [ 1148.345112] env[61839]: value = "task-1315142" [ 1148.345112] env[61839]: _type = "Task" [ 1148.345112] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1148.354309] env[61839]: DEBUG oslo_vmware.api [None req-1585d1e5-2a28-458c-8776-28252c7f0cb6 tempest-ServerTagsTestJSON-1057645175 tempest-ServerTagsTestJSON-1057645175-project-member] Task: {'id': task-1315142, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1148.459856] env[61839]: DEBUG oslo_concurrency.lockutils [None req-30d08ea0-0408-44c6-8b48-7ee2e427afc2 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1148.460222] env[61839]: DEBUG oslo_concurrency.lockutils [None req-30d08ea0-0408-44c6-8b48-7ee2e427afc2 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1148.460503] env[61839]: DEBUG nova.objects.instance [None req-30d08ea0-0408-44c6-8b48-7ee2e427afc2 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Lazy-loading 'resources' on Instance uuid a1defab7-8433-411d-b7e2-c31f6a34b8e0 {{(pid=61839) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1148.854713] env[61839]: DEBUG oslo_vmware.api [None req-1585d1e5-2a28-458c-8776-28252c7f0cb6 tempest-ServerTagsTestJSON-1057645175 tempest-ServerTagsTestJSON-1057645175-project-member] Task: {'id': task-1315142, 'name': PowerOffVM_Task, 'duration_secs': 0.175647} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1148.854962] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-1585d1e5-2a28-458c-8776-28252c7f0cb6 tempest-ServerTagsTestJSON-1057645175 tempest-ServerTagsTestJSON-1057645175-project-member] [instance: fa0cfb44-5ebf-4472-af93-8f8c518714fd] Powered off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1148.855163] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-1585d1e5-2a28-458c-8776-28252c7f0cb6 tempest-ServerTagsTestJSON-1057645175 tempest-ServerTagsTestJSON-1057645175-project-member] [instance: fa0cfb44-5ebf-4472-af93-8f8c518714fd] Unregistering the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1148.855413] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-409592a7-1353-4be2-b109-977fdb02e791 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.910502] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-1585d1e5-2a28-458c-8776-28252c7f0cb6 tempest-ServerTagsTestJSON-1057645175 tempest-ServerTagsTestJSON-1057645175-project-member] [instance: fa0cfb44-5ebf-4472-af93-8f8c518714fd] Unregistered the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1148.910725] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-1585d1e5-2a28-458c-8776-28252c7f0cb6 tempest-ServerTagsTestJSON-1057645175 tempest-ServerTagsTestJSON-1057645175-project-member] [instance: fa0cfb44-5ebf-4472-af93-8f8c518714fd] Deleting contents of the VM from datastore datastore1 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1148.910916] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-1585d1e5-2a28-458c-8776-28252c7f0cb6 
tempest-ServerTagsTestJSON-1057645175 tempest-ServerTagsTestJSON-1057645175-project-member] Deleting the datastore file [datastore1] fa0cfb44-5ebf-4472-af93-8f8c518714fd {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1148.911204] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b7764901-173d-4757-afcb-ff967138e497 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.916974] env[61839]: DEBUG oslo_vmware.api [None req-1585d1e5-2a28-458c-8776-28252c7f0cb6 tempest-ServerTagsTestJSON-1057645175 tempest-ServerTagsTestJSON-1057645175-project-member] Waiting for the task: (returnval){ [ 1148.916974] env[61839]: value = "task-1315145" [ 1148.916974] env[61839]: _type = "Task" [ 1148.916974] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1148.924411] env[61839]: DEBUG oslo_vmware.api [None req-1585d1e5-2a28-458c-8776-28252c7f0cb6 tempest-ServerTagsTestJSON-1057645175 tempest-ServerTagsTestJSON-1057645175-project-member] Task: {'id': task-1315145, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1149.036943] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9667094c-40f0-4af3-88fc-c252f4ccc46f {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.044649] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-652bdea5-3592-4de6-91ee-22d3ed8434ba {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.074781] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97228d87-1f97-441c-bf8f-fadc762dcb5c {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.082131] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42d78f11-5cc7-40ad-a647-ba683b70f454 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.095188] env[61839]: DEBUG nova.compute.provider_tree [None req-30d08ea0-0408-44c6-8b48-7ee2e427afc2 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1149.426934] env[61839]: DEBUG oslo_vmware.api [None req-1585d1e5-2a28-458c-8776-28252c7f0cb6 tempest-ServerTagsTestJSON-1057645175 tempest-ServerTagsTestJSON-1057645175-project-member] Task: {'id': task-1315145, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.131473} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1149.428049] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-1585d1e5-2a28-458c-8776-28252c7f0cb6 tempest-ServerTagsTestJSON-1057645175 tempest-ServerTagsTestJSON-1057645175-project-member] Deleted the datastore file {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1149.428049] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-1585d1e5-2a28-458c-8776-28252c7f0cb6 tempest-ServerTagsTestJSON-1057645175 tempest-ServerTagsTestJSON-1057645175-project-member] [instance: fa0cfb44-5ebf-4472-af93-8f8c518714fd] Deleted contents of the VM from datastore datastore1 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1149.428049] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-1585d1e5-2a28-458c-8776-28252c7f0cb6 tempest-ServerTagsTestJSON-1057645175 tempest-ServerTagsTestJSON-1057645175-project-member] [instance: fa0cfb44-5ebf-4472-af93-8f8c518714fd] Instance destroyed {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1149.428234] env[61839]: INFO nova.compute.manager [None req-1585d1e5-2a28-458c-8776-28252c7f0cb6 tempest-ServerTagsTestJSON-1057645175 tempest-ServerTagsTestJSON-1057645175-project-member] [instance: fa0cfb44-5ebf-4472-af93-8f8c518714fd] Took 1.10 seconds to destroy the instance on the hypervisor. [ 1149.428395] env[61839]: DEBUG oslo.service.loopingcall [None req-1585d1e5-2a28-458c-8776-28252c7f0cb6 tempest-ServerTagsTestJSON-1057645175 tempest-ServerTagsTestJSON-1057645175-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1149.428592] env[61839]: DEBUG nova.compute.manager [-] [instance: fa0cfb44-5ebf-4472-af93-8f8c518714fd] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1149.428687] env[61839]: DEBUG nova.network.neutron [-] [instance: fa0cfb44-5ebf-4472-af93-8f8c518714fd] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1149.598256] env[61839]: DEBUG nova.scheduler.client.report [None req-30d08ea0-0408-44c6-8b48-7ee2e427afc2 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1149.645929] env[61839]: DEBUG nova.compute.manager [req-0fd10a64-0ec4-478e-9d35-89ddea0acb87 req-9f8e1b63-c66f-46ae-ab4a-a5368ad743af service nova] [instance: fa0cfb44-5ebf-4472-af93-8f8c518714fd] Received event network-vif-deleted-6dd4bd1e-9764-46b3-920b-d6e253d24314 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1149.646680] env[61839]: INFO nova.compute.manager [req-0fd10a64-0ec4-478e-9d35-89ddea0acb87 req-9f8e1b63-c66f-46ae-ab4a-a5368ad743af 
service nova] [instance: fa0cfb44-5ebf-4472-af93-8f8c518714fd] Neutron deleted interface 6dd4bd1e-9764-46b3-920b-d6e253d24314; detaching it from the instance and deleting it from the info cache [ 1149.646680] env[61839]: DEBUG nova.network.neutron [req-0fd10a64-0ec4-478e-9d35-89ddea0acb87 req-9f8e1b63-c66f-46ae-ab4a-a5368ad743af service nova] [instance: fa0cfb44-5ebf-4472-af93-8f8c518714fd] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1150.103600] env[61839]: DEBUG oslo_concurrency.lockutils [None req-30d08ea0-0408-44c6-8b48-7ee2e427afc2 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.643s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1150.125088] env[61839]: DEBUG nova.network.neutron [-] [instance: fa0cfb44-5ebf-4472-af93-8f8c518714fd] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1150.127066] env[61839]: INFO nova.scheduler.client.report [None req-30d08ea0-0408-44c6-8b48-7ee2e427afc2 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Deleted allocations for instance a1defab7-8433-411d-b7e2-c31f6a34b8e0 [ 1150.151076] env[61839]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a5e01ef5-f921-44ea-b9a7-bb2584d90a4b {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.161247] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d156a0b1-0a89-48a5-8d45-9c601f1f4843 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.186604] env[61839]: DEBUG nova.compute.manager [req-0fd10a64-0ec4-478e-9d35-89ddea0acb87 req-9f8e1b63-c66f-46ae-ab4a-a5368ad743af service nova] [instance: fa0cfb44-5ebf-4472-af93-8f8c518714fd] Detach interface failed, port_id=6dd4bd1e-9764-46b3-920b-d6e253d24314, reason: Instance fa0cfb44-5ebf-4472-af93-8f8c518714fd could not be found. {{(pid=61839) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1150.629340] env[61839]: INFO nova.compute.manager [-] [instance: fa0cfb44-5ebf-4472-af93-8f8c518714fd] Took 1.20 seconds to deallocate network for instance. 
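The "Inventory has not changed for provider cef329e6-... based on inventory data: {...}" records show the scheduler report client comparing the locally computed inventory for the compute node against its cached view of Placement and skipping the update when the two match; the dict printed in the records carries the three resource classes and their allocation ratios. A toy reproduction of that comparison over the same dict shape; inventory_changed is a hypothetical name, and Nova's report client does considerably more bookkeeping than this:

    # Inventory keyed by resource class, shaped like the records in this log.
    current = {
        'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16,
                 'step_size': 1, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1,
                      'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0},
        'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134,
                    'step_size': 1, 'allocation_ratio': 1.0},
    }

    def inventory_changed(cached, computed):
        # Plain dict equality suffices for this shape; when the dicts are
        # equal the client logs "Inventory has not changed" and skips the
        # write to Placement.
        return cached != computed

    assert not inventory_changed(current, {k: dict(v) for k, v in current.items()})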
[ 1150.635414] env[61839]: DEBUG oslo_concurrency.lockutils [None req-30d08ea0-0408-44c6-8b48-7ee2e427afc2 tempest-AttachVolumeShelveTestJSON-1020874626 tempest-AttachVolumeShelveTestJSON-1020874626-project-member] Lock "a1defab7-8433-411d-b7e2-c31f6a34b8e0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.196s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1151.138074] env[61839]: DEBUG oslo_concurrency.lockutils [None req-1585d1e5-2a28-458c-8776-28252c7f0cb6 tempest-ServerTagsTestJSON-1057645175 tempest-ServerTagsTestJSON-1057645175-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1151.138074] env[61839]: DEBUG oslo_concurrency.lockutils [None req-1585d1e5-2a28-458c-8776-28252c7f0cb6 tempest-ServerTagsTestJSON-1057645175 tempest-ServerTagsTestJSON-1057645175-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1151.138281] env[61839]: DEBUG nova.objects.instance [None req-1585d1e5-2a28-458c-8776-28252c7f0cb6 tempest-ServerTagsTestJSON-1057645175 tempest-ServerTagsTestJSON-1057645175-project-member] Lazy-loading 'resources' on Instance uuid fa0cfb44-5ebf-4472-af93-8f8c518714fd {{(pid=61839) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1151.697841] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e45c9f1-df05-4be9-beec-fcef25234a67 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.705471] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b575ab78-655d-4f03-b113-f633dbf2973a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.736697] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d310a747-3955-4291-9ddc-e35fde81fb13 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.743479] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da35bb2e-4ec5-489a-a320-b3c388824c91 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.756210] env[61839]: DEBUG nova.compute.provider_tree [None req-1585d1e5-2a28-458c-8776-28252c7f0cb6 tempest-ServerTagsTestJSON-1057645175 tempest-ServerTagsTestJSON-1057645175-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1152.259351] env[61839]: DEBUG nova.scheduler.client.report [None req-1585d1e5-2a28-458c-8776-28252c7f0cb6 tempest-ServerTagsTestJSON-1057645175 tempest-ServerTagsTestJSON-1057645175-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 
196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1152.764244] env[61839]: DEBUG oslo_concurrency.lockutils [None req-1585d1e5-2a28-458c-8776-28252c7f0cb6 tempest-ServerTagsTestJSON-1057645175 tempest-ServerTagsTestJSON-1057645175-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.626s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1152.782074] env[61839]: INFO nova.scheduler.client.report [None req-1585d1e5-2a28-458c-8776-28252c7f0cb6 tempest-ServerTagsTestJSON-1057645175 tempest-ServerTagsTestJSON-1057645175-project-member] Deleted allocations for instance fa0cfb44-5ebf-4472-af93-8f8c518714fd [ 1153.290492] env[61839]: DEBUG oslo_concurrency.lockutils [None req-1585d1e5-2a28-458c-8776-28252c7f0cb6 tempest-ServerTagsTestJSON-1057645175 tempest-ServerTagsTestJSON-1057645175-project-member] Lock "fa0cfb44-5ebf-4472-af93-8f8c518714fd" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 4.966s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1153.579487] env[61839]: DEBUG oslo_concurrency.lockutils [None req-4ed4d475-df90-4e03-a8f1-0c43404f6f5b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Acquiring lock "af3978c4-3bd4-47da-a188-954bd6385183" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1153.579730] env[61839]: DEBUG oslo_concurrency.lockutils [None req-4ed4d475-df90-4e03-a8f1-0c43404f6f5b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Lock "af3978c4-3bd4-47da-a188-954bd6385183" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1154.082654] env[61839]: DEBUG nova.compute.manager [None req-4ed4d475-df90-4e03-a8f1-0c43404f6f5b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: af3978c4-3bd4-47da-a188-954bd6385183] Starting instance... 
{{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1154.607419] env[61839]: DEBUG oslo_concurrency.lockutils [None req-4ed4d475-df90-4e03-a8f1-0c43404f6f5b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1154.607688] env[61839]: DEBUG oslo_concurrency.lockutils [None req-4ed4d475-df90-4e03-a8f1-0c43404f6f5b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1154.609602] env[61839]: INFO nova.compute.claims [None req-4ed4d475-df90-4e03-a8f1-0c43404f6f5b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: af3978c4-3bd4-47da-a188-954bd6385183] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1155.685020] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe3aab86-ee0f-4122-815b-f16f740ffade {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.691972] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4a0ff6f-b52f-409e-8405-cd97a0d67811 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.724389] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41ee6efd-5b51-4eba-a07f-da6ef9537f37 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.732840] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d856e7a-f0ed-43e8-83fb-32e19b080512 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.746937] env[61839]: DEBUG nova.compute.provider_tree [None req-4ed4d475-df90-4e03-a8f1-0c43404f6f5b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1156.250447] env[61839]: DEBUG nova.scheduler.client.report [None req-4ed4d475-df90-4e03-a8f1-0c43404f6f5b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1156.760021] env[61839]: DEBUG oslo_concurrency.lockutils [None 
req-4ed4d475-df90-4e03-a8f1-0c43404f6f5b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.150s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1156.760021] env[61839]: DEBUG nova.compute.manager [None req-4ed4d475-df90-4e03-a8f1-0c43404f6f5b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: af3978c4-3bd4-47da-a188-954bd6385183] Start building networks asynchronously for instance. {{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}}
[ 1157.265185] env[61839]: DEBUG nova.compute.utils [None req-4ed4d475-df90-4e03-a8f1-0c43404f6f5b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Using /dev/sd instead of None {{(pid=61839) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}}
[ 1157.266680] env[61839]: DEBUG nova.compute.manager [None req-4ed4d475-df90-4e03-a8f1-0c43404f6f5b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: af3978c4-3bd4-47da-a188-954bd6385183] Allocating IP information in the background. {{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}}
[ 1157.266860] env[61839]: DEBUG nova.network.neutron [None req-4ed4d475-df90-4e03-a8f1-0c43404f6f5b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: af3978c4-3bd4-47da-a188-954bd6385183] allocate_for_instance() {{(pid=61839) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}}
[ 1157.321703] env[61839]: DEBUG nova.policy [None req-4ed4d475-df90-4e03-a8f1-0c43404f6f5b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'cd83e8a2f63d4ae38c5989c1e3824e3e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '48d8c406ff504d71bba5fb74caf11c14', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61839) authorize /opt/stack/nova/nova/policy.py:201}}
[ 1157.567253] env[61839]: DEBUG nova.network.neutron [None req-4ed4d475-df90-4e03-a8f1-0c43404f6f5b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: af3978c4-3bd4-47da-a188-954bd6385183] Successfully created port: 9e2d05ef-d36f-4c9a-980d-457ebeb1e2af {{(pid=61839) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}}
[ 1157.771070] env[61839]: DEBUG nova.compute.manager [None req-4ed4d475-df90-4e03-a8f1-0c43404f6f5b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: af3978c4-3bd4-47da-a188-954bd6385183] Start building block device mappings for instance. {{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}}
[ 1158.283897] env[61839]: INFO nova.virt.block_device [None req-4ed4d475-df90-4e03-a8f1-0c43404f6f5b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: af3978c4-3bd4-47da-a188-954bd6385183] Booting with volume 14ce5e6a-8a86-4817-b00b-46cc57cbdfa8 at /dev/sda
[ 1158.319764] env[61839]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-62b73370-1c24-4705-8ecf-7ab165aa7597 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1158.329607] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08dfa7a4-8fbf-4b70-9880-934a859770c0 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1158.353583] env[61839]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ff6a855e-a89b-4528-8129-dab68da539da {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1158.362131] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06074b7f-5aea-45f6-b779-4242c8686a4c {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1158.385335] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8aa9f723-2235-4ee9-9fa9-7739862b261c {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1158.391679] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3db3d48e-3b92-4eef-b18b-9122c8681b36 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1158.404425] env[61839]: DEBUG nova.virt.block_device [None req-4ed4d475-df90-4e03-a8f1-0c43404f6f5b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: af3978c4-3bd4-47da-a188-954bd6385183] Updating existing volume attachment record: 36487084-3d25-4785-b4eb-6d726fa8fe3c {{(pid=61839) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}}
[ 1158.925073] env[61839]: DEBUG nova.compute.manager [req-2aa23343-3ccc-48cf-aae4-6f8e4c40d33c req-0eeb8038-cff9-4f04-8cc7-0769126f683f service nova] [instance: af3978c4-3bd4-47da-a188-954bd6385183] Received event network-vif-plugged-9e2d05ef-d36f-4c9a-980d-457ebeb1e2af {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}}
[ 1158.925073] env[61839]: DEBUG oslo_concurrency.lockutils [req-2aa23343-3ccc-48cf-aae4-6f8e4c40d33c req-0eeb8038-cff9-4f04-8cc7-0769126f683f service nova] Acquiring lock "af3978c4-3bd4-47da-a188-954bd6385183-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1158.925073] env[61839]: DEBUG oslo_concurrency.lockutils [req-2aa23343-3ccc-48cf-aae4-6f8e4c40d33c req-0eeb8038-cff9-4f04-8cc7-0769126f683f service nova] Lock "af3978c4-3bd4-47da-a188-954bd6385183-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1158.925073] env[61839]: DEBUG oslo_concurrency.lockutils [req-2aa23343-3ccc-48cf-aae4-6f8e4c40d33c req-0eeb8038-cff9-4f04-8cc7-0769126f683f service nova] Lock "af3978c4-3bd4-47da-a188-954bd6385183-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1158.925073] env[61839]: DEBUG nova.compute.manager [req-2aa23343-3ccc-48cf-aae4-6f8e4c40d33c req-0eeb8038-cff9-4f04-8cc7-0769126f683f service nova] [instance: af3978c4-3bd4-47da-a188-954bd6385183] No waiting events found dispatching network-vif-plugged-9e2d05ef-d36f-4c9a-980d-457ebeb1e2af {{(pid=61839) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}}
[ 1158.925073] env[61839]: WARNING nova.compute.manager [req-2aa23343-3ccc-48cf-aae4-6f8e4c40d33c req-0eeb8038-cff9-4f04-8cc7-0769126f683f service nova] [instance: af3978c4-3bd4-47da-a188-954bd6385183] Received unexpected event network-vif-plugged-9e2d05ef-d36f-4c9a-980d-457ebeb1e2af for instance with vm_state building and task_state block_device_mapping.
[ 1159.008336] env[61839]: DEBUG nova.network.neutron [None req-4ed4d475-df90-4e03-a8f1-0c43404f6f5b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: af3978c4-3bd4-47da-a188-954bd6385183] Successfully updated port: 9e2d05ef-d36f-4c9a-980d-457ebeb1e2af {{(pid=61839) _update_port /opt/stack/nova/nova/network/neutron.py:586}}
[ 1159.513716] env[61839]: DEBUG oslo_concurrency.lockutils [None req-4ed4d475-df90-4e03-a8f1-0c43404f6f5b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Acquiring lock "refresh_cache-af3978c4-3bd4-47da-a188-954bd6385183" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1159.514109] env[61839]: DEBUG oslo_concurrency.lockutils [None req-4ed4d475-df90-4e03-a8f1-0c43404f6f5b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Acquired lock "refresh_cache-af3978c4-3bd4-47da-a188-954bd6385183" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1159.514109] env[61839]: DEBUG nova.network.neutron [None req-4ed4d475-df90-4e03-a8f1-0c43404f6f5b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: af3978c4-3bd4-47da-a188-954bd6385183] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}}
[ 1160.046666] env[61839]: DEBUG nova.network.neutron [None req-4ed4d475-df90-4e03-a8f1-0c43404f6f5b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: af3978c4-3bd4-47da-a188-954bd6385183] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 1160.169552] env[61839]: DEBUG nova.network.neutron [None req-4ed4d475-df90-4e03-a8f1-0c43404f6f5b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: af3978c4-3bd4-47da-a188-954bd6385183] Updating instance_info_cache with network_info: [{"id": "9e2d05ef-d36f-4c9a-980d-457ebeb1e2af", "address": "fa:16:3e:c6:e7:8f", "network": {"id": "90aa5d36-d3ac-4a62-9f9d-7578cd9ac466", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-2104030849-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "48d8c406ff504d71bba5fb74caf11c14", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eed34ae1-5f7f-4deb-9db8-85eaa1e60c29", "external-id": "nsx-vlan-transportzone-780", "segmentation_id": 780, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9e2d05ef-d3", "ovs_interfaceid": "9e2d05ef-d36f-4c9a-980d-457ebeb1e2af", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1160.484582] env[61839]: DEBUG nova.compute.manager [None req-4ed4d475-df90-4e03-a8f1-0c43404f6f5b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: af3978c4-3bd4-47da-a188-954bd6385183] Start spawning the instance on the hypervisor. {{(pid=61839) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}}
[ 1160.485480] env[61839]: DEBUG nova.virt.hardware [None req-4ed4d475-df90-4e03-a8f1-0c43404f6f5b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-18T16:51:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=<?>,container_format=<?>,created_at=<?>,direct_url=<?>,disk_format=<?>,id=<?>,min_disk=0,min_ram=0,name=<?>,owner=<?>,properties=ImageMetaProps,protected=<?>,size=1073741824,status='active',tags=<?>,updated_at=<?>,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}}
[ 1160.485823] env[61839]: DEBUG nova.virt.hardware [None req-4ed4d475-df90-4e03-a8f1-0c43404f6f5b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Flavor limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}}
[ 1160.486118] env[61839]: DEBUG nova.virt.hardware [None req-4ed4d475-df90-4e03-a8f1-0c43404f6f5b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Image limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 1160.486446] env[61839]: DEBUG nova.virt.hardware [None req-4ed4d475-df90-4e03-a8f1-0c43404f6f5b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Flavor pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}}
[ 1160.486706] env[61839]: DEBUG nova.virt.hardware [None req-4ed4d475-df90-4e03-a8f1-0c43404f6f5b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Image pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 1160.487018] env[61839]: DEBUG nova.virt.hardware [None req-4ed4d475-df90-4e03-a8f1-0c43404f6f5b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}}
[ 1160.487388] env[61839]: DEBUG nova.virt.hardware [None req-4ed4d475-df90-4e03-a8f1-0c43404f6f5b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}}
[ 1160.487686] env[61839]: DEBUG nova.virt.hardware [None req-4ed4d475-df90-4e03-a8f1-0c43404f6f5b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}}
[ 1160.488015] env[61839]: DEBUG nova.virt.hardware [None req-4ed4d475-df90-4e03-a8f1-0c43404f6f5b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Got 1 possible topologies {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}}
[ 1160.488340] env[61839]: DEBUG nova.virt.hardware [None req-4ed4d475-df90-4e03-a8f1-0c43404f6f5b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}}
[ 1160.488653] env[61839]: DEBUG nova.virt.hardware [None req-4ed4d475-df90-4e03-a8f1-0c43404f6f5b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
[ 1160.490019] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a450b210-d7cc-43c0-be7b-e11c1cd65fdb {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1160.504561] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ee3eb10-7c79-4e7e-8a38-73d02b249491 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1160.673250] env[61839]: DEBUG oslo_concurrency.lockutils [None req-4ed4d475-df90-4e03-a8f1-0c43404f6f5b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Releasing lock "refresh_cache-af3978c4-3bd4-47da-a188-954bd6385183" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1160.673618] env[61839]: DEBUG nova.compute.manager [None req-4ed4d475-df90-4e03-a8f1-0c43404f6f5b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: af3978c4-3bd4-47da-a188-954bd6385183] Instance network_info: |[{"id": "9e2d05ef-d36f-4c9a-980d-457ebeb1e2af", "address": "fa:16:3e:c6:e7:8f", "network": {"id": "90aa5d36-d3ac-4a62-9f9d-7578cd9ac466", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-2104030849-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "48d8c406ff504d71bba5fb74caf11c14", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eed34ae1-5f7f-4deb-9db8-85eaa1e60c29", "external-id": "nsx-vlan-transportzone-780", "segmentation_id": 780, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9e2d05ef-d3", "ovs_interfaceid": "9e2d05ef-d36f-4c9a-980d-457ebeb1e2af", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}}
[ 1160.674084] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-4ed4d475-df90-4e03-a8f1-0c43404f6f5b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: af3978c4-3bd4-47da-a188-954bd6385183] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c6:e7:8f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'eed34ae1-5f7f-4deb-9db8-85eaa1e60c29', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9e2d05ef-d36f-4c9a-980d-457ebeb1e2af', 'vif_model': 'vmxnet3'}] {{(pid=61839) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}}
[ 1160.681558] env[61839]: DEBUG oslo.service.loopingcall [None req-4ed4d475-df90-4e03-a8f1-0c43404f6f5b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 1160.681779] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: af3978c4-3bd4-47da-a188-954bd6385183] Creating VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}}
[ 1160.682014] env[61839]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5e0841f9-99bf-45b5-a4ae-2336e331bc9c {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1160.702891] env[61839]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){
[ 1160.702891] env[61839]: value = "task-1315148"
[ 1160.702891] env[61839]: _type = "Task"
[ 1160.702891] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1160.710777] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1315148, 'name': CreateVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1160.954148] env[61839]: DEBUG nova.compute.manager [req-eff72876-c8a8-4a30-8ddf-0806f7aa68da req-addf9ae6-a072-4688-acc0-1923a17755b2 service nova] [instance: af3978c4-3bd4-47da-a188-954bd6385183] Received event network-changed-9e2d05ef-d36f-4c9a-980d-457ebeb1e2af {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}}
[ 1160.954301] env[61839]: DEBUG nova.compute.manager [req-eff72876-c8a8-4a30-8ddf-0806f7aa68da req-addf9ae6-a072-4688-acc0-1923a17755b2 service nova] [instance: af3978c4-3bd4-47da-a188-954bd6385183] Refreshing instance network info cache due to event network-changed-9e2d05ef-d36f-4c9a-980d-457ebeb1e2af. {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}}
[ 1160.954529] env[61839]: DEBUG oslo_concurrency.lockutils [req-eff72876-c8a8-4a30-8ddf-0806f7aa68da req-addf9ae6-a072-4688-acc0-1923a17755b2 service nova] Acquiring lock "refresh_cache-af3978c4-3bd4-47da-a188-954bd6385183" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1160.954680] env[61839]: DEBUG oslo_concurrency.lockutils [req-eff72876-c8a8-4a30-8ddf-0806f7aa68da req-addf9ae6-a072-4688-acc0-1923a17755b2 service nova] Acquired lock "refresh_cache-af3978c4-3bd4-47da-a188-954bd6385183" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1160.954873] env[61839]: DEBUG nova.network.neutron [req-eff72876-c8a8-4a30-8ddf-0806f7aa68da req-addf9ae6-a072-4688-acc0-1923a17755b2 service nova] [instance: af3978c4-3bd4-47da-a188-954bd6385183] Refreshing network info cache for port 9e2d05ef-d36f-4c9a-980d-457ebeb1e2af {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}}
[ 1161.213085] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1315148, 'name': CreateVM_Task, 'duration_secs': 0.313015} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1161.213443] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: af3978c4-3bd4-47da-a188-954bd6385183] Created VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}}
[ 1161.213951] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-4ed4d475-df90-4e03-a8f1-0c43404f6f5b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: af3978c4-3bd4-47da-a188-954bd6385183] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'disk_bus': None, 'device_type': None, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-281464', 'volume_id': '14ce5e6a-8a86-4817-b00b-46cc57cbdfa8', 'name': 'volume-14ce5e6a-8a86-4817-b00b-46cc57cbdfa8', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'af3978c4-3bd4-47da-a188-954bd6385183', 'attached_at': '', 'detached_at': '', 'volume_id': '14ce5e6a-8a86-4817-b00b-46cc57cbdfa8', 'serial': '14ce5e6a-8a86-4817-b00b-46cc57cbdfa8'}, 'guest_format': None, 'attachment_id': '36487084-3d25-4785-b4eb-6d726fa8fe3c', 'mount_device': '/dev/sda', 'delete_on_termination': True, 'boot_index': 0, 'volume_type': None}], 'swap': None} {{(pid=61839) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}}
[ 1161.214194] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-4ed4d475-df90-4e03-a8f1-0c43404f6f5b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: af3978c4-3bd4-47da-a188-954bd6385183] Root volume attach. Driver type: vmdk {{(pid=61839) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}}
[ 1161.214997] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d5a85d9-87a6-464c-a8ff-39bda36e407b {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1161.223105] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50be05a0-da1c-40b9-9c27-0a7237393c81 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1161.228720] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47ac158a-0b54-4290-9de9-d12785cd3a44 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1161.234102] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-7b0f8cbf-8277-47b0-a118-c29a83c1b7fe {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1161.240292] env[61839]: DEBUG oslo_vmware.api [None req-4ed4d475-df90-4e03-a8f1-0c43404f6f5b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Waiting for the task: (returnval){
[ 1161.240292] env[61839]: value = "task-1315149"
[ 1161.240292] env[61839]: _type = "Task"
[ 1161.240292] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1161.247469] env[61839]: DEBUG oslo_vmware.api [None req-4ed4d475-df90-4e03-a8f1-0c43404f6f5b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1315149, 'name': RelocateVM_Task} progress is 5%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1161.657064] env[61839]: DEBUG nova.network.neutron [req-eff72876-c8a8-4a30-8ddf-0806f7aa68da req-addf9ae6-a072-4688-acc0-1923a17755b2 service nova] [instance: af3978c4-3bd4-47da-a188-954bd6385183] Updated VIF entry in instance network info cache for port 9e2d05ef-d36f-4c9a-980d-457ebeb1e2af. {{(pid=61839) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}}
[ 1161.657499] env[61839]: DEBUG nova.network.neutron [req-eff72876-c8a8-4a30-8ddf-0806f7aa68da req-addf9ae6-a072-4688-acc0-1923a17755b2 service nova] [instance: af3978c4-3bd4-47da-a188-954bd6385183] Updating instance_info_cache with network_info: [{"id": "9e2d05ef-d36f-4c9a-980d-457ebeb1e2af", "address": "fa:16:3e:c6:e7:8f", "network": {"id": "90aa5d36-d3ac-4a62-9f9d-7578cd9ac466", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-2104030849-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "48d8c406ff504d71bba5fb74caf11c14", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eed34ae1-5f7f-4deb-9db8-85eaa1e60c29", "external-id": "nsx-vlan-transportzone-780", "segmentation_id": 780, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9e2d05ef-d3", "ovs_interfaceid": "9e2d05ef-d36f-4c9a-980d-457ebeb1e2af", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1161.750088] env[61839]: DEBUG oslo_vmware.api [None req-4ed4d475-df90-4e03-a8f1-0c43404f6f5b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1315149, 'name': RelocateVM_Task, 'duration_secs': 0.363623} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1161.750401] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-4ed4d475-df90-4e03-a8f1-0c43404f6f5b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: af3978c4-3bd4-47da-a188-954bd6385183] Volume attach. Driver type: vmdk {{(pid=61839) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}}
[ 1161.750608] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-4ed4d475-df90-4e03-a8f1-0c43404f6f5b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: af3978c4-3bd4-47da-a188-954bd6385183] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-281464', 'volume_id': '14ce5e6a-8a86-4817-b00b-46cc57cbdfa8', 'name': 'volume-14ce5e6a-8a86-4817-b00b-46cc57cbdfa8', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'af3978c4-3bd4-47da-a188-954bd6385183', 'attached_at': '', 'detached_at': '', 'volume_id': '14ce5e6a-8a86-4817-b00b-46cc57cbdfa8', 'serial': '14ce5e6a-8a86-4817-b00b-46cc57cbdfa8'} {{(pid=61839) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}}
[ 1161.751392] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14d16147-e3d0-4dfa-bdb2-f9d871ac4bcd {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1161.766843] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f161a524-e3dc-4af1-8f41-e939c838f23f {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1161.789386] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-4ed4d475-df90-4e03-a8f1-0c43404f6f5b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: af3978c4-3bd4-47da-a188-954bd6385183] Reconfiguring VM instance instance-00000070 to attach disk [datastore2] volume-14ce5e6a-8a86-4817-b00b-46cc57cbdfa8/volume-14ce5e6a-8a86-4817-b00b-46cc57cbdfa8.vmdk or device None with type thin {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}}
[ 1161.789638] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-afcb09d4-37e5-4e0d-a75a-96af643d8dec {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1161.809896] env[61839]: DEBUG oslo_vmware.api [None req-4ed4d475-df90-4e03-a8f1-0c43404f6f5b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Waiting for the task: (returnval){
[ 1161.809896] env[61839]: value = "task-1315150"
[ 1161.809896] env[61839]: _type = "Task"
[ 1161.809896] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1161.817645] env[61839]: DEBUG oslo_vmware.api [None req-4ed4d475-df90-4e03-a8f1-0c43404f6f5b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1315150, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1162.160822] env[61839]: DEBUG oslo_concurrency.lockutils [req-eff72876-c8a8-4a30-8ddf-0806f7aa68da req-addf9ae6-a072-4688-acc0-1923a17755b2 service nova] Releasing lock "refresh_cache-af3978c4-3bd4-47da-a188-954bd6385183" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1162.320156] env[61839]: DEBUG oslo_vmware.api [None req-4ed4d475-df90-4e03-a8f1-0c43404f6f5b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1315150, 'name': ReconfigVM_Task, 'duration_secs': 0.245754} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1162.320618] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-4ed4d475-df90-4e03-a8f1-0c43404f6f5b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: af3978c4-3bd4-47da-a188-954bd6385183] Reconfigured VM instance instance-00000070 to attach disk [datastore2] volume-14ce5e6a-8a86-4817-b00b-46cc57cbdfa8/volume-14ce5e6a-8a86-4817-b00b-46cc57cbdfa8.vmdk or device None with type thin {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}}
[ 1162.325143] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6db73ff9-5726-45f4-aa8f-9b4b463f1358 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1162.340717] env[61839]: DEBUG oslo_vmware.api [None req-4ed4d475-df90-4e03-a8f1-0c43404f6f5b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Waiting for the task: (returnval){
[ 1162.340717] env[61839]: value = "task-1315151"
[ 1162.340717] env[61839]: _type = "Task"
[ 1162.340717] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1162.348973] env[61839]: DEBUG oslo_vmware.api [None req-4ed4d475-df90-4e03-a8f1-0c43404f6f5b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1315151, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1162.850506] env[61839]: DEBUG oslo_vmware.api [None req-4ed4d475-df90-4e03-a8f1-0c43404f6f5b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1315151, 'name': ReconfigVM_Task, 'duration_secs': 0.122336} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1162.850843] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-4ed4d475-df90-4e03-a8f1-0c43404f6f5b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: af3978c4-3bd4-47da-a188-954bd6385183] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-281464', 'volume_id': '14ce5e6a-8a86-4817-b00b-46cc57cbdfa8', 'name': 'volume-14ce5e6a-8a86-4817-b00b-46cc57cbdfa8', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'af3978c4-3bd4-47da-a188-954bd6385183', 'attached_at': '', 'detached_at': '', 'volume_id': '14ce5e6a-8a86-4817-b00b-46cc57cbdfa8', 'serial': '14ce5e6a-8a86-4817-b00b-46cc57cbdfa8'} {{(pid=61839) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}}
[ 1162.851523] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-aa09201c-99b2-4b47-ac08-a5c3e198a588 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1162.857853] env[61839]: DEBUG oslo_vmware.api [None req-4ed4d475-df90-4e03-a8f1-0c43404f6f5b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Waiting for the task: (returnval){
[ 1162.857853] env[61839]: value = "task-1315152"
[ 1162.857853] env[61839]: _type = "Task"
[ 1162.857853] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1162.866445] env[61839]: DEBUG oslo_vmware.api [None req-4ed4d475-df90-4e03-a8f1-0c43404f6f5b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1315152, 'name': Rename_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1163.368070] env[61839]: DEBUG oslo_vmware.api [None req-4ed4d475-df90-4e03-a8f1-0c43404f6f5b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1315152, 'name': Rename_Task, 'duration_secs': 0.130721} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1163.368435] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-4ed4d475-df90-4e03-a8f1-0c43404f6f5b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: af3978c4-3bd4-47da-a188-954bd6385183] Powering on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}}
[ 1163.368576] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d07e1fe2-b4a8-469f-a3cc-212140fa6d96 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1163.374200] env[61839]: DEBUG oslo_vmware.api [None req-4ed4d475-df90-4e03-a8f1-0c43404f6f5b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Waiting for the task: (returnval){
[ 1163.374200] env[61839]: value = "task-1315153"
[ 1163.374200] env[61839]: _type = "Task"
[ 1163.374200] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1163.381042] env[61839]: DEBUG oslo_vmware.api [None req-4ed4d475-df90-4e03-a8f1-0c43404f6f5b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1315153, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1163.884033] env[61839]: DEBUG oslo_vmware.api [None req-4ed4d475-df90-4e03-a8f1-0c43404f6f5b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1315153, 'name': PowerOnVM_Task, 'duration_secs': 0.439857} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1163.884326] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-4ed4d475-df90-4e03-a8f1-0c43404f6f5b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: af3978c4-3bd4-47da-a188-954bd6385183] Powered on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}}
[ 1163.884536] env[61839]: INFO nova.compute.manager [None req-4ed4d475-df90-4e03-a8f1-0c43404f6f5b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: af3978c4-3bd4-47da-a188-954bd6385183] Took 3.40 seconds to spawn the instance on the hypervisor.
[ 1163.884722] env[61839]: DEBUG nova.compute.manager [None req-4ed4d475-df90-4e03-a8f1-0c43404f6f5b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: af3978c4-3bd4-47da-a188-954bd6385183] Checking state {{(pid=61839) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}}
[ 1163.885599] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ef40d6a-b323-48ef-9fce-c32923f12169 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1164.403431] env[61839]: INFO nova.compute.manager [None req-4ed4d475-df90-4e03-a8f1-0c43404f6f5b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: af3978c4-3bd4-47da-a188-954bd6385183] Took 9.82 seconds to build instance.
[ 1164.905830] env[61839]: DEBUG oslo_concurrency.lockutils [None req-4ed4d475-df90-4e03-a8f1-0c43404f6f5b tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Lock "af3978c4-3bd4-47da-a188-954bd6385183" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 11.326s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1165.706178] env[61839]: DEBUG nova.compute.manager [req-acd9e35e-52a7-4b9d-a922-a9fe578f3053 req-a422c209-b879-4292-b6b0-4783210e504e service nova] [instance: 7f7b3f51-3e96-49f1-a84a-81ae649e6938] Received event network-changed-6b6f5c89-f3fe-4e29-82b8-e9e8f2658bfd {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1165.706421] env[61839]: DEBUG nova.compute.manager [req-acd9e35e-52a7-4b9d-a922-a9fe578f3053 req-a422c209-b879-4292-b6b0-4783210e504e service nova] [instance: 7f7b3f51-3e96-49f1-a84a-81ae649e6938] Refreshing instance network info cache due to event network-changed-6b6f5c89-f3fe-4e29-82b8-e9e8f2658bfd. {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1165.706570] env[61839]: DEBUG oslo_concurrency.lockutils [req-acd9e35e-52a7-4b9d-a922-a9fe578f3053 req-a422c209-b879-4292-b6b0-4783210e504e service nova] Acquiring lock "refresh_cache-7f7b3f51-3e96-49f1-a84a-81ae649e6938" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1165.706721] env[61839]: DEBUG oslo_concurrency.lockutils [req-acd9e35e-52a7-4b9d-a922-a9fe578f3053 req-a422c209-b879-4292-b6b0-4783210e504e service nova] Acquired lock "refresh_cache-7f7b3f51-3e96-49f1-a84a-81ae649e6938" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1165.706889] env[61839]: DEBUG nova.network.neutron [req-acd9e35e-52a7-4b9d-a922-a9fe578f3053 req-a422c209-b879-4292-b6b0-4783210e504e service nova] [instance: 7f7b3f51-3e96-49f1-a84a-81ae649e6938] Refreshing network info cache for port 6b6f5c89-f3fe-4e29-82b8-e9e8f2658bfd {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1166.505039] env[61839]: DEBUG oslo_concurrency.lockutils [None req-c655b008-6120-479a-a49e-636fa3ebd722 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Acquiring lock "62af556c-c9b1-4de6-bb07-532ba67fa367" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1166.505321] env[61839]: DEBUG oslo_concurrency.lockutils [None req-c655b008-6120-479a-a49e-636fa3ebd722 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Lock "62af556c-c9b1-4de6-bb07-532ba67fa367" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1166.505530] env[61839]: INFO nova.compute.manager [None req-c655b008-6120-479a-a49e-636fa3ebd722 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 62af556c-c9b1-4de6-bb07-532ba67fa367] Shelving [ 1166.591597] env[61839]: DEBUG nova.network.neutron [req-acd9e35e-52a7-4b9d-a922-a9fe578f3053 req-a422c209-b879-4292-b6b0-4783210e504e service nova] 
[instance: 7f7b3f51-3e96-49f1-a84a-81ae649e6938] Updated VIF entry in instance network info cache for port 6b6f5c89-f3fe-4e29-82b8-e9e8f2658bfd. {{(pid=61839) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1166.592042] env[61839]: DEBUG nova.network.neutron [req-acd9e35e-52a7-4b9d-a922-a9fe578f3053 req-a422c209-b879-4292-b6b0-4783210e504e service nova] [instance: 7f7b3f51-3e96-49f1-a84a-81ae649e6938] Updating instance_info_cache with network_info: [{"id": "6b6f5c89-f3fe-4e29-82b8-e9e8f2658bfd", "address": "fa:16:3e:17:50:4a", "network": {"id": "90aa5d36-d3ac-4a62-9f9d-7578cd9ac466", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-2104030849-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "48d8c406ff504d71bba5fb74caf11c14", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eed34ae1-5f7f-4deb-9db8-85eaa1e60c29", "external-id": "nsx-vlan-transportzone-780", "segmentation_id": 780, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6b6f5c89-f3", "ovs_interfaceid": "6b6f5c89-f3fe-4e29-82b8-e9e8f2658bfd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1167.013153] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-c655b008-6120-479a-a49e-636fa3ebd722 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 62af556c-c9b1-4de6-bb07-532ba67fa367] Powering off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1167.013530] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c7cc28be-deac-4b64-b264-8e454beaddeb {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.021623] env[61839]: DEBUG oslo_vmware.api [None req-c655b008-6120-479a-a49e-636fa3ebd722 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Waiting for the task: (returnval){ [ 1167.021623] env[61839]: value = "task-1315154" [ 1167.021623] env[61839]: _type = "Task" [ 1167.021623] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1167.030013] env[61839]: DEBUG oslo_vmware.api [None req-c655b008-6120-479a-a49e-636fa3ebd722 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1315154, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1167.095011] env[61839]: DEBUG oslo_concurrency.lockutils [req-acd9e35e-52a7-4b9d-a922-a9fe578f3053 req-a422c209-b879-4292-b6b0-4783210e504e service nova] Releasing lock "refresh_cache-7f7b3f51-3e96-49f1-a84a-81ae649e6938" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1167.137577] env[61839]: DEBUG nova.compute.manager [None req-06137254-4d69-47ff-9f00-7ace8f632e0a tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: af3978c4-3bd4-47da-a188-954bd6385183] Stashing vm_state: active {{(pid=61839) _prep_resize /opt/stack/nova/nova/compute/manager.py:5624}} [ 1167.532069] env[61839]: DEBUG oslo_vmware.api [None req-c655b008-6120-479a-a49e-636fa3ebd722 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1315154, 'name': PowerOffVM_Task, 'duration_secs': 0.333984} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1167.532367] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-c655b008-6120-479a-a49e-636fa3ebd722 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 62af556c-c9b1-4de6-bb07-532ba67fa367] Powered off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1167.533158] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af3082c5-ae45-4219-85bd-f3f1390f3607 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.550588] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e2329d3-441e-43b3-8aa9-8fddba4ec38f {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.658788] env[61839]: DEBUG oslo_concurrency.lockutils [None req-06137254-4d69-47ff-9f00-7ace8f632e0a tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1167.659083] env[61839]: DEBUG oslo_concurrency.lockutils [None req-06137254-4d69-47ff-9f00-7ace8f632e0a tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1167.731998] env[61839]: DEBUG nova.compute.manager [req-cc7365b3-e378-4169-8dd3-dfb4707e8b81 req-7c88888a-9674-4722-8da0-520de4e6cbfe service nova] [instance: af3978c4-3bd4-47da-a188-954bd6385183] Received event network-changed-9e2d05ef-d36f-4c9a-980d-457ebeb1e2af {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1167.732243] env[61839]: DEBUG nova.compute.manager [req-cc7365b3-e378-4169-8dd3-dfb4707e8b81 req-7c88888a-9674-4722-8da0-520de4e6cbfe service nova] [instance: af3978c4-3bd4-47da-a188-954bd6385183] Refreshing instance network info cache due to event 
network-changed-9e2d05ef-d36f-4c9a-980d-457ebeb1e2af. {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1167.732464] env[61839]: DEBUG oslo_concurrency.lockutils [req-cc7365b3-e378-4169-8dd3-dfb4707e8b81 req-7c88888a-9674-4722-8da0-520de4e6cbfe service nova] Acquiring lock "refresh_cache-af3978c4-3bd4-47da-a188-954bd6385183" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1167.732619] env[61839]: DEBUG oslo_concurrency.lockutils [req-cc7365b3-e378-4169-8dd3-dfb4707e8b81 req-7c88888a-9674-4722-8da0-520de4e6cbfe service nova] Acquired lock "refresh_cache-af3978c4-3bd4-47da-a188-954bd6385183" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1167.732785] env[61839]: DEBUG nova.network.neutron [req-cc7365b3-e378-4169-8dd3-dfb4707e8b81 req-7c88888a-9674-4722-8da0-520de4e6cbfe service nova] [instance: af3978c4-3bd4-47da-a188-954bd6385183] Refreshing network info cache for port 9e2d05ef-d36f-4c9a-980d-457ebeb1e2af {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1168.060424] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-c655b008-6120-479a-a49e-636fa3ebd722 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 62af556c-c9b1-4de6-bb07-532ba67fa367] Creating Snapshot of the VM instance {{(pid=61839) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1168.060898] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-fb77ab37-390c-4f19-bf0c-bd21b55af6a3 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.068732] env[61839]: DEBUG oslo_vmware.api [None req-c655b008-6120-479a-a49e-636fa3ebd722 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Waiting for the task: (returnval){ [ 1168.068732] env[61839]: value = "task-1315155" [ 1168.068732] env[61839]: _type = "Task" [ 1168.068732] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1168.076506] env[61839]: DEBUG oslo_vmware.api [None req-c655b008-6120-479a-a49e-636fa3ebd722 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1315155, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1168.164330] env[61839]: INFO nova.compute.claims [None req-06137254-4d69-47ff-9f00-7ace8f632e0a tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: af3978c4-3bd4-47da-a188-954bd6385183] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1168.486708] env[61839]: DEBUG nova.network.neutron [req-cc7365b3-e378-4169-8dd3-dfb4707e8b81 req-7c88888a-9674-4722-8da0-520de4e6cbfe service nova] [instance: af3978c4-3bd4-47da-a188-954bd6385183] Updated VIF entry in instance network info cache for port 9e2d05ef-d36f-4c9a-980d-457ebeb1e2af. 
{{(pid=61839) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1168.487138] env[61839]: DEBUG nova.network.neutron [req-cc7365b3-e378-4169-8dd3-dfb4707e8b81 req-7c88888a-9674-4722-8da0-520de4e6cbfe service nova] [instance: af3978c4-3bd4-47da-a188-954bd6385183] Updating instance_info_cache with network_info: [{"id": "9e2d05ef-d36f-4c9a-980d-457ebeb1e2af", "address": "fa:16:3e:c6:e7:8f", "network": {"id": "90aa5d36-d3ac-4a62-9f9d-7578cd9ac466", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-2104030849-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.144", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "48d8c406ff504d71bba5fb74caf11c14", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eed34ae1-5f7f-4deb-9db8-85eaa1e60c29", "external-id": "nsx-vlan-transportzone-780", "segmentation_id": 780, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9e2d05ef-d3", "ovs_interfaceid": "9e2d05ef-d36f-4c9a-980d-457ebeb1e2af", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1168.580462] env[61839]: DEBUG oslo_vmware.api [None req-c655b008-6120-479a-a49e-636fa3ebd722 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1315155, 'name': CreateSnapshot_Task, 'duration_secs': 0.509973} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1168.580710] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-c655b008-6120-479a-a49e-636fa3ebd722 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 62af556c-c9b1-4de6-bb07-532ba67fa367] Created Snapshot of the VM instance {{(pid=61839) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1168.581475] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86a27412-da32-4e39-9108-e3396d5fb9e5 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.670206] env[61839]: INFO nova.compute.resource_tracker [None req-06137254-4d69-47ff-9f00-7ace8f632e0a tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: af3978c4-3bd4-47da-a188-954bd6385183] Updating resource usage from migration 073a9554-0804-427e-863d-fc302a912aa4 [ 1168.732934] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-490fa94b-0bee-41a0-a24a-df38174adc00 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.740532] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2454c21-5bb0-4fd2-86b5-824148afd479 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.770359] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e7ed3c0-a61d-45c3-9126-6d804686c992 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.777449] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef3b0f70-45db-481d-a96f-edcab411b152 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.791211] env[61839]: DEBUG nova.compute.provider_tree [None req-06137254-4d69-47ff-9f00-7ace8f632e0a tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1168.989696] env[61839]: DEBUG oslo_concurrency.lockutils [req-cc7365b3-e378-4169-8dd3-dfb4707e8b81 req-7c88888a-9674-4722-8da0-520de4e6cbfe service nova] Releasing lock "refresh_cache-af3978c4-3bd4-47da-a188-954bd6385183" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1169.098594] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-c655b008-6120-479a-a49e-636fa3ebd722 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 62af556c-c9b1-4de6-bb07-532ba67fa367] Creating linked-clone VM from snapshot {{(pid=61839) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1169.098943] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-9fdc79df-a6de-4754-93d9-6f35aabbcebf {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.109171] 
env[61839]: DEBUG oslo_vmware.api [None req-c655b008-6120-479a-a49e-636fa3ebd722 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Waiting for the task: (returnval){ [ 1169.109171] env[61839]: value = "task-1315156" [ 1169.109171] env[61839]: _type = "Task" [ 1169.109171] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1169.116810] env[61839]: DEBUG oslo_vmware.api [None req-c655b008-6120-479a-a49e-636fa3ebd722 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1315156, 'name': CloneVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1169.294434] env[61839]: DEBUG nova.scheduler.client.report [None req-06137254-4d69-47ff-9f00-7ace8f632e0a tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1169.619797] env[61839]: DEBUG oslo_vmware.api [None req-c655b008-6120-479a-a49e-636fa3ebd722 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1315156, 'name': CloneVM_Task} progress is 94%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1169.799738] env[61839]: DEBUG oslo_concurrency.lockutils [None req-06137254-4d69-47ff-9f00-7ace8f632e0a tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.140s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1169.800127] env[61839]: INFO nova.compute.manager [None req-06137254-4d69-47ff-9f00-7ace8f632e0a tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: af3978c4-3bd4-47da-a188-954bd6385183] Migrating [ 1170.119448] env[61839]: DEBUG oslo_vmware.api [None req-c655b008-6120-479a-a49e-636fa3ebd722 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1315156, 'name': CloneVM_Task} progress is 100%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1170.315764] env[61839]: DEBUG oslo_concurrency.lockutils [None req-06137254-4d69-47ff-9f00-7ace8f632e0a tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Acquiring lock "refresh_cache-af3978c4-3bd4-47da-a188-954bd6385183" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1170.316069] env[61839]: DEBUG oslo_concurrency.lockutils [None req-06137254-4d69-47ff-9f00-7ace8f632e0a tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Acquired lock "refresh_cache-af3978c4-3bd4-47da-a188-954bd6385183" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1170.316286] env[61839]: DEBUG nova.network.neutron [None req-06137254-4d69-47ff-9f00-7ace8f632e0a tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: af3978c4-3bd4-47da-a188-954bd6385183] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1170.620213] env[61839]: DEBUG oslo_vmware.api [None req-c655b008-6120-479a-a49e-636fa3ebd722 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1315156, 'name': CloneVM_Task} progress is 100%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1171.028947] env[61839]: DEBUG nova.network.neutron [None req-06137254-4d69-47ff-9f00-7ace8f632e0a tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: af3978c4-3bd4-47da-a188-954bd6385183] Updating instance_info_cache with network_info: [{"id": "9e2d05ef-d36f-4c9a-980d-457ebeb1e2af", "address": "fa:16:3e:c6:e7:8f", "network": {"id": "90aa5d36-d3ac-4a62-9f9d-7578cd9ac466", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-2104030849-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.144", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "48d8c406ff504d71bba5fb74caf11c14", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eed34ae1-5f7f-4deb-9db8-85eaa1e60c29", "external-id": "nsx-vlan-transportzone-780", "segmentation_id": 780, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9e2d05ef-d3", "ovs_interfaceid": "9e2d05ef-d36f-4c9a-980d-457ebeb1e2af", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1171.123753] env[61839]: DEBUG oslo_vmware.api [None req-c655b008-6120-479a-a49e-636fa3ebd722 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1315156, 'name': CloneVM_Task} progress is 100%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1171.532265] env[61839]: DEBUG oslo_concurrency.lockutils [None req-06137254-4d69-47ff-9f00-7ace8f632e0a tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Releasing lock "refresh_cache-af3978c4-3bd4-47da-a188-954bd6385183" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1171.622789] env[61839]: DEBUG oslo_vmware.api [None req-c655b008-6120-479a-a49e-636fa3ebd722 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1315156, 'name': CloneVM_Task} progress is 100%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1172.122681] env[61839]: DEBUG oslo_vmware.api [None req-c655b008-6120-479a-a49e-636fa3ebd722 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1315156, 'name': CloneVM_Task} progress is 100%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1172.624053] env[61839]: DEBUG oslo_vmware.api [None req-c655b008-6120-479a-a49e-636fa3ebd722 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1315156, 'name': CloneVM_Task} progress is 100%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1173.047182] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41436559-60a6-4735-b865-7a351170adf2 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.066249] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-06137254-4d69-47ff-9f00-7ace8f632e0a tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: af3978c4-3bd4-47da-a188-954bd6385183] Updating instance 'af3978c4-3bd4-47da-a188-954bd6385183' progress to 0 {{(pid=61839) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1173.123637] env[61839]: DEBUG oslo_vmware.api [None req-c655b008-6120-479a-a49e-636fa3ebd722 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1315156, 'name': CloneVM_Task} progress is 100%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1173.572293] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-06137254-4d69-47ff-9f00-7ace8f632e0a tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: af3978c4-3bd4-47da-a188-954bd6385183] Powering off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1173.572630] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-84f4f7b0-f78f-4c53-ac28-bf513e9c70ab {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.579753] env[61839]: DEBUG oslo_vmware.api [None req-06137254-4d69-47ff-9f00-7ace8f632e0a tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Waiting for the task: (returnval){ [ 1173.579753] env[61839]: value = "task-1315157" [ 1173.579753] env[61839]: _type = "Task" [ 1173.579753] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1173.588146] env[61839]: DEBUG oslo_vmware.api [None req-06137254-4d69-47ff-9f00-7ace8f632e0a tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1315157, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1173.623831] env[61839]: DEBUG oslo_vmware.api [None req-c655b008-6120-479a-a49e-636fa3ebd722 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1315156, 'name': CloneVM_Task} progress is 100%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1174.091125] env[61839]: DEBUG oslo_vmware.api [None req-06137254-4d69-47ff-9f00-7ace8f632e0a tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1315157, 'name': PowerOffVM_Task, 'duration_secs': 0.159594} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1174.091125] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-06137254-4d69-47ff-9f00-7ace8f632e0a tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: af3978c4-3bd4-47da-a188-954bd6385183] Powered off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1174.091125] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-06137254-4d69-47ff-9f00-7ace8f632e0a tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: af3978c4-3bd4-47da-a188-954bd6385183] Updating instance 'af3978c4-3bd4-47da-a188-954bd6385183' progress to 17 {{(pid=61839) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1174.125181] env[61839]: DEBUG oslo_vmware.api [None req-c655b008-6120-479a-a49e-636fa3ebd722 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1315156, 'name': CloneVM_Task, 'duration_secs': 4.854668} completed successfully. 
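The repeated "Task: {...} progress is N%" records above are produced by oslo.vmware's task-polling loop (wait_for_task / _poll_task). A minimal sketch of driving a vCenter task the same way, assuming a reachable vCenter; the endpoint and credentials below are placeholders, not taken from this log:

    # Poll a vCenter task with oslo.vmware, as wait_for_task does above.
    from oslo_vmware import api, vim_util

    session = api.VMwareAPISession(
        'vc.example.test', 'user', 'secret',        # hypothetical endpoint
        api_retry_count=10, task_poll_interval=0.5)

    # 'vm-281467' is the VirtualMachine moref value that appears later in this log.
    vm_ref = vim_util.get_moref('vm-281467', 'VirtualMachine')
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    session.wait_for_task(task)   # blocks, logging progress until success or error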
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1174.125464] env[61839]: INFO nova.virt.vmwareapi.vmops [None req-c655b008-6120-479a-a49e-636fa3ebd722 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 62af556c-c9b1-4de6-bb07-532ba67fa367] Created linked-clone VM from snapshot [ 1174.126218] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-089d9d19-c493-4716-903a-b007871dd335 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.134400] env[61839]: DEBUG nova.virt.vmwareapi.images [None req-c655b008-6120-479a-a49e-636fa3ebd722 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 62af556c-c9b1-4de6-bb07-532ba67fa367] Uploading image e8bff9ad-61f2-40ab-ac2e-835f4ec42d2f {{(pid=61839) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1174.159987] env[61839]: DEBUG oslo_vmware.rw_handles [None req-c655b008-6120-479a-a49e-636fa3ebd722 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1174.159987] env[61839]: value = "vm-281467" [ 1174.159987] env[61839]: _type = "VirtualMachine" [ 1174.159987] env[61839]: }. {{(pid=61839) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1174.160267] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-25d75c4b-4f62-4c5f-87bc-157e7865be89 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.167771] env[61839]: DEBUG oslo_vmware.rw_handles [None req-c655b008-6120-479a-a49e-636fa3ebd722 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Lease: (returnval){ [ 1174.167771] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]526c7002-559d-bce5-49b7-3789b41be9b8" [ 1174.167771] env[61839]: _type = "HttpNfcLease" [ 1174.167771] env[61839]: } obtained for exporting VM: (result){ [ 1174.167771] env[61839]: value = "vm-281467" [ 1174.167771] env[61839]: _type = "VirtualMachine" [ 1174.167771] env[61839]: }. {{(pid=61839) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1174.168064] env[61839]: DEBUG oslo_vmware.api [None req-c655b008-6120-479a-a49e-636fa3ebd722 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Waiting for the lease: (returnval){ [ 1174.168064] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]526c7002-559d-bce5-49b7-3789b41be9b8" [ 1174.168064] env[61839]: _type = "HttpNfcLease" [ 1174.168064] env[61839]: } to be ready. {{(pid=61839) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1174.174038] env[61839]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1174.174038] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]526c7002-559d-bce5-49b7-3789b41be9b8" [ 1174.174038] env[61839]: _type = "HttpNfcLease" [ 1174.174038] env[61839]: } is initializing. 
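The HttpNfcLease records above are the export half of the image-upload path: ExportVm returns a lease that starts out "initializing" and is polled until it reaches "ready". In oslo.vmware terms, continuing the hypothetical session from the earlier sketch:

    # Obtain an export lease for a VM and wait until it is usable,
    # mirroring the ExportVm / wait_for_lease_ready calls logged above.
    lease = session.invoke_api(session.vim, 'ExportVm', vm_ref)
    session.wait_for_lease_ready(lease)   # polls (_poll_lease) until 'ready'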
{{(pid=61839) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1174.596842] env[61839]: DEBUG nova.virt.hardware [None req-06137254-4d69-47ff-9f00-7ace8f632e0a tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-18T16:51:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1174.597124] env[61839]: DEBUG nova.virt.hardware [None req-06137254-4d69-47ff-9f00-7ace8f632e0a tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Flavor limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1174.597292] env[61839]: DEBUG nova.virt.hardware [None req-06137254-4d69-47ff-9f00-7ace8f632e0a tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Image limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1174.597489] env[61839]: DEBUG nova.virt.hardware [None req-06137254-4d69-47ff-9f00-7ace8f632e0a tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Flavor pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1174.597643] env[61839]: DEBUG nova.virt.hardware [None req-06137254-4d69-47ff-9f00-7ace8f632e0a tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Image pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1174.597797] env[61839]: DEBUG nova.virt.hardware [None req-06137254-4d69-47ff-9f00-7ace8f632e0a tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1174.598017] env[61839]: DEBUG nova.virt.hardware [None req-06137254-4d69-47ff-9f00-7ace8f632e0a tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1174.598215] env[61839]: DEBUG nova.virt.hardware [None req-06137254-4d69-47ff-9f00-7ace8f632e0a tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1174.598396] env[61839]: DEBUG nova.virt.hardware [None req-06137254-4d69-47ff-9f00-7ace8f632e0a tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Got 1 possible topologies {{(pid=61839) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1174.598564] env[61839]: DEBUG nova.virt.hardware [None req-06137254-4d69-47ff-9f00-7ace8f632e0a tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1174.598745] env[61839]: DEBUG nova.virt.hardware [None req-06137254-4d69-47ff-9f00-7ace8f632e0a tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1174.603738] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a4efb75d-0bda-44cd-a242-571d88e4b9cd {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.619514] env[61839]: DEBUG oslo_vmware.api [None req-06137254-4d69-47ff-9f00-7ace8f632e0a tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Waiting for the task: (returnval){ [ 1174.619514] env[61839]: value = "task-1315159" [ 1174.619514] env[61839]: _type = "Task" [ 1174.619514] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1174.628945] env[61839]: DEBUG oslo_vmware.api [None req-06137254-4d69-47ff-9f00-7ace8f632e0a tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1315159, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1174.676265] env[61839]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1174.676265] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]526c7002-559d-bce5-49b7-3789b41be9b8" [ 1174.676265] env[61839]: _type = "HttpNfcLease" [ 1174.676265] env[61839]: } is ready. {{(pid=61839) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1174.676622] env[61839]: DEBUG oslo_vmware.rw_handles [None req-c655b008-6120-479a-a49e-636fa3ebd722 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1174.676622] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]526c7002-559d-bce5-49b7-3789b41be9b8" [ 1174.676622] env[61839]: _type = "HttpNfcLease" [ 1174.676622] env[61839]: }. {{(pid=61839) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1174.677333] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c8cd5b0-8bd4-4aa7-a354-6b8be71e10dd {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.684352] env[61839]: DEBUG oslo_vmware.rw_handles [None req-c655b008-6120-479a-a49e-636fa3ebd722 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/527b69e9-f1ac-44d5-9747-3806a540dfc2/disk-0.vmdk from lease info. 
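Once the lease is ready, the VMDK URL logged above is extracted from the lease's info property; disk entries live in HttpNfcLeaseInfo.deviceUrl. Roughly as follows (attribute names are from the vSphere API; the code is a sketch reusing the session and lease from the earlier sketches):

    # Read the lease info and pick the first disk device URL,
    # as _find_vmdk_url does above.
    from oslo_vmware import vim_util

    lease_info = session.invoke_api(vim_util, 'get_object_property',
                                    session.vim, lease, 'info')
    vmdk_url = next(u.url for u in lease_info.deviceUrl if u.disk)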
{{(pid=61839) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1174.684534] env[61839]: DEBUG oslo_vmware.rw_handles [None req-c655b008-6120-479a-a49e-636fa3ebd722 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/527b69e9-f1ac-44d5-9747-3806a540dfc2/disk-0.vmdk for reading. {{(pid=61839) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1174.778731] env[61839]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-423d8f2a-40be-43e1-9ebe-c93279c0c64e {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.129797] env[61839]: DEBUG oslo_vmware.api [None req-06137254-4d69-47ff-9f00-7ace8f632e0a tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1315159, 'name': ReconfigVM_Task} progress is 99%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1175.630801] env[61839]: DEBUG oslo_vmware.api [None req-06137254-4d69-47ff-9f00-7ace8f632e0a tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1315159, 'name': ReconfigVM_Task} progress is 99%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1176.130911] env[61839]: DEBUG oslo_vmware.api [None req-06137254-4d69-47ff-9f00-7ace8f632e0a tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1315159, 'name': ReconfigVM_Task, 'duration_secs': 1.138235} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1176.131737] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-06137254-4d69-47ff-9f00-7ace8f632e0a tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: af3978c4-3bd4-47da-a188-954bd6385183] Updating instance 'af3978c4-3bd4-47da-a188-954bd6385183' progress to 33 {{(pid=61839) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1176.638465] env[61839]: DEBUG nova.virt.hardware [None req-06137254-4d69-47ff-9f00-7ace8f632e0a tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-18T16:51:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1176.638770] env[61839]: DEBUG nova.virt.hardware [None req-06137254-4d69-47ff-9f00-7ace8f632e0a tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Flavor limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1176.638940] env[61839]: DEBUG nova.virt.hardware [None req-06137254-4d69-47ff-9f00-7ace8f632e0a tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Image limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1176.639151] env[61839]: DEBUG nova.virt.hardware [None req-06137254-4d69-47ff-9f00-7ace8f632e0a tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Flavor pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1176.639351] env[61839]: DEBUG nova.virt.hardware [None req-06137254-4d69-47ff-9f00-7ace8f632e0a tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Image pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1176.639515] env[61839]: DEBUG nova.virt.hardware [None req-06137254-4d69-47ff-9f00-7ace8f632e0a tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1176.639730] env[61839]: DEBUG nova.virt.hardware [None req-06137254-4d69-47ff-9f00-7ace8f632e0a tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1176.639896] env[61839]: DEBUG nova.virt.hardware [None req-06137254-4d69-47ff-9f00-7ace8f632e0a tempest-ServerActionsTestOtherA-731553444 
tempest-ServerActionsTestOtherA-731553444-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1176.640083] env[61839]: DEBUG nova.virt.hardware [None req-06137254-4d69-47ff-9f00-7ace8f632e0a tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Got 1 possible topologies {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1176.640255] env[61839]: DEBUG nova.virt.hardware [None req-06137254-4d69-47ff-9f00-7ace8f632e0a tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1176.640437] env[61839]: DEBUG nova.virt.hardware [None req-06137254-4d69-47ff-9f00-7ace8f632e0a tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1176.645823] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-06137254-4d69-47ff-9f00-7ace8f632e0a tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: af3978c4-3bd4-47da-a188-954bd6385183] Reconfiguring VM instance instance-00000070 to detach disk 2000 {{(pid=61839) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1176.646145] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e9c802cf-4a7f-40e4-8ef3-3a371eacee50 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.665735] env[61839]: DEBUG oslo_vmware.api [None req-06137254-4d69-47ff-9f00-7ace8f632e0a tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Waiting for the task: (returnval){ [ 1176.665735] env[61839]: value = "task-1315160" [ 1176.665735] env[61839]: _type = "Task" [ 1176.665735] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1176.674191] env[61839]: DEBUG oslo_vmware.api [None req-06137254-4d69-47ff-9f00-7ace8f632e0a tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1315160, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1177.175924] env[61839]: DEBUG oslo_vmware.api [None req-06137254-4d69-47ff-9f00-7ace8f632e0a tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1315160, 'name': ReconfigVM_Task, 'duration_secs': 0.228793} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1177.176263] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-06137254-4d69-47ff-9f00-7ace8f632e0a tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: af3978c4-3bd4-47da-a188-954bd6385183] Reconfigured VM instance instance-00000070 to detach disk 2000 {{(pid=61839) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1177.177073] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-941e298e-eb6d-41dd-b502-beb90c53f833 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.200863] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-06137254-4d69-47ff-9f00-7ace8f632e0a tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: af3978c4-3bd4-47da-a188-954bd6385183] Reconfiguring VM instance instance-00000070 to attach disk [datastore2] volume-14ce5e6a-8a86-4817-b00b-46cc57cbdfa8/volume-14ce5e6a-8a86-4817-b00b-46cc57cbdfa8.vmdk or device None with type thin {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1177.201618] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6f3e2563-294b-48fc-8433-acc196e2f32c {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.220856] env[61839]: DEBUG oslo_vmware.api [None req-06137254-4d69-47ff-9f00-7ace8f632e0a tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Waiting for the task: (returnval){ [ 1177.220856] env[61839]: value = "task-1315161" [ 1177.220856] env[61839]: _type = "Task" [ 1177.220856] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1177.229750] env[61839]: DEBUG oslo_vmware.api [None req-06137254-4d69-47ff-9f00-7ace8f632e0a tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1315161, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1177.732024] env[61839]: DEBUG oslo_vmware.api [None req-06137254-4d69-47ff-9f00-7ace8f632e0a tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1315161, 'name': ReconfigVM_Task, 'duration_secs': 0.321658} completed successfully. 
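The two ReconfigVM_Task calls above (detach disk 2000, then attach the volume VMDK) follow the usual shape: build a VirtualMachineConfigSpec carrying a deviceChange list, submit it, and wait on the returned task. Structure only; the VirtualDisk device itself is elided here because its backing details are not in scope:

    # Approximate shape of the reconfigure calls logged above.
    cf = session.vim.client.factory
    spec = cf.create('ns0:VirtualMachineConfigSpec')
    dev_change = cf.create('ns0:VirtualDeviceConfigSpec')
    dev_change.operation = 'add'       # 'remove' for the detach step
    dev_change.device = ...            # VirtualDisk with its backing, elided
    spec.deviceChange = [dev_change]
    task = session.invoke_api(session.vim, 'ReconfigVM_Task', vm_ref, spec=spec)
    session.wait_for_task(task)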
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1177.732392] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-06137254-4d69-47ff-9f00-7ace8f632e0a tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: af3978c4-3bd4-47da-a188-954bd6385183] Reconfigured VM instance instance-00000070 to attach disk [datastore2] volume-14ce5e6a-8a86-4817-b00b-46cc57cbdfa8/volume-14ce5e6a-8a86-4817-b00b-46cc57cbdfa8.vmdk or device None with type thin {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1177.732682] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-06137254-4d69-47ff-9f00-7ace8f632e0a tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: af3978c4-3bd4-47da-a188-954bd6385183] Updating instance 'af3978c4-3bd4-47da-a188-954bd6385183' progress to 50 {{(pid=61839) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1178.241549] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67398ddb-9638-48a0-857d-d26e1d908660 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.271506] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17d18524-7e74-4d15-947f-a04de787d52c {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.290786] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-06137254-4d69-47ff-9f00-7ace8f632e0a tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: af3978c4-3bd4-47da-a188-954bd6385183] Updating instance 'af3978c4-3bd4-47da-a188-954bd6385183' progress to 67 {{(pid=61839) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1179.949329] env[61839]: DEBUG nova.network.neutron [None req-06137254-4d69-47ff-9f00-7ace8f632e0a tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: af3978c4-3bd4-47da-a188-954bd6385183] Port 9e2d05ef-d36f-4c9a-980d-457ebeb1e2af binding to destination host cpu-1 is already ACTIVE {{(pid=61839) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1180.968057] env[61839]: DEBUG oslo_concurrency.lockutils [None req-06137254-4d69-47ff-9f00-7ace8f632e0a tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Acquiring lock "af3978c4-3bd4-47da-a188-954bd6385183-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1180.968461] env[61839]: DEBUG oslo_concurrency.lockutils [None req-06137254-4d69-47ff-9f00-7ace8f632e0a tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Lock "af3978c4-3bd4-47da-a188-954bd6385183-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1180.968652] env[61839]: DEBUG oslo_concurrency.lockutils [None req-06137254-4d69-47ff-9f00-7ace8f632e0a tempest-ServerActionsTestOtherA-731553444 
tempest-ServerActionsTestOtherA-731553444-project-member] Lock "af3978c4-3bd4-47da-a188-954bd6385183-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1181.833430] env[61839]: DEBUG oslo_vmware.rw_handles [None req-c655b008-6120-479a-a49e-636fa3ebd722 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/527b69e9-f1ac-44d5-9747-3806a540dfc2/disk-0.vmdk. {{(pid=61839) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1181.834552] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f45e696c-1768-470e-b0c3-4a3c7202e0a9 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.841091] env[61839]: DEBUG oslo_vmware.rw_handles [None req-c655b008-6120-479a-a49e-636fa3ebd722 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/527b69e9-f1ac-44d5-9747-3806a540dfc2/disk-0.vmdk is in state: ready. {{(pid=61839) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1181.841268] env[61839]: ERROR oslo_vmware.rw_handles [None req-c655b008-6120-479a-a49e-636fa3ebd722 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/527b69e9-f1ac-44d5-9747-3806a540dfc2/disk-0.vmdk due to incomplete transfer. [ 1181.841487] env[61839]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-1f28297b-e215-4ce9-a986-b15ead961a05 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.848071] env[61839]: DEBUG oslo_vmware.rw_handles [None req-c655b008-6120-479a-a49e-636fa3ebd722 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/527b69e9-f1ac-44d5-9747-3806a540dfc2/disk-0.vmdk. 
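The ERROR above is oslo.vmware's lease close-out: when the read handle is closed while the lease is still in state "ready" (the transfer was never explicitly marked complete), the lease is aborted rather than completed. Approximately (a sketch of the rw_handles behaviour, reusing the earlier session and lease):

    # Release logic behind the "Aborting lease ... due to incomplete
    # transfer" record above.
    state = session.invoke_api(vim_util, 'get_object_property',
                               session.vim, lease, 'state')
    if state == 'ready':
        session.invoke_api(session.vim, 'HttpNfcLeaseAbort', lease)
    # if state were 'done', there would be nothing left to release

Note that the very next record reports the image as uploaded to the Glance image server, so the abort here is leftover-lease cleanup rather than a failed transfer.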
{{(pid=61839) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1181.848271] env[61839]: DEBUG nova.virt.vmwareapi.images [None req-c655b008-6120-479a-a49e-636fa3ebd722 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 62af556c-c9b1-4de6-bb07-532ba67fa367] Uploaded image e8bff9ad-61f2-40ab-ac2e-835f4ec42d2f to the Glance image server {{(pid=61839) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1181.850429] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-c655b008-6120-479a-a49e-636fa3ebd722 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 62af556c-c9b1-4de6-bb07-532ba67fa367] Destroying the VM {{(pid=61839) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1181.850650] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-54ed68b2-b876-4a6b-81a3-908d87b2bd59 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.855694] env[61839]: DEBUG oslo_vmware.api [None req-c655b008-6120-479a-a49e-636fa3ebd722 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Waiting for the task: (returnval){ [ 1181.855694] env[61839]: value = "task-1315162" [ 1181.855694] env[61839]: _type = "Task" [ 1181.855694] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1181.863113] env[61839]: DEBUG oslo_vmware.api [None req-c655b008-6120-479a-a49e-636fa3ebd722 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1315162, 'name': Destroy_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1182.010583] env[61839]: DEBUG oslo_concurrency.lockutils [None req-06137254-4d69-47ff-9f00-7ace8f632e0a tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Acquiring lock "refresh_cache-af3978c4-3bd4-47da-a188-954bd6385183" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1182.010871] env[61839]: DEBUG oslo_concurrency.lockutils [None req-06137254-4d69-47ff-9f00-7ace8f632e0a tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Acquired lock "refresh_cache-af3978c4-3bd4-47da-a188-954bd6385183" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1182.010959] env[61839]: DEBUG nova.network.neutron [None req-06137254-4d69-47ff-9f00-7ace8f632e0a tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: af3978c4-3bd4-47da-a188-954bd6385183] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1182.366782] env[61839]: DEBUG oslo_vmware.api [None req-c655b008-6120-479a-a49e-636fa3ebd722 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1315162, 'name': Destroy_Task, 'duration_secs': 0.286073} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1182.367058] env[61839]: INFO nova.virt.vmwareapi.vm_util [None req-c655b008-6120-479a-a49e-636fa3ebd722 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 62af556c-c9b1-4de6-bb07-532ba67fa367] Destroyed the VM [ 1182.367348] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-c655b008-6120-479a-a49e-636fa3ebd722 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 62af556c-c9b1-4de6-bb07-532ba67fa367] Deleting Snapshot of the VM instance {{(pid=61839) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1182.367616] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-d3138237-1fe8-4b74-9d69-be64737582f2 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.373966] env[61839]: DEBUG oslo_vmware.api [None req-c655b008-6120-479a-a49e-636fa3ebd722 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Waiting for the task: (returnval){ [ 1182.373966] env[61839]: value = "task-1315163" [ 1182.373966] env[61839]: _type = "Task" [ 1182.373966] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1182.381433] env[61839]: DEBUG oslo_vmware.api [None req-c655b008-6120-479a-a49e-636fa3ebd722 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1315163, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1182.842811] env[61839]: DEBUG nova.network.neutron [None req-06137254-4d69-47ff-9f00-7ace8f632e0a tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: af3978c4-3bd4-47da-a188-954bd6385183] Updating instance_info_cache with network_info: [{"id": "9e2d05ef-d36f-4c9a-980d-457ebeb1e2af", "address": "fa:16:3e:c6:e7:8f", "network": {"id": "90aa5d36-d3ac-4a62-9f9d-7578cd9ac466", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-2104030849-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.144", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "48d8c406ff504d71bba5fb74caf11c14", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eed34ae1-5f7f-4deb-9db8-85eaa1e60c29", "external-id": "nsx-vlan-transportzone-780", "segmentation_id": 780, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9e2d05ef-d3", "ovs_interfaceid": "9e2d05ef-d36f-4c9a-980d-457ebeb1e2af", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1182.883367] env[61839]: DEBUG 
oslo_vmware.api [None req-c655b008-6120-479a-a49e-636fa3ebd722 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1315163, 'name': RemoveSnapshot_Task, 'duration_secs': 0.32053} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1182.883601] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-c655b008-6120-479a-a49e-636fa3ebd722 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 62af556c-c9b1-4de6-bb07-532ba67fa367] Deleted Snapshot of the VM instance {{(pid=61839) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1182.883876] env[61839]: DEBUG nova.compute.manager [None req-c655b008-6120-479a-a49e-636fa3ebd722 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 62af556c-c9b1-4de6-bb07-532ba67fa367] Checking state {{(pid=61839) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1182.884631] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ca1c669-10b0-4ae4-a7c1-3769136eb237 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.346595] env[61839]: DEBUG oslo_concurrency.lockutils [None req-06137254-4d69-47ff-9f00-7ace8f632e0a tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Releasing lock "refresh_cache-af3978c4-3bd4-47da-a188-954bd6385183" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1183.396587] env[61839]: INFO nova.compute.manager [None req-c655b008-6120-479a-a49e-636fa3ebd722 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 62af556c-c9b1-4de6-bb07-532ba67fa367] Shelve offloading [ 1183.398287] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-c655b008-6120-479a-a49e-636fa3ebd722 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 62af556c-c9b1-4de6-bb07-532ba67fa367] Powering off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1183.398545] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-18880760-8930-4021-9947-a91e2366a8c3 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.405878] env[61839]: DEBUG oslo_vmware.api [None req-c655b008-6120-479a-a49e-636fa3ebd722 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Waiting for the task: (returnval){ [ 1183.405878] env[61839]: value = "task-1315164" [ 1183.405878] env[61839]: _type = "Task" [ 1183.405878] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1183.414199] env[61839]: DEBUG oslo_vmware.api [None req-c655b008-6120-479a-a49e-636fa3ebd722 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1315164, 'name': PowerOffVM_Task} progress is 0%. 
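The Acquiring/Acquired/Releasing lock records throughout this section (for example around "refresh_cache-..." and "compute_resources") are emitted by oslo_concurrency.lockutils. The equivalent pattern in code, using a lock name taken from this log:

    # lockutils emits the Acquiring/Acquired/Releasing records seen above.
    from oslo_concurrency import lockutils

    with lockutils.lock('refresh_cache-62af556c-c9b1-4de6-bb07-532ba67fa367'):
        # refresh the instance network info cache while holding the lock
        ...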
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1183.858910] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08aef40f-524e-4c13-bc96-702a0fe8c090 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.866656] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a70ae32c-b7a3-468f-8622-978771e464bb {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.916130] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-c655b008-6120-479a-a49e-636fa3ebd722 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 62af556c-c9b1-4de6-bb07-532ba67fa367] VM already powered off {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1183.917159] env[61839]: DEBUG nova.compute.manager [None req-c655b008-6120-479a-a49e-636fa3ebd722 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 62af556c-c9b1-4de6-bb07-532ba67fa367] Checking state {{(pid=61839) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1183.917159] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd3f2859-a0ce-4d2b-aa85-32299d95c2e5 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.922623] env[61839]: DEBUG oslo_concurrency.lockutils [None req-c655b008-6120-479a-a49e-636fa3ebd722 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Acquiring lock "refresh_cache-62af556c-c9b1-4de6-bb07-532ba67fa367" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1183.922796] env[61839]: DEBUG oslo_concurrency.lockutils [None req-c655b008-6120-479a-a49e-636fa3ebd722 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Acquired lock "refresh_cache-62af556c-c9b1-4de6-bb07-532ba67fa367" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1183.922969] env[61839]: DEBUG nova.network.neutron [None req-c655b008-6120-479a-a49e-636fa3ebd722 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 62af556c-c9b1-4de6-bb07-532ba67fa367] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1184.618628] env[61839]: DEBUG nova.network.neutron [None req-c655b008-6120-479a-a49e-636fa3ebd722 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 62af556c-c9b1-4de6-bb07-532ba67fa367] Updating instance_info_cache with network_info: [{"id": "539fa2ab-4763-4c46-ae0d-4db54ad64fa5", "address": "fa:16:3e:3b:e7:ff", "network": {"id": "54f10e74-ee7b-499c-a6b6-a27d337719cd", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-79581761-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.143", "type": "floating", 
"version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5f789f3900a347b59c491e9d141fb9e7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe20ef0e-0991-44d7-887d-08dddac0b56b", "external-id": "nsx-vlan-transportzone-991", "segmentation_id": 991, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap539fa2ab-47", "ovs_interfaceid": "539fa2ab-4763-4c46-ae0d-4db54ad64fa5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1184.967744] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9a31af3-2ca0-490e-a841-e80738baae88 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.987424] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e915e62-20b8-420a-a98d-96fb244cf2f8 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.994747] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-06137254-4d69-47ff-9f00-7ace8f632e0a tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: af3978c4-3bd4-47da-a188-954bd6385183] Updating instance 'af3978c4-3bd4-47da-a188-954bd6385183' progress to 83 {{(pid=61839) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1185.121456] env[61839]: DEBUG oslo_concurrency.lockutils [None req-c655b008-6120-479a-a49e-636fa3ebd722 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Releasing lock "refresh_cache-62af556c-c9b1-4de6-bb07-532ba67fa367" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1185.323219] env[61839]: DEBUG nova.compute.manager [req-fe2489b9-5b5d-4ec7-903f-e695c7c3ab38 req-d3d21985-6375-49ce-886b-ea081fc7865e service nova] [instance: 62af556c-c9b1-4de6-bb07-532ba67fa367] Received event network-vif-unplugged-539fa2ab-4763-4c46-ae0d-4db54ad64fa5 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1185.323453] env[61839]: DEBUG oslo_concurrency.lockutils [req-fe2489b9-5b5d-4ec7-903f-e695c7c3ab38 req-d3d21985-6375-49ce-886b-ea081fc7865e service nova] Acquiring lock "62af556c-c9b1-4de6-bb07-532ba67fa367-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1185.323668] env[61839]: DEBUG oslo_concurrency.lockutils [req-fe2489b9-5b5d-4ec7-903f-e695c7c3ab38 req-d3d21985-6375-49ce-886b-ea081fc7865e service nova] Lock "62af556c-c9b1-4de6-bb07-532ba67fa367-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1185.323839] env[61839]: DEBUG oslo_concurrency.lockutils [req-fe2489b9-5b5d-4ec7-903f-e695c7c3ab38 req-d3d21985-6375-49ce-886b-ea081fc7865e service nova] Lock 
"62af556c-c9b1-4de6-bb07-532ba67fa367-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1185.324023] env[61839]: DEBUG nova.compute.manager [req-fe2489b9-5b5d-4ec7-903f-e695c7c3ab38 req-d3d21985-6375-49ce-886b-ea081fc7865e service nova] [instance: 62af556c-c9b1-4de6-bb07-532ba67fa367] No waiting events found dispatching network-vif-unplugged-539fa2ab-4763-4c46-ae0d-4db54ad64fa5 {{(pid=61839) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1185.324206] env[61839]: WARNING nova.compute.manager [req-fe2489b9-5b5d-4ec7-903f-e695c7c3ab38 req-d3d21985-6375-49ce-886b-ea081fc7865e service nova] [instance: 62af556c-c9b1-4de6-bb07-532ba67fa367] Received unexpected event network-vif-unplugged-539fa2ab-4763-4c46-ae0d-4db54ad64fa5 for instance with vm_state shelved and task_state shelving_offloading. [ 1185.414251] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-c655b008-6120-479a-a49e-636fa3ebd722 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 62af556c-c9b1-4de6-bb07-532ba67fa367] Destroying instance {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1185.415166] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13668cb3-bd8d-4c5a-9db6-3c6dfcad7fae {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.422721] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-c655b008-6120-479a-a49e-636fa3ebd722 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 62af556c-c9b1-4de6-bb07-532ba67fa367] Unregistering the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1185.422948] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fd35b0c6-16e5-4f4e-94fd-a022ce3e9eae {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.500907] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-06137254-4d69-47ff-9f00-7ace8f632e0a tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: af3978c4-3bd4-47da-a188-954bd6385183] Powering on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1185.501237] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-dcb5bf8b-4ce9-4628-9b20-c9fc599f4e33 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.504535] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-c655b008-6120-479a-a49e-636fa3ebd722 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 62af556c-c9b1-4de6-bb07-532ba67fa367] Unregistered the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1185.504743] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-c655b008-6120-479a-a49e-636fa3ebd722 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 62af556c-c9b1-4de6-bb07-532ba67fa367] Deleting contents of the VM from datastore datastore1 {{(pid=61839) _destroy_instance 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1185.504928] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-c655b008-6120-479a-a49e-636fa3ebd722 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Deleting the datastore file [datastore1] 62af556c-c9b1-4de6-bb07-532ba67fa367 {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1185.505556] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0e3077a3-1c41-4dbc-bb90-67dccd38be5e {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.509952] env[61839]: DEBUG oslo_vmware.api [None req-06137254-4d69-47ff-9f00-7ace8f632e0a tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Waiting for the task: (returnval){ [ 1185.509952] env[61839]: value = "task-1315166" [ 1185.509952] env[61839]: _type = "Task" [ 1185.509952] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1185.513621] env[61839]: DEBUG oslo_vmware.api [None req-c655b008-6120-479a-a49e-636fa3ebd722 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Waiting for the task: (returnval){ [ 1185.513621] env[61839]: value = "task-1315167" [ 1185.513621] env[61839]: _type = "Task" [ 1185.513621] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1185.519433] env[61839]: DEBUG oslo_vmware.api [None req-06137254-4d69-47ff-9f00-7ace8f632e0a tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1315166, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1185.524328] env[61839]: DEBUG oslo_vmware.api [None req-c655b008-6120-479a-a49e-636fa3ebd722 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1315167, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1186.030704] env[61839]: DEBUG oslo_vmware.api [None req-c655b008-6120-479a-a49e-636fa3ebd722 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1315167, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.144552} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1186.031104] env[61839]: DEBUG oslo_vmware.api [None req-06137254-4d69-47ff-9f00-7ace8f632e0a tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1315166, 'name': PowerOnVM_Task, 'duration_secs': 0.370272} completed successfully. 
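Datastore cleanup above goes through FileManager.DeleteDatastoreFile_Task, invoked against the service content's fileManager and then waited on like any other task. A sketch using the datastore path from this log; dc_ref, the datacenter moref, is assumed to have been looked up already:

    # Shape of the datastore file deletion logged above.
    file_manager = session.vim.service_content.fileManager
    task = session.invoke_api(
        session.vim, 'DeleteDatastoreFile_Task', file_manager,
        name='[datastore1] 62af556c-c9b1-4de6-bb07-532ba67fa367',
        datacenter=dc_ref)   # dc_ref obtained elsewhere
    session.wait_for_task(task)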
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1186.031397] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-c655b008-6120-479a-a49e-636fa3ebd722 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Deleted the datastore file {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1186.031675] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-c655b008-6120-479a-a49e-636fa3ebd722 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 62af556c-c9b1-4de6-bb07-532ba67fa367] Deleted contents of the VM from datastore datastore1 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1186.031949] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-c655b008-6120-479a-a49e-636fa3ebd722 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 62af556c-c9b1-4de6-bb07-532ba67fa367] Instance destroyed {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1186.034650] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-06137254-4d69-47ff-9f00-7ace8f632e0a tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: af3978c4-3bd4-47da-a188-954bd6385183] Powered on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1186.034932] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-06137254-4d69-47ff-9f00-7ace8f632e0a tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: af3978c4-3bd4-47da-a188-954bd6385183] Updating instance 'af3978c4-3bd4-47da-a188-954bd6385183' progress to 100 {{(pid=61839) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1186.074735] env[61839]: INFO nova.scheduler.client.report [None req-c655b008-6120-479a-a49e-636fa3ebd722 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Deleted allocations for instance 62af556c-c9b1-4de6-bb07-532ba67fa367 [ 1186.580662] env[61839]: DEBUG oslo_concurrency.lockutils [None req-c655b008-6120-479a-a49e-636fa3ebd722 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1186.580937] env[61839]: DEBUG oslo_concurrency.lockutils [None req-c655b008-6120-479a-a49e-636fa3ebd722 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1186.581180] env[61839]: DEBUG nova.objects.instance [None req-c655b008-6120-479a-a49e-636fa3ebd722 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Lazy-loading 'resources' on Instance uuid 62af556c-c9b1-4de6-bb07-532ba67fa367 {{(pid=61839) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1187.086992] env[61839]: DEBUG nova.objects.instance [None req-c655b008-6120-479a-a49e-636fa3ebd722 tempest-ServerActionsTestOtherB-75710309 
tempest-ServerActionsTestOtherB-75710309-project-member] Lazy-loading 'numa_topology' on Instance uuid 62af556c-c9b1-4de6-bb07-532ba67fa367 {{(pid=61839) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1187.384776] env[61839]: DEBUG nova.compute.manager [req-a37d8bf8-fb42-4b6e-9960-22bec6e157d1 req-f2899838-9110-4f64-a359-792f4c8a54d6 service nova] [instance: 62af556c-c9b1-4de6-bb07-532ba67fa367] Received event network-changed-539fa2ab-4763-4c46-ae0d-4db54ad64fa5 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1187.384979] env[61839]: DEBUG nova.compute.manager [req-a37d8bf8-fb42-4b6e-9960-22bec6e157d1 req-f2899838-9110-4f64-a359-792f4c8a54d6 service nova] [instance: 62af556c-c9b1-4de6-bb07-532ba67fa367] Refreshing instance network info cache due to event network-changed-539fa2ab-4763-4c46-ae0d-4db54ad64fa5. {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1187.385251] env[61839]: DEBUG oslo_concurrency.lockutils [req-a37d8bf8-fb42-4b6e-9960-22bec6e157d1 req-f2899838-9110-4f64-a359-792f4c8a54d6 service nova] Acquiring lock "refresh_cache-62af556c-c9b1-4de6-bb07-532ba67fa367" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1187.385374] env[61839]: DEBUG oslo_concurrency.lockutils [req-a37d8bf8-fb42-4b6e-9960-22bec6e157d1 req-f2899838-9110-4f64-a359-792f4c8a54d6 service nova] Acquired lock "refresh_cache-62af556c-c9b1-4de6-bb07-532ba67fa367" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1187.385517] env[61839]: DEBUG nova.network.neutron [req-a37d8bf8-fb42-4b6e-9960-22bec6e157d1 req-f2899838-9110-4f64-a359-792f4c8a54d6 service nova] [instance: 62af556c-c9b1-4de6-bb07-532ba67fa367] Refreshing network info cache for port 539fa2ab-4763-4c46-ae0d-4db54ad64fa5 {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1187.594647] env[61839]: DEBUG nova.objects.base [None req-c655b008-6120-479a-a49e-636fa3ebd722 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Object Instance<62af556c-c9b1-4de6-bb07-532ba67fa367> lazy-loaded attributes: resources,numa_topology {{(pid=61839) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 1187.656586] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3dc63ff0-1121-40dc-911c-1975b504e654 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.664328] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87f10ff1-9e6b-443f-9ce1-f7f086ec7c7c {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.693837] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e67b1de5-5086-403a-b042-8ee767f6ea08 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.701044] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83432eef-780d-4cc2-9787-0e0364ea717d {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.714036] env[61839]: DEBUG nova.compute.provider_tree [None req-c655b008-6120-479a-a49e-636fa3ebd722 
tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1187.871222] env[61839]: DEBUG oslo_concurrency.lockutils [None req-d3e930f7-e40c-49a0-8871-a0fe292edb39 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Acquiring lock "af3978c4-3bd4-47da-a188-954bd6385183" by "nova.compute.manager.ComputeManager.confirm_resize.<locals>.do_confirm_resize" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1187.871583] env[61839]: DEBUG oslo_concurrency.lockutils [None req-d3e930f7-e40c-49a0-8871-a0fe292edb39 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Lock "af3978c4-3bd4-47da-a188-954bd6385183" acquired by "nova.compute.manager.ComputeManager.confirm_resize.<locals>.do_confirm_resize" :: waited 0.001s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1187.871855] env[61839]: DEBUG nova.compute.manager [None req-d3e930f7-e40c-49a0-8871-a0fe292edb39 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: af3978c4-3bd4-47da-a188-954bd6385183] Going to confirm migration 6 {{(pid=61839) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:4783}} [ 1188.131524] env[61839]: DEBUG nova.network.neutron [req-a37d8bf8-fb42-4b6e-9960-22bec6e157d1 req-f2899838-9110-4f64-a359-792f4c8a54d6 service nova] [instance: 62af556c-c9b1-4de6-bb07-532ba67fa367] Updated VIF entry in instance network info cache for port 539fa2ab-4763-4c46-ae0d-4db54ad64fa5.
{{(pid=61839) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1188.131891] env[61839]: DEBUG nova.network.neutron [req-a37d8bf8-fb42-4b6e-9960-22bec6e157d1 req-f2899838-9110-4f64-a359-792f4c8a54d6 service nova] [instance: 62af556c-c9b1-4de6-bb07-532ba67fa367] Updating instance_info_cache with network_info: [{"id": "539fa2ab-4763-4c46-ae0d-4db54ad64fa5", "address": "fa:16:3e:3b:e7:ff", "network": {"id": "54f10e74-ee7b-499c-a6b6-a27d337719cd", "bridge": null, "label": "tempest-ServerActionsTestOtherB-79581761-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.143", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5f789f3900a347b59c491e9d141fb9e7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap539fa2ab-47", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1188.217190] env[61839]: DEBUG nova.scheduler.client.report [None req-c655b008-6120-479a-a49e-636fa3ebd722 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1188.407809] env[61839]: DEBUG oslo_concurrency.lockutils [None req-d3e930f7-e40c-49a0-8871-a0fe292edb39 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Acquiring lock "refresh_cache-af3978c4-3bd4-47da-a188-954bd6385183" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1188.408006] env[61839]: DEBUG oslo_concurrency.lockutils [None req-d3e930f7-e40c-49a0-8871-a0fe292edb39 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Acquired lock "refresh_cache-af3978c4-3bd4-47da-a188-954bd6385183" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1188.408204] env[61839]: DEBUG nova.network.neutron [None req-d3e930f7-e40c-49a0-8871-a0fe292edb39 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: af3978c4-3bd4-47da-a188-954bd6385183] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1188.408396] env[61839]: DEBUG nova.objects.instance [None req-d3e930f7-e40c-49a0-8871-a0fe292edb39 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Lazy-loading 
'info_cache' on Instance uuid af3978c4-3bd4-47da-a188-954bd6385183 {{(pid=61839) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1188.605315] env[61839]: DEBUG oslo_concurrency.lockutils [None req-e48722e3-47fd-4614-af49-3795409abf36 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Acquiring lock "62af556c-c9b1-4de6-bb07-532ba67fa367" by "nova.compute.manager.ComputeManager.unshelve_instance.<locals>.do_unshelve_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1188.634643] env[61839]: DEBUG oslo_concurrency.lockutils [req-a37d8bf8-fb42-4b6e-9960-22bec6e157d1 req-f2899838-9110-4f64-a359-792f4c8a54d6 service nova] Releasing lock "refresh_cache-62af556c-c9b1-4de6-bb07-532ba67fa367" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1188.722406] env[61839]: DEBUG oslo_concurrency.lockutils [None req-c655b008-6120-479a-a49e-636fa3ebd722 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.141s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1189.230300] env[61839]: DEBUG oslo_concurrency.lockutils [None req-c655b008-6120-479a-a49e-636fa3ebd722 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Lock "62af556c-c9b1-4de6-bb07-532ba67fa367" "released" by "nova.compute.manager.ComputeManager.shelve_instance.<locals>.do_shelve_instance" :: held 22.725s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1189.230972] env[61839]: DEBUG oslo_concurrency.lockutils [None req-e48722e3-47fd-4614-af49-3795409abf36 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Lock "62af556c-c9b1-4de6-bb07-532ba67fa367" acquired by "nova.compute.manager.ComputeManager.unshelve_instance.<locals>.do_unshelve_instance" :: waited 0.626s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1189.231185] env[61839]: INFO nova.compute.manager [None req-e48722e3-47fd-4614-af49-3795409abf36 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 62af556c-c9b1-4de6-bb07-532ba67fa367] Unshelving [ 1189.489761] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1189.603462] env[61839]: DEBUG nova.network.neutron [None req-d3e930f7-e40c-49a0-8871-a0fe292edb39 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: af3978c4-3bd4-47da-a188-954bd6385183] Updating instance_info_cache with network_info: [{"id": "9e2d05ef-d36f-4c9a-980d-457ebeb1e2af", "address": "fa:16:3e:c6:e7:8f", "network": {"id": "90aa5d36-d3ac-4a62-9f9d-7578cd9ac466", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-2104030849-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta":
{}, "floating_ips": [{"address": "10.180.180.144", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "48d8c406ff504d71bba5fb74caf11c14", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eed34ae1-5f7f-4deb-9db8-85eaa1e60c29", "external-id": "nsx-vlan-transportzone-780", "segmentation_id": 780, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9e2d05ef-d3", "ovs_interfaceid": "9e2d05ef-d36f-4c9a-980d-457ebeb1e2af", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1190.106464] env[61839]: DEBUG oslo_concurrency.lockutils [None req-d3e930f7-e40c-49a0-8871-a0fe292edb39 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Releasing lock "refresh_cache-af3978c4-3bd4-47da-a188-954bd6385183" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1190.106706] env[61839]: DEBUG nova.objects.instance [None req-d3e930f7-e40c-49a0-8871-a0fe292edb39 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Lazy-loading 'migration_context' on Instance uuid af3978c4-3bd4-47da-a188-954bd6385183 {{(pid=61839) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1190.259169] env[61839]: DEBUG oslo_concurrency.lockutils [None req-e48722e3-47fd-4614-af49-3795409abf36 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1190.259532] env[61839]: DEBUG oslo_concurrency.lockutils [None req-e48722e3-47fd-4614-af49-3795409abf36 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1190.259688] env[61839]: DEBUG nova.objects.instance [None req-e48722e3-47fd-4614-af49-3795409abf36 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Lazy-loading 'pci_requests' on Instance uuid 62af556c-c9b1-4de6-bb07-532ba67fa367 {{(pid=61839) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1190.609698] env[61839]: DEBUG nova.objects.base [None req-d3e930f7-e40c-49a0-8871-a0fe292edb39 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Object Instance lazy-loaded attributes: info_cache,migration_context {{(pid=61839) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 1190.610952] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08e93def-621d-4fd9-8aec-36cdd032fd6a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.629971] env[61839]: DEBUG oslo_vmware.service [-] Invoking 
HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9f71f7f0-0105-49d6-a40a-773bd3c402c7 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.635295] env[61839]: DEBUG oslo_vmware.api [None req-d3e930f7-e40c-49a0-8871-a0fe292edb39 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Waiting for the task: (returnval){ [ 1190.635295] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]524893e2-8d3e-49a6-5f9f-03273c2763e9" [ 1190.635295] env[61839]: _type = "Task" [ 1190.635295] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1190.642457] env[61839]: DEBUG oslo_vmware.api [None req-d3e930f7-e40c-49a0-8871-a0fe292edb39 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]524893e2-8d3e-49a6-5f9f-03273c2763e9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1190.763552] env[61839]: DEBUG nova.objects.instance [None req-e48722e3-47fd-4614-af49-3795409abf36 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Lazy-loading 'numa_topology' on Instance uuid 62af556c-c9b1-4de6-bb07-532ba67fa367 {{(pid=61839) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1191.145483] env[61839]: DEBUG oslo_vmware.api [None req-d3e930f7-e40c-49a0-8871-a0fe292edb39 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]524893e2-8d3e-49a6-5f9f-03273c2763e9, 'name': SearchDatastore_Task, 'duration_secs': 0.007556} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1191.145733] env[61839]: DEBUG oslo_concurrency.lockutils [None req-d3e930f7-e40c-49a0-8871-a0fe292edb39 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1191.266588] env[61839]: INFO nova.compute.claims [None req-e48722e3-47fd-4614-af49-3795409abf36 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 62af556c-c9b1-4de6-bb07-532ba67fa367] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1191.488938] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1192.343470] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae611ffa-0b79-4e6b-a284-d959dde4656f {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.353650] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25678084-4b77-486f-afcf-2aed080c805a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.382771] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50bb5926-76e4-44e0-ba82-3c9d37bfdf36 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.389710] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d0b1bed-973d-454d-9ede-85080b99df2b {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.402164] env[61839]: DEBUG nova.compute.provider_tree [None req-e48722e3-47fd-4614-af49-3795409abf36 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1192.488738] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1192.488955] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1192.905602] env[61839]: DEBUG nova.scheduler.client.report [None req-e48722e3-47fd-4614-af49-3795409abf36 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 
'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1192.991362] env[61839]: DEBUG oslo_concurrency.lockutils [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1193.410904] env[61839]: DEBUG oslo_concurrency.lockutils [None req-e48722e3-47fd-4614-af49-3795409abf36 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.151s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1193.413047] env[61839]: DEBUG oslo_concurrency.lockutils [None req-d3e930f7-e40c-49a0-8871-a0fe292edb39 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 2.267s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1193.440873] env[61839]: INFO nova.network.neutron [None req-e48722e3-47fd-4614-af49-3795409abf36 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 62af556c-c9b1-4de6-bb07-532ba67fa367] Updating port 539fa2ab-4763-4c46-ae0d-4db54ad64fa5 with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 1193.981830] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b792d7ba-2a52-4bce-acee-d4d47f135245 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.989765] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-562f0c00-6958-4fcb-a1d8-322ef503da69 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.020155] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-551c20a8-2eae-4738-b94f-6cba65f4ebdd {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.027423] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5737ef1b-0d67-4a3a-9ef9-4bea75e43025 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.041195] env[61839]: DEBUG nova.compute.provider_tree [None req-d3e930f7-e40c-49a0-8871-a0fe292edb39 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1194.544705] env[61839]: DEBUG nova.scheduler.client.report [None 
req-d3e930f7-e40c-49a0-8871-a0fe292edb39 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1194.815150] env[61839]: DEBUG nova.compute.manager [req-37299303-46a3-4ef2-aa45-b6a4717fca66 req-4a11784a-608c-4ac3-abd4-feaf9b2df87c service nova] [instance: 62af556c-c9b1-4de6-bb07-532ba67fa367] Received event network-vif-plugged-539fa2ab-4763-4c46-ae0d-4db54ad64fa5 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1194.815150] env[61839]: DEBUG oslo_concurrency.lockutils [req-37299303-46a3-4ef2-aa45-b6a4717fca66 req-4a11784a-608c-4ac3-abd4-feaf9b2df87c service nova] Acquiring lock "62af556c-c9b1-4de6-bb07-532ba67fa367-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1194.815150] env[61839]: DEBUG oslo_concurrency.lockutils [req-37299303-46a3-4ef2-aa45-b6a4717fca66 req-4a11784a-608c-4ac3-abd4-feaf9b2df87c service nova] Lock "62af556c-c9b1-4de6-bb07-532ba67fa367-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.001s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1194.815150] env[61839]: DEBUG oslo_concurrency.lockutils [req-37299303-46a3-4ef2-aa45-b6a4717fca66 req-4a11784a-608c-4ac3-abd4-feaf9b2df87c service nova] Lock "62af556c-c9b1-4de6-bb07-532ba67fa367-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1194.815150] env[61839]: DEBUG nova.compute.manager [req-37299303-46a3-4ef2-aa45-b6a4717fca66 req-4a11784a-608c-4ac3-abd4-feaf9b2df87c service nova] [instance: 62af556c-c9b1-4de6-bb07-532ba67fa367] No waiting events found dispatching network-vif-plugged-539fa2ab-4763-4c46-ae0d-4db54ad64fa5 {{(pid=61839) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1194.815150] env[61839]: WARNING nova.compute.manager [req-37299303-46a3-4ef2-aa45-b6a4717fca66 req-4a11784a-608c-4ac3-abd4-feaf9b2df87c service nova] [instance: 62af556c-c9b1-4de6-bb07-532ba67fa367] Received unexpected event network-vif-plugged-539fa2ab-4763-4c46-ae0d-4db54ad64fa5 for instance with vm_state shelved_offloaded and task_state spawning.
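The Acquiring / acquired / "released" triplets above all come from the same oslo.concurrency wrapper: a named lock is taken around the decorated function, and the wait and hold times are logged at DEBUG on entry and exit (the inner call sites at lockutils.py:402, 407 and 421). A minimal sketch of that pattern, assuming only that oslo.concurrency is installed; the lock name is taken from the log, but the function body is illustrative, not Nova's actual handler:

    import time

    from oslo_concurrency import lockutils

    @lockutils.synchronized('62af556c-c9b1-4de6-bb07-532ba67fa367-events')
    def _pop_event():
        # Runs only while the named lock is held. lockutils' inner wrapper
        # logs 'Acquiring lock ... by ...' before blocking, 'acquired ...
        # waited N.NNNs' once it gets the lock, and '"released" ... held
        # N.NNNs' on the way out -- the same triplets seen above.
        time.sleep(0.01)

    _pop_event()

The waited/held figures are the useful signal when reading these lines for contention: a long wait on "compute_resources", like the multi-second waits elsewhere in this log, means another claim or audit held the lock first.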
[ 1194.898237] env[61839]: DEBUG oslo_concurrency.lockutils [None req-e48722e3-47fd-4614-af49-3795409abf36 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Acquiring lock "refresh_cache-62af556c-c9b1-4de6-bb07-532ba67fa367" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1194.898237] env[61839]: DEBUG oslo_concurrency.lockutils [None req-e48722e3-47fd-4614-af49-3795409abf36 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Acquired lock "refresh_cache-62af556c-c9b1-4de6-bb07-532ba67fa367" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1194.898237] env[61839]: DEBUG nova.network.neutron [None req-e48722e3-47fd-4614-af49-3795409abf36 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 62af556c-c9b1-4de6-bb07-532ba67fa367] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1195.554820] env[61839]: DEBUG oslo_concurrency.lockutils [None req-d3e930f7-e40c-49a0-8871-a0fe292edb39 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.142s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1195.557601] env[61839]: DEBUG oslo_concurrency.lockutils [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 2.566s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1195.557805] env[61839]: DEBUG oslo_concurrency.lockutils [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1195.557966] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61839) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1195.559097] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fe2967c-558d-4062-950f-e8b87644068c {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.567536] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3fc1000-9399-4ba5-ad9b-e5b5cfd0021b {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.583974] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6579e582-d464-45ec-8ab3-511dd3b979c2 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.590861] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-fc53eef0-6fb4-4cef-9811-b8067dc59a85 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.620684] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180925MB free_disk=134GB free_vcpus=48 pci_devices=None {{(pid=61839) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1195.620836] env[61839]: DEBUG oslo_concurrency.lockutils [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1195.621046] env[61839]: DEBUG oslo_concurrency.lockutils [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1195.623277] env[61839]: DEBUG nova.network.neutron [None req-e48722e3-47fd-4614-af49-3795409abf36 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 62af556c-c9b1-4de6-bb07-532ba67fa367] Updating instance_info_cache with network_info: [{"id": "539fa2ab-4763-4c46-ae0d-4db54ad64fa5", "address": "fa:16:3e:3b:e7:ff", "network": {"id": "54f10e74-ee7b-499c-a6b6-a27d337719cd", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-79581761-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.143", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5f789f3900a347b59c491e9d141fb9e7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe20ef0e-0991-44d7-887d-08dddac0b56b", "external-id": "nsx-vlan-transportzone-991", "segmentation_id": 991, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap539fa2ab-47", "ovs_interfaceid": "539fa2ab-4763-4c46-ae0d-4db54ad64fa5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1196.111322] env[61839]: INFO nova.scheduler.client.report [None req-d3e930f7-e40c-49a0-8871-a0fe292edb39 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Deleted allocation for migration 073a9554-0804-427e-863d-fc302a912aa4 [ 1196.128359] env[61839]: DEBUG oslo_concurrency.lockutils [None req-e48722e3-47fd-4614-af49-3795409abf36 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Releasing lock "refresh_cache-62af556c-c9b1-4de6-bb07-532ba67fa367" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1196.153075] env[61839]: 
DEBUG nova.virt.hardware [None req-e48722e3-47fd-4614-af49-3795409abf36 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-18T16:51:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='fd3063f40c93000c0fc3555e54bf9a78',container_format='bare',created_at=2024-10-18T17:03:45Z,direct_url=,disk_format='vmdk',id=e8bff9ad-61f2-40ab-ac2e-835f4ec42d2f,min_disk=1,min_ram=0,name='tempest-ServerActionsTestOtherB-server-1521392275-shelved',owner='5f789f3900a347b59c491e9d141fb9e7',properties=ImageMetaProps,protected=,size=31669760,status='active',tags=,updated_at=2024-10-18T17:04:01Z,virtual_size=,visibility=), allow threads: False {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1196.153343] env[61839]: DEBUG nova.virt.hardware [None req-e48722e3-47fd-4614-af49-3795409abf36 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Flavor limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1196.153510] env[61839]: DEBUG nova.virt.hardware [None req-e48722e3-47fd-4614-af49-3795409abf36 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Image limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1196.153699] env[61839]: DEBUG nova.virt.hardware [None req-e48722e3-47fd-4614-af49-3795409abf36 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Flavor pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1196.153853] env[61839]: DEBUG nova.virt.hardware [None req-e48722e3-47fd-4614-af49-3795409abf36 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Image pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1196.154016] env[61839]: DEBUG nova.virt.hardware [None req-e48722e3-47fd-4614-af49-3795409abf36 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1196.154232] env[61839]: DEBUG nova.virt.hardware [None req-e48722e3-47fd-4614-af49-3795409abf36 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1196.154405] env[61839]: DEBUG nova.virt.hardware [None req-e48722e3-47fd-4614-af49-3795409abf36 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1196.154648] env[61839]: DEBUG nova.virt.hardware [None req-e48722e3-47fd-4614-af49-3795409abf36 tempest-ServerActionsTestOtherB-75710309 
tempest-ServerActionsTestOtherB-75710309-project-member] Got 1 possible topologies {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1196.154831] env[61839]: DEBUG nova.virt.hardware [None req-e48722e3-47fd-4614-af49-3795409abf36 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1196.155025] env[61839]: DEBUG nova.virt.hardware [None req-e48722e3-47fd-4614-af49-3795409abf36 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1196.156132] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-294c0e29-43d0-44b1-b75c-d3bd79fff991 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.166472] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afe5eaa7-bbc2-485c-bb1e-69d182a9e437 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.180116] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-e48722e3-47fd-4614-af49-3795409abf36 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 62af556c-c9b1-4de6-bb07-532ba67fa367] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3b:e7:ff', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'fe20ef0e-0991-44d7-887d-08dddac0b56b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '539fa2ab-4763-4c46-ae0d-4db54ad64fa5', 'vif_model': 'vmxnet3'}] {{(pid=61839) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1196.187325] env[61839]: DEBUG oslo.service.loopingcall [None req-e48722e3-47fd-4614-af49-3795409abf36 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1196.187832] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 62af556c-c9b1-4de6-bb07-532ba67fa367] Creating VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1196.188065] env[61839]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-74966eaf-b4aa-4506-a7e1-a394b26cdbce {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.207505] env[61839]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1196.207505] env[61839]: value = "task-1315168" [ 1196.207505] env[61839]: _type = "Task" [ 1196.207505] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1196.214729] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1315168, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1196.616925] env[61839]: DEBUG oslo_concurrency.lockutils [None req-d3e930f7-e40c-49a0-8871-a0fe292edb39 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Lock "af3978c4-3bd4-47da-a188-954bd6385183" "released" by "nova.compute.manager.ComputeManager.confirm_resize.<locals>.do_confirm_resize" :: held 8.745s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1196.647203] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance 7f7b3f51-3e96-49f1-a84a-81ae649e6938 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1196.647415] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance 3e153d8a-e069-443c-9db4-7614a6475971 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1196.647560] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance af3978c4-3bd4-47da-a188-954bd6385183 actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1196.647683] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance 62af556c-c9b1-4de6-bb07-532ba67fa367 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1196.647857] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Total usable vcpus: 48, total allocated vcpus: 4 {{(pid=61839) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1196.647994] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1344MB phys_disk=200GB used_disk=3GB total_vcpus=48 used_vcpus=4 pci_stats=[] {{(pid=61839) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1196.694034] env[61839]: INFO nova.compute.manager [None req-5e064b2b-db24-4d55-ba89-3304a69a8275 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: af3978c4-3bd4-47da-a188-954bd6385183] Get console output [ 1196.694381] env[61839]: WARNING nova.virt.vmwareapi.driver [None req-5e064b2b-db24-4d55-ba89-3304a69a8275 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: af3978c4-3bd4-47da-a188-954bd6385183] The console log is missing. Check your VSPC configuration
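The VMware traffic throughout this log follows one calling pattern: the oslo.vmware session issues a SOAP request (logged as 'Invoking <Object>.<Method> with opID=oslo.vmware-...'), and any *_Task return value is then polled until it finishes (the 'Waiting for the task' and 'progress is N%' lines from api.py:397/434). A rough sketch of that round-trip against a disposable test vCenter; the host, credentials, retry/poll values, and the power-on example are placeholders, not values from this run:

    from oslo_vmware import api, vim_util

    # Opening the session performs the SessionManager.Login handshake that
    # appears at the top of this log; values here are illustrative.
    session = api.VMwareAPISession('vc.example.test', 'user', 'secret',
                                   api_retry_count=10, task_poll_interval=0.5)

    # Logged as 'Invoking PropertyCollector.RetrievePropertiesEx with opID=...'.
    result = session.invoke_api(vim_util, 'get_objects', session.vim,
                                'VirtualMachine', 100)

    if getattr(result, 'objects', None):
        vm_ref = result.objects[0].obj
        # *_Task methods return a Task moref; wait_for_task() polls it
        # (producing the "Task: {'id': ...} progress is N%." lines) and
        # raises if the task ends in an error state.
        task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
        session.wait_for_task(task)

The CreateVM_Task, PowerOnVM_Task and DeleteDatastoreFile_Task entries in this section are the same invoke-then-wait loop running for different managed-object methods.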
[ 1196.706478] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-304bbc43-1947-43eb-af79-ada810bcbfea {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.719427] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-941526f5-4b94-4a12-863d-f478116b4743 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.722293] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1315168, 'name': CreateVM_Task, 'duration_secs': 0.283672} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1196.722432] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 62af556c-c9b1-4de6-bb07-532ba67fa367] Created VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1196.723378] env[61839]: DEBUG oslo_concurrency.lockutils [None req-e48722e3-47fd-4614-af49-3795409abf36 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e8bff9ad-61f2-40ab-ac2e-835f4ec42d2f" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1196.723551] env[61839]: DEBUG oslo_concurrency.lockutils [None req-e48722e3-47fd-4614-af49-3795409abf36 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e8bff9ad-61f2-40ab-ac2e-835f4ec42d2f" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1196.723921] env[61839]: DEBUG oslo_concurrency.lockutils [None req-e48722e3-47fd-4614-af49-3795409abf36 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/e8bff9ad-61f2-40ab-ac2e-835f4ec42d2f" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1196.724170] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cd8ce3eb-4d8d-4387-a637-a4ece43f672c {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.749386] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a443d932-4a0d-4c89-a039-acb4d0edff49 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.752595] env[61839]: DEBUG oslo_vmware.api [None req-e48722e3-47fd-4614-af49-3795409abf36 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Waiting for the task: (returnval){ [ 1196.752595] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52e4d0f3-2912-6bce-ac91-4c21de931089" [ 1196.752595] env[61839]: _type = "Task" [ 1196.752595] env[61839]: } to complete.
{{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1196.760110] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-661d8ae8-416f-484e-82e9-e28e808d69e6 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.766362] env[61839]: DEBUG oslo_vmware.api [None req-e48722e3-47fd-4614-af49-3795409abf36 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52e4d0f3-2912-6bce-ac91-4c21de931089, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1196.775567] env[61839]: DEBUG nova.compute.provider_tree [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1196.838275] env[61839]: DEBUG nova.compute.manager [req-34a53068-0254-4c47-92b8-f483097ae3d3 req-ab3ec5b1-6e19-4d27-b12c-4036b45bde59 service nova] [instance: 62af556c-c9b1-4de6-bb07-532ba67fa367] Received event network-changed-539fa2ab-4763-4c46-ae0d-4db54ad64fa5 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1196.838275] env[61839]: DEBUG nova.compute.manager [req-34a53068-0254-4c47-92b8-f483097ae3d3 req-ab3ec5b1-6e19-4d27-b12c-4036b45bde59 service nova] [instance: 62af556c-c9b1-4de6-bb07-532ba67fa367] Refreshing instance network info cache due to event network-changed-539fa2ab-4763-4c46-ae0d-4db54ad64fa5. {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1196.838275] env[61839]: DEBUG oslo_concurrency.lockutils [req-34a53068-0254-4c47-92b8-f483097ae3d3 req-ab3ec5b1-6e19-4d27-b12c-4036b45bde59 service nova] Acquiring lock "refresh_cache-62af556c-c9b1-4de6-bb07-532ba67fa367" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1196.838404] env[61839]: DEBUG oslo_concurrency.lockutils [req-34a53068-0254-4c47-92b8-f483097ae3d3 req-ab3ec5b1-6e19-4d27-b12c-4036b45bde59 service nova] Acquired lock "refresh_cache-62af556c-c9b1-4de6-bb07-532ba67fa367" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1196.838469] env[61839]: DEBUG nova.network.neutron [req-34a53068-0254-4c47-92b8-f483097ae3d3 req-ab3ec5b1-6e19-4d27-b12c-4036b45bde59 service nova] [instance: 62af556c-c9b1-4de6-bb07-532ba67fa367] Refreshing network info cache for port 539fa2ab-4763-4c46-ae0d-4db54ad64fa5 {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1197.263108] env[61839]: DEBUG oslo_concurrency.lockutils [None req-e48722e3-47fd-4614-af49-3795409abf36 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e8bff9ad-61f2-40ab-ac2e-835f4ec42d2f" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1197.263395] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-e48722e3-47fd-4614-af49-3795409abf36 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 62af556c-c9b1-4de6-bb07-532ba67fa367] Processing image 
e8bff9ad-61f2-40ab-ac2e-835f4ec42d2f {{(pid=61839) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1197.263648] env[61839]: DEBUG oslo_concurrency.lockutils [None req-e48722e3-47fd-4614-af49-3795409abf36 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e8bff9ad-61f2-40ab-ac2e-835f4ec42d2f/e8bff9ad-61f2-40ab-ac2e-835f4ec42d2f.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1197.263804] env[61839]: DEBUG oslo_concurrency.lockutils [None req-e48722e3-47fd-4614-af49-3795409abf36 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e8bff9ad-61f2-40ab-ac2e-835f4ec42d2f/e8bff9ad-61f2-40ab-ac2e-835f4ec42d2f.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1197.263988] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-e48722e3-47fd-4614-af49-3795409abf36 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1197.264258] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8dcef1f7-82a4-483c-9d9e-f4d6c2f05a4c {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.271981] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-e48722e3-47fd-4614-af49-3795409abf36 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1197.272180] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-e48722e3-47fd-4614-af49-3795409abf36 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61839) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1197.272835] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5ac05905-ad63-4412-b1d7-dd59ae80c5de {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.278035] env[61839]: DEBUG nova.scheduler.client.report [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1197.281201] env[61839]: DEBUG oslo_vmware.api [None req-e48722e3-47fd-4614-af49-3795409abf36 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Waiting for the task: (returnval){ [ 1197.281201] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]5225c08c-034e-adb8-f587-7fbe5dc69e35" [ 1197.281201] env[61839]: _type = "Task" [ 1197.281201] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1197.289019] env[61839]: DEBUG oslo_vmware.api [None req-e48722e3-47fd-4614-af49-3795409abf36 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]5225c08c-034e-adb8-f587-7fbe5dc69e35, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1197.541612] env[61839]: DEBUG nova.network.neutron [req-34a53068-0254-4c47-92b8-f483097ae3d3 req-ab3ec5b1-6e19-4d27-b12c-4036b45bde59 service nova] [instance: 62af556c-c9b1-4de6-bb07-532ba67fa367] Updated VIF entry in instance network info cache for port 539fa2ab-4763-4c46-ae0d-4db54ad64fa5. 
{{(pid=61839) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1197.542039] env[61839]: DEBUG nova.network.neutron [req-34a53068-0254-4c47-92b8-f483097ae3d3 req-ab3ec5b1-6e19-4d27-b12c-4036b45bde59 service nova] [instance: 62af556c-c9b1-4de6-bb07-532ba67fa367] Updating instance_info_cache with network_info: [{"id": "539fa2ab-4763-4c46-ae0d-4db54ad64fa5", "address": "fa:16:3e:3b:e7:ff", "network": {"id": "54f10e74-ee7b-499c-a6b6-a27d337719cd", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-79581761-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.143", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5f789f3900a347b59c491e9d141fb9e7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe20ef0e-0991-44d7-887d-08dddac0b56b", "external-id": "nsx-vlan-transportzone-991", "segmentation_id": 991, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap539fa2ab-47", "ovs_interfaceid": "539fa2ab-4763-4c46-ae0d-4db54ad64fa5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1197.783098] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61839) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1197.783414] env[61839]: DEBUG oslo_concurrency.lockutils [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.162s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1197.793116] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-e48722e3-47fd-4614-af49-3795409abf36 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 62af556c-c9b1-4de6-bb07-532ba67fa367] Preparing fetch location {{(pid=61839) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1197.793369] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-e48722e3-47fd-4614-af49-3795409abf36 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 62af556c-c9b1-4de6-bb07-532ba67fa367] Fetch image to [datastore2] OSTACK_IMG_e8d69a51-729f-454c-9b07-d4d8ee622ecf/OSTACK_IMG_e8d69a51-729f-454c-9b07-d4d8ee622ecf.vmdk {{(pid=61839) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1197.793563] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-e48722e3-47fd-4614-af49-3795409abf36 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 62af556c-c9b1-4de6-bb07-532ba67fa367] Downloading stream optimized image e8bff9ad-61f2-40ab-ac2e-835f4ec42d2f to [datastore2] 
OSTACK_IMG_e8d69a51-729f-454c-9b07-d4d8ee622ecf/OSTACK_IMG_e8d69a51-729f-454c-9b07-d4d8ee622ecf.vmdk on the data store datastore2 as vApp {{(pid=61839) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1197.793740] env[61839]: DEBUG nova.virt.vmwareapi.images [None req-e48722e3-47fd-4614-af49-3795409abf36 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 62af556c-c9b1-4de6-bb07-532ba67fa367] Downloading image file data e8bff9ad-61f2-40ab-ac2e-835f4ec42d2f to the ESX as VM named 'OSTACK_IMG_e8d69a51-729f-454c-9b07-d4d8ee622ecf' {{(pid=61839) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1197.856584] env[61839]: DEBUG oslo_vmware.rw_handles [None req-e48722e3-47fd-4614-af49-3795409abf36 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1197.856584] env[61839]: value = "resgroup-9" [ 1197.856584] env[61839]: _type = "ResourcePool" [ 1197.856584] env[61839]: }. {{(pid=61839) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1197.856863] env[61839]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-40f87511-9dd6-4171-a46e-53216235d5f2 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.877896] env[61839]: DEBUG oslo_vmware.rw_handles [None req-e48722e3-47fd-4614-af49-3795409abf36 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Lease: (returnval){ [ 1197.877896] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]525af419-2b62-2684-d22d-a5d759710c8f" [ 1197.877896] env[61839]: _type = "HttpNfcLease" [ 1197.877896] env[61839]: } obtained for vApp import into resource pool (val){ [ 1197.877896] env[61839]: value = "resgroup-9" [ 1197.877896] env[61839]: _type = "ResourcePool" [ 1197.877896] env[61839]: }. {{(pid=61839) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1197.878218] env[61839]: DEBUG oslo_vmware.api [None req-e48722e3-47fd-4614-af49-3795409abf36 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Waiting for the lease: (returnval){ [ 1197.878218] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]525af419-2b62-2684-d22d-a5d759710c8f" [ 1197.878218] env[61839]: _type = "HttpNfcLease" [ 1197.878218] env[61839]: } to be ready. {{(pid=61839) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1197.884287] env[61839]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1197.884287] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]525af419-2b62-2684-d22d-a5d759710c8f" [ 1197.884287] env[61839]: _type = "HttpNfcLease" [ 1197.884287] env[61839]: } is initializing. 
{{(pid=61839) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1198.045388] env[61839]: DEBUG oslo_concurrency.lockutils [req-34a53068-0254-4c47-92b8-f483097ae3d3 req-ab3ec5b1-6e19-4d27-b12c-4036b45bde59 service nova] Releasing lock "refresh_cache-62af556c-c9b1-4de6-bb07-532ba67fa367" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1198.386771] env[61839]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1198.386771] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]525af419-2b62-2684-d22d-a5d759710c8f" [ 1198.386771] env[61839]: _type = "HttpNfcLease" [ 1198.386771] env[61839]: } is ready. {{(pid=61839) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1198.387041] env[61839]: DEBUG oslo_vmware.rw_handles [None req-e48722e3-47fd-4614-af49-3795409abf36 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1198.387041] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]525af419-2b62-2684-d22d-a5d759710c8f" [ 1198.387041] env[61839]: _type = "HttpNfcLease" [ 1198.387041] env[61839]: }. {{(pid=61839) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1198.387822] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd66f46b-5280-4176-bacb-4352f498fd96 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.394949] env[61839]: DEBUG oslo_vmware.rw_handles [None req-e48722e3-47fd-4614-af49-3795409abf36 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/521ff6c6-5ed0-2721-1277-8c0303084624/disk-0.vmdk from lease info. {{(pid=61839) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1198.395146] env[61839]: DEBUG oslo_vmware.rw_handles [None req-e48722e3-47fd-4614-af49-3795409abf36 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Creating HTTP connection to write to file with size = 31669760 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/521ff6c6-5ed0-2721-1277-8c0303084624/disk-0.vmdk. 
{{(pid=61839) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1198.458738] env[61839]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-4a850afd-0482-4f4c-9091-5a8d3189ffc5 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.784542] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1198.784872] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1199.291814] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1199.292042] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Starting heal instance info cache {{(pid=61839) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 1199.528783] env[61839]: DEBUG oslo_vmware.rw_handles [None req-e48722e3-47fd-4614-af49-3795409abf36 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Completed reading data from the image iterator. {{(pid=61839) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1199.529044] env[61839]: DEBUG oslo_vmware.rw_handles [None req-e48722e3-47fd-4614-af49-3795409abf36 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/521ff6c6-5ed0-2721-1277-8c0303084624/disk-0.vmdk. {{(pid=61839) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1199.529952] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b40c0594-4103-4a84-b866-e849c4f949d5 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.536817] env[61839]: DEBUG oslo_vmware.rw_handles [None req-e48722e3-47fd-4614-af49-3795409abf36 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/521ff6c6-5ed0-2721-1277-8c0303084624/disk-0.vmdk is in state: ready. {{(pid=61839) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1199.536986] env[61839]: DEBUG oslo_vmware.rw_handles [None req-e48722e3-47fd-4614-af49-3795409abf36 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Releasing lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/521ff6c6-5ed0-2721-1277-8c0303084624/disk-0.vmdk. 
{{(pid=61839) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1199.537245] env[61839]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-623d3945-2c81-40cd-9243-43ac5d78cceb {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.721588] env[61839]: DEBUG oslo_vmware.rw_handles [None req-e48722e3-47fd-4614-af49-3795409abf36 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Closed VMDK write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/521ff6c6-5ed0-2721-1277-8c0303084624/disk-0.vmdk. {{(pid=61839) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1199.721831] env[61839]: INFO nova.virt.vmwareapi.images [None req-e48722e3-47fd-4614-af49-3795409abf36 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 62af556c-c9b1-4de6-bb07-532ba67fa367] Downloaded image file data e8bff9ad-61f2-40ab-ac2e-835f4ec42d2f [ 1199.722775] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ea701af-caab-4bbb-b6b7-1e7a464d4410 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.738463] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7039088a-ac31-4d20-94a3-c23a4dc8f6ea {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.764963] env[61839]: INFO nova.virt.vmwareapi.images [None req-e48722e3-47fd-4614-af49-3795409abf36 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 62af556c-c9b1-4de6-bb07-532ba67fa367] The imported VM was unregistered [ 1199.767438] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-e48722e3-47fd-4614-af49-3795409abf36 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 62af556c-c9b1-4de6-bb07-532ba67fa367] Caching image {{(pid=61839) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1199.767671] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-e48722e3-47fd-4614-af49-3795409abf36 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Creating directory with path [datastore2] devstack-image-cache_base/e8bff9ad-61f2-40ab-ac2e-835f4ec42d2f {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1199.767949] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-90dcb640-ea6e-4028-b78e-ceae5a0d00db {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.786890] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-e48722e3-47fd-4614-af49-3795409abf36 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Created directory with path [datastore2] devstack-image-cache_base/e8bff9ad-61f2-40ab-ac2e-835f4ec42d2f {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1199.787152] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-e48722e3-47fd-4614-af49-3795409abf36 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Moving virtual 
disk from [datastore2] OSTACK_IMG_e8d69a51-729f-454c-9b07-d4d8ee622ecf/OSTACK_IMG_e8d69a51-729f-454c-9b07-d4d8ee622ecf.vmdk to [datastore2] devstack-image-cache_base/e8bff9ad-61f2-40ab-ac2e-835f4ec42d2f/e8bff9ad-61f2-40ab-ac2e-835f4ec42d2f.vmdk. {{(pid=61839) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1199.787418] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-741a0b06-37cb-47e3-bf03-83703e761a2a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.795350] env[61839]: DEBUG oslo_vmware.api [None req-e48722e3-47fd-4614-af49-3795409abf36 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Waiting for the task: (returnval){ [ 1199.795350] env[61839]: value = "task-1315171" [ 1199.795350] env[61839]: _type = "Task" [ 1199.795350] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1199.803151] env[61839]: DEBUG oslo_vmware.api [None req-e48722e3-47fd-4614-af49-3795409abf36 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1315171, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1200.298434] env[61839]: DEBUG oslo_concurrency.lockutils [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Acquiring lock "refresh_cache-62af556c-c9b1-4de6-bb07-532ba67fa367" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1200.298666] env[61839]: DEBUG oslo_concurrency.lockutils [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Acquired lock "refresh_cache-62af556c-c9b1-4de6-bb07-532ba67fa367" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1200.298891] env[61839]: DEBUG nova.network.neutron [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] [instance: 62af556c-c9b1-4de6-bb07-532ba67fa367] Forcefully refreshing network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 1200.309846] env[61839]: DEBUG oslo_vmware.api [None req-e48722e3-47fd-4614-af49-3795409abf36 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1315171, 'name': MoveVirtualDisk_Task} progress is 21%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1200.813542] env[61839]: DEBUG oslo_vmware.api [None req-e48722e3-47fd-4614-af49-3795409abf36 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1315171, 'name': MoveVirtualDisk_Task} progress is 43%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1201.312360] env[61839]: DEBUG oslo_vmware.api [None req-e48722e3-47fd-4614-af49-3795409abf36 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1315171, 'name': MoveVirtualDisk_Task} progress is 66%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1201.548763] env[61839]: DEBUG nova.network.neutron [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] [instance: 62af556c-c9b1-4de6-bb07-532ba67fa367] Updating instance_info_cache with network_info: [{"id": "539fa2ab-4763-4c46-ae0d-4db54ad64fa5", "address": "fa:16:3e:3b:e7:ff", "network": {"id": "54f10e74-ee7b-499c-a6b6-a27d337719cd", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-79581761-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.143", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5f789f3900a347b59c491e9d141fb9e7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe20ef0e-0991-44d7-887d-08dddac0b56b", "external-id": "nsx-vlan-transportzone-991", "segmentation_id": 991, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap539fa2ab-47", "ovs_interfaceid": "539fa2ab-4763-4c46-ae0d-4db54ad64fa5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1201.812689] env[61839]: DEBUG oslo_vmware.api [None req-e48722e3-47fd-4614-af49-3795409abf36 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1315171, 'name': MoveVirtualDisk_Task} progress is 88%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1202.052172] env[61839]: DEBUG oslo_concurrency.lockutils [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Releasing lock "refresh_cache-62af556c-c9b1-4de6-bb07-532ba67fa367" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1202.052499] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] [instance: 62af556c-c9b1-4de6-bb07-532ba67fa367] Updated the network info_cache for instance {{(pid=61839) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9999}} [ 1202.052627] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1202.052867] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1202.053038] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1202.053175] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61839) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 1202.314361] env[61839]: DEBUG oslo_vmware.api [None req-e48722e3-47fd-4614-af49-3795409abf36 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1315171, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.397997} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1202.314595] env[61839]: INFO nova.virt.vmwareapi.ds_util [None req-e48722e3-47fd-4614-af49-3795409abf36 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Moved virtual disk from [datastore2] OSTACK_IMG_e8d69a51-729f-454c-9b07-d4d8ee622ecf/OSTACK_IMG_e8d69a51-729f-454c-9b07-d4d8ee622ecf.vmdk to [datastore2] devstack-image-cache_base/e8bff9ad-61f2-40ab-ac2e-835f4ec42d2f/e8bff9ad-61f2-40ab-ac2e-835f4ec42d2f.vmdk. 
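The stretch from 1197.2 through 1202.3 above is the vmwareapi image-cache fetch path end to end: serialize on the cached image's .vmdk lock, import the stream-optimized image through an HttpNfcLease write handle (31669760 bytes pushed to the ESX host), unregister the helper VM OSTACK_IMG_e8d69a51-729f-454c-9b07-d4d8ee622ecf, then move the disk into devstack-image-cache_base with MoveVirtualDisk_Task, polled from 0% to completion in 2.397997s. The repeated wait_for_task/_poll_task entries are one generic poll-until-done loop. Below is a minimal sketch of that loop, assuming a caller-supplied get_task_info callable and invented state names; it is an illustration, not the actual oslo.vmware implementation.

import time

class TaskError(RuntimeError):
    """Polled task ended in the 'error' state."""

def wait_for_task(get_task_info, task_ref, poll_interval=0.5):
    """Poll a vCenter-style task until it succeeds or fails.

    get_task_info is a stand-in for however the session layer reads
    TaskInfo (the PropertyCollector.RetrievePropertiesEx calls seen in
    the log); it must return an object exposing .state, .progress,
    .result and .error.
    """
    while True:
        info = get_task_info(task_ref)
        if info.state == "success":
            return info.result          # e.g. the moved/copied disk path
        if info.state == "error":
            raise TaskError(info.error)  # surfaced to the caller
        # queued/running: same visibility as the "progress is N%." lines
        print(f"Task {task_ref!r} progress is {info.progress}%.")
        time.sleep(poll_interval)

The poll cadence in the log (CopyVirtualDisk_Task progress lines at roughly 0.5s intervals: 1203.349, 1203.851, 1204.351, 1204.853) matches a loop of this shape.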
[ 1202.314770] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-e48722e3-47fd-4614-af49-3795409abf36 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 62af556c-c9b1-4de6-bb07-532ba67fa367] Cleaning up location [datastore2] OSTACK_IMG_e8d69a51-729f-454c-9b07-d4d8ee622ecf {{(pid=61839) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1202.314938] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-e48722e3-47fd-4614-af49-3795409abf36 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Deleting the datastore file [datastore2] OSTACK_IMG_e8d69a51-729f-454c-9b07-d4d8ee622ecf {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1202.315226] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-114f8fd7-f25d-412b-b06a-c29f330105a5 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.321537] env[61839]: DEBUG oslo_vmware.api [None req-e48722e3-47fd-4614-af49-3795409abf36 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Waiting for the task: (returnval){ [ 1202.321537] env[61839]: value = "task-1315172" [ 1202.321537] env[61839]: _type = "Task" [ 1202.321537] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1202.328860] env[61839]: DEBUG oslo_vmware.api [None req-e48722e3-47fd-4614-af49-3795409abf36 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1315172, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1202.831257] env[61839]: DEBUG oslo_vmware.api [None req-e48722e3-47fd-4614-af49-3795409abf36 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1315172, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.038506} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1202.831512] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-e48722e3-47fd-4614-af49-3795409abf36 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Deleted the datastore file {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1202.831683] env[61839]: DEBUG oslo_concurrency.lockutils [None req-e48722e3-47fd-4614-af49-3795409abf36 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e8bff9ad-61f2-40ab-ac2e-835f4ec42d2f/e8bff9ad-61f2-40ab-ac2e-835f4ec42d2f.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1202.831935] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-e48722e3-47fd-4614-af49-3795409abf36 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/e8bff9ad-61f2-40ab-ac2e-835f4ec42d2f/e8bff9ad-61f2-40ab-ac2e-835f4ec42d2f.vmdk to [datastore2] 62af556c-c9b1-4de6-bb07-532ba67fa367/62af556c-c9b1-4de6-bb07-532ba67fa367.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1202.832207] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8e515f3c-0f46-436a-a339-fef880d8784e {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.838887] env[61839]: DEBUG oslo_vmware.api [None req-e48722e3-47fd-4614-af49-3795409abf36 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Waiting for the task: (returnval){ [ 1202.838887] env[61839]: value = "task-1315173" [ 1202.838887] env[61839]: _type = "Task" [ 1202.838887] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1202.845715] env[61839]: DEBUG oslo_vmware.api [None req-e48722e3-47fd-4614-af49-3795409abf36 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1315173, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1203.349597] env[61839]: DEBUG oslo_vmware.api [None req-e48722e3-47fd-4614-af49-3795409abf36 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1315173, 'name': CopyVirtualDisk_Task} progress is 15%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1203.851160] env[61839]: DEBUG oslo_vmware.api [None req-e48722e3-47fd-4614-af49-3795409abf36 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1315173, 'name': CopyVirtualDisk_Task} progress is 38%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1204.351794] env[61839]: DEBUG oslo_vmware.api [None req-e48722e3-47fd-4614-af49-3795409abf36 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1315173, 'name': CopyVirtualDisk_Task} progress is 60%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1204.853242] env[61839]: DEBUG oslo_vmware.api [None req-e48722e3-47fd-4614-af49-3795409abf36 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1315173, 'name': CopyVirtualDisk_Task} progress is 83%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1205.353911] env[61839]: DEBUG oslo_vmware.api [None req-e48722e3-47fd-4614-af49-3795409abf36 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1315173, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.378409} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1205.354243] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-e48722e3-47fd-4614-af49-3795409abf36 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/e8bff9ad-61f2-40ab-ac2e-835f4ec42d2f/e8bff9ad-61f2-40ab-ac2e-835f4ec42d2f.vmdk to [datastore2] 62af556c-c9b1-4de6-bb07-532ba67fa367/62af556c-c9b1-4de6-bb07-532ba67fa367.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1205.356168] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9ab1531-762e-4725-b1a7-69473480cb5d {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.377181] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-e48722e3-47fd-4614-af49-3795409abf36 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 62af556c-c9b1-4de6-bb07-532ba67fa367] Reconfiguring VM instance instance-0000006d to attach disk [datastore2] 62af556c-c9b1-4de6-bb07-532ba67fa367/62af556c-c9b1-4de6-bb07-532ba67fa367.vmdk or device None with type streamOptimized {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1205.378713] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bcf39794-723a-4bdb-acf8-d471367e928c {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.398617] env[61839]: DEBUG oslo_vmware.api [None req-e48722e3-47fd-4614-af49-3795409abf36 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Waiting for the task: (returnval){ [ 1205.398617] env[61839]: value = "task-1315174" [ 1205.398617] env[61839]: _type = "Task" [ 1205.398617] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1205.407320] env[61839]: DEBUG oslo_vmware.api [None req-e48722e3-47fd-4614-af49-3795409abf36 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1315174, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1205.909110] env[61839]: DEBUG oslo_vmware.api [None req-e48722e3-47fd-4614-af49-3795409abf36 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1315174, 'name': ReconfigVM_Task, 'duration_secs': 0.29052} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1205.909363] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-e48722e3-47fd-4614-af49-3795409abf36 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 62af556c-c9b1-4de6-bb07-532ba67fa367] Reconfigured VM instance instance-0000006d to attach disk [datastore2] 62af556c-c9b1-4de6-bb07-532ba67fa367/62af556c-c9b1-4de6-bb07-532ba67fa367.vmdk or device None with type streamOptimized {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1205.910029] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-beefa9af-e053-4d1e-b77d-d3874f768596 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.916411] env[61839]: DEBUG oslo_vmware.api [None req-e48722e3-47fd-4614-af49-3795409abf36 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Waiting for the task: (returnval){ [ 1205.916411] env[61839]: value = "task-1315175" [ 1205.916411] env[61839]: _type = "Task" [ 1205.916411] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1205.923822] env[61839]: DEBUG oslo_vmware.api [None req-e48722e3-47fd-4614-af49-3795409abf36 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1315175, 'name': Rename_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1206.426571] env[61839]: DEBUG oslo_vmware.api [None req-e48722e3-47fd-4614-af49-3795409abf36 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1315175, 'name': Rename_Task, 'duration_secs': 0.170584} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1206.426920] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-e48722e3-47fd-4614-af49-3795409abf36 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 62af556c-c9b1-4de6-bb07-532ba67fa367] Powering on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1206.427079] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d7b0d348-875c-4424-a553-095b3912ad00 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.433912] env[61839]: DEBUG oslo_vmware.api [None req-e48722e3-47fd-4614-af49-3795409abf36 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Waiting for the task: (returnval){ [ 1206.433912] env[61839]: value = "task-1315176" [ 1206.433912] env[61839]: _type = "Task" [ 1206.433912] env[61839]: } to complete. 
{{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1206.441701] env[61839]: DEBUG oslo_vmware.api [None req-e48722e3-47fd-4614-af49-3795409abf36 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1315176, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1206.945646] env[61839]: DEBUG oslo_vmware.api [None req-e48722e3-47fd-4614-af49-3795409abf36 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1315176, 'name': PowerOnVM_Task, 'duration_secs': 0.453579} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1206.945932] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-e48722e3-47fd-4614-af49-3795409abf36 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 62af556c-c9b1-4de6-bb07-532ba67fa367] Powered on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1207.040894] env[61839]: DEBUG nova.compute.manager [None req-e48722e3-47fd-4614-af49-3795409abf36 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 62af556c-c9b1-4de6-bb07-532ba67fa367] Checking state {{(pid=61839) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1207.041865] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-446b229d-00d5-4bd3-b8e4-18edc695f3c2 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.557667] env[61839]: DEBUG oslo_concurrency.lockutils [None req-e48722e3-47fd-4614-af49-3795409abf36 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Lock "62af556c-c9b1-4de6-bb07-532ba67fa367" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 18.326s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1208.306143] env[61839]: DEBUG oslo_concurrency.lockutils [None req-c09c8899-dd3d-4079-ba71-5b5415f9bb54 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Acquiring lock "62af556c-c9b1-4de6-bb07-532ba67fa367" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1208.306445] env[61839]: DEBUG oslo_concurrency.lockutils [None req-c09c8899-dd3d-4079-ba71-5b5415f9bb54 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Lock "62af556c-c9b1-4de6-bb07-532ba67fa367" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1208.306677] env[61839]: DEBUG oslo_concurrency.lockutils [None req-c09c8899-dd3d-4079-ba71-5b5415f9bb54 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Acquiring lock "62af556c-c9b1-4de6-bb07-532ba67fa367-events" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1208.306874] env[61839]: DEBUG oslo_concurrency.lockutils [None req-c09c8899-dd3d-4079-ba71-5b5415f9bb54 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Lock "62af556c-c9b1-4de6-bb07-532ba67fa367-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1208.307059] env[61839]: DEBUG oslo_concurrency.lockutils [None req-c09c8899-dd3d-4079-ba71-5b5415f9bb54 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Lock "62af556c-c9b1-4de6-bb07-532ba67fa367-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1208.309214] env[61839]: INFO nova.compute.manager [None req-c09c8899-dd3d-4079-ba71-5b5415f9bb54 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 62af556c-c9b1-4de6-bb07-532ba67fa367] Terminating instance [ 1208.310917] env[61839]: DEBUG nova.compute.manager [None req-c09c8899-dd3d-4079-ba71-5b5415f9bb54 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 62af556c-c9b1-4de6-bb07-532ba67fa367] Start destroying the instance on the hypervisor. {{(pid=61839) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1208.311145] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-c09c8899-dd3d-4079-ba71-5b5415f9bb54 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 62af556c-c9b1-4de6-bb07-532ba67fa367] Destroying instance {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1208.311967] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-724e74e1-07b8-4ebe-b27c-78078f5696af {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.319613] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-c09c8899-dd3d-4079-ba71-5b5415f9bb54 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 62af556c-c9b1-4de6-bb07-532ba67fa367] Powering off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1208.319830] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7c94e8df-056a-45ea-b816-cf38203b0f0b {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.326666] env[61839]: DEBUG oslo_vmware.api [None req-c09c8899-dd3d-4079-ba71-5b5415f9bb54 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Waiting for the task: (returnval){ [ 1208.326666] env[61839]: value = "task-1315177" [ 1208.326666] env[61839]: _type = "Task" [ 1208.326666] env[61839]: } to complete. 
{{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1208.334212] env[61839]: DEBUG oslo_vmware.api [None req-c09c8899-dd3d-4079-ba71-5b5415f9bb54 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1315177, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1208.837864] env[61839]: DEBUG oslo_vmware.api [None req-c09c8899-dd3d-4079-ba71-5b5415f9bb54 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1315177, 'name': PowerOffVM_Task, 'duration_secs': 0.172011} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1208.838159] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-c09c8899-dd3d-4079-ba71-5b5415f9bb54 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 62af556c-c9b1-4de6-bb07-532ba67fa367] Powered off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1208.838334] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-c09c8899-dd3d-4079-ba71-5b5415f9bb54 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 62af556c-c9b1-4de6-bb07-532ba67fa367] Unregistering the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1208.838590] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-54feed64-dc38-4b60-b91b-9336f9682f62 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.928431] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-c09c8899-dd3d-4079-ba71-5b5415f9bb54 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 62af556c-c9b1-4de6-bb07-532ba67fa367] Unregistered the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1208.928704] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-c09c8899-dd3d-4079-ba71-5b5415f9bb54 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 62af556c-c9b1-4de6-bb07-532ba67fa367] Deleting contents of the VM from datastore datastore2 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1208.928905] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-c09c8899-dd3d-4079-ba71-5b5415f9bb54 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Deleting the datastore file [datastore2] 62af556c-c9b1-4de6-bb07-532ba67fa367 {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1208.929216] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-43fa28c7-9b4d-4bbb-a64d-dba76741f774 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.935649] env[61839]: DEBUG oslo_vmware.api [None req-c09c8899-dd3d-4079-ba71-5b5415f9bb54 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Waiting for the task: (returnval){ [ 1208.935649] env[61839]: value = "task-1315179" [ 1208.935649] env[61839]: _type = "Task" [ 
1208.935649] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1208.943016] env[61839]: DEBUG oslo_vmware.api [None req-c09c8899-dd3d-4079-ba71-5b5415f9bb54 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1315179, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1209.445850] env[61839]: DEBUG oslo_vmware.api [None req-c09c8899-dd3d-4079-ba71-5b5415f9bb54 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Task: {'id': task-1315179, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.139571} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1209.446801] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-c09c8899-dd3d-4079-ba71-5b5415f9bb54 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Deleted the datastore file {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1209.446801] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-c09c8899-dd3d-4079-ba71-5b5415f9bb54 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 62af556c-c9b1-4de6-bb07-532ba67fa367] Deleted contents of the VM from datastore datastore2 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1209.446801] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-c09c8899-dd3d-4079-ba71-5b5415f9bb54 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 62af556c-c9b1-4de6-bb07-532ba67fa367] Instance destroyed {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1209.447055] env[61839]: INFO nova.compute.manager [None req-c09c8899-dd3d-4079-ba71-5b5415f9bb54 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] [instance: 62af556c-c9b1-4de6-bb07-532ba67fa367] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1209.447188] env[61839]: DEBUG oslo.service.loopingcall [None req-c09c8899-dd3d-4079-ba71-5b5415f9bb54 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1209.447394] env[61839]: DEBUG nova.compute.manager [-] [instance: 62af556c-c9b1-4de6-bb07-532ba67fa367] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1209.447517] env[61839]: DEBUG nova.network.neutron [-] [instance: 62af556c-c9b1-4de6-bb07-532ba67fa367] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1210.120238] env[61839]: DEBUG nova.compute.manager [req-0f9887b4-a64e-4986-a64c-8ab5f2c2ca18 req-ccbd5453-97c0-468f-81f5-d041fcf80021 service nova] [instance: 62af556c-c9b1-4de6-bb07-532ba67fa367] Received event network-vif-deleted-539fa2ab-4763-4c46-ae0d-4db54ad64fa5 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1210.120553] env[61839]: INFO nova.compute.manager [req-0f9887b4-a64e-4986-a64c-8ab5f2c2ca18 req-ccbd5453-97c0-468f-81f5-d041fcf80021 service nova] [instance: 62af556c-c9b1-4de6-bb07-532ba67fa367] Neutron deleted interface 539fa2ab-4763-4c46-ae0d-4db54ad64fa5; detaching it from the instance and deleting it from the info cache [ 1210.120617] env[61839]: DEBUG nova.network.neutron [req-0f9887b4-a64e-4986-a64c-8ab5f2c2ca18 req-ccbd5453-97c0-468f-81f5-d041fcf80021 service nova] [instance: 62af556c-c9b1-4de6-bb07-532ba67fa367] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1210.598110] env[61839]: DEBUG nova.network.neutron [-] [instance: 62af556c-c9b1-4de6-bb07-532ba67fa367] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1210.624892] env[61839]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-692b3a74-91b0-41b4-b6ad-e55a46b31ffd {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.635566] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12eedf7b-7d17-4ec6-bfef-53861ea0bfe8 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.661495] env[61839]: DEBUG nova.compute.manager [req-0f9887b4-a64e-4986-a64c-8ab5f2c2ca18 req-ccbd5453-97c0-468f-81f5-d041fcf80021 service nova] [instance: 62af556c-c9b1-4de6-bb07-532ba67fa367] Detach interface failed, port_id=539fa2ab-4763-4c46-ae0d-4db54ad64fa5, reason: Instance 62af556c-c9b1-4de6-bb07-532ba67fa367 could not be found. {{(pid=61839) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1211.101064] env[61839]: INFO nova.compute.manager [-] [instance: 62af556c-c9b1-4de6-bb07-532ba67fa367] Took 1.65 seconds to deallocate network for instance. 
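With the network gone, teardown turns to accounting: the entries that follow take the "compute_resources" lock for ResourceTracker.update_usage (held 1.628s), re-verify placement inventory, and delete the instance's allocations before the terminate lock is released after 5.456s. Every oslo_concurrency.lockutils line in this log records the same two numbers, waited and held. A minimal sketch of that bookkeeping, assuming plain in-process threading.Lock objects rather than the fair/external locks lockutils actually supports:

import threading
import time
from contextlib import contextmanager

_locks = {}                        # lock name -> threading.Lock
_registry_guard = threading.Lock()

@contextmanager
def timed_lock(name, caller):
    """Log waited/held durations around a named lock, lockutils-style."""
    with _registry_guard:          # create each named lock exactly once
        lock = _locks.setdefault(name, threading.Lock())
    print(f'Acquiring lock "{name}" by "{caller}"')
    t0 = time.monotonic()
    lock.acquire()
    acquired = time.monotonic()
    print(f'Lock "{name}" acquired by "{caller}" :: '
          f'waited {acquired - t0:.3f}s')
    try:
        yield
    finally:
        held = time.monotonic() - acquired
        lock.release()
        print(f'Lock "{name}" "released" by "{caller}" :: held {held:.3f}s')

# Usage mirroring the resource-tracker critical section below:
# with timed_lock("compute_resources",
#                 "nova.compute.resource_tracker.ResourceTracker.update_usage"):
#     ...update usage and placement allocations...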
[ 1211.607077] env[61839]: DEBUG oslo_concurrency.lockutils [None req-c09c8899-dd3d-4079-ba71-5b5415f9bb54 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1211.607537] env[61839]: DEBUG oslo_concurrency.lockutils [None req-c09c8899-dd3d-4079-ba71-5b5415f9bb54 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1211.607623] env[61839]: DEBUG nova.objects.instance [None req-c09c8899-dd3d-4079-ba71-5b5415f9bb54 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Lazy-loading 'resources' on Instance uuid 62af556c-c9b1-4de6-bb07-532ba67fa367 {{(pid=61839) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1212.168703] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68beafbd-fe78-4a07-b6ad-c332925c069a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.176166] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fbc79f9-dd17-4e88-90d6-e6ad18cd6470 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.205505] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04f867fd-e042-4863-842b-a0d20ddcae72 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.212855] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e92bee92-9d9b-40d9-8232-4a2102574e1f {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.225585] env[61839]: DEBUG nova.compute.provider_tree [None req-c09c8899-dd3d-4079-ba71-5b5415f9bb54 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1212.728523] env[61839]: DEBUG nova.scheduler.client.report [None req-c09c8899-dd3d-4079-ba71-5b5415f9bb54 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1213.235339] env[61839]: DEBUG oslo_concurrency.lockutils [None req-c09c8899-dd3d-4079-ba71-5b5415f9bb54 
tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.628s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1213.253832] env[61839]: INFO nova.scheduler.client.report [None req-c09c8899-dd3d-4079-ba71-5b5415f9bb54 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Deleted allocations for instance 62af556c-c9b1-4de6-bb07-532ba67fa367 [ 1213.762849] env[61839]: DEBUG oslo_concurrency.lockutils [None req-c09c8899-dd3d-4079-ba71-5b5415f9bb54 tempest-ServerActionsTestOtherB-75710309 tempest-ServerActionsTestOtherB-75710309-project-member] Lock "62af556c-c9b1-4de6-bb07-532ba67fa367" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.456s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1222.755103] env[61839]: DEBUG oslo_concurrency.lockutils [None req-96ac087b-c510-41d8-ab59-88f07b420951 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Acquiring lock "af3978c4-3bd4-47da-a188-954bd6385183" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1222.755103] env[61839]: DEBUG oslo_concurrency.lockutils [None req-96ac087b-c510-41d8-ab59-88f07b420951 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Lock "af3978c4-3bd4-47da-a188-954bd6385183" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1222.755103] env[61839]: DEBUG oslo_concurrency.lockutils [None req-96ac087b-c510-41d8-ab59-88f07b420951 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Acquiring lock "af3978c4-3bd4-47da-a188-954bd6385183-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1222.755103] env[61839]: DEBUG oslo_concurrency.lockutils [None req-96ac087b-c510-41d8-ab59-88f07b420951 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Lock "af3978c4-3bd4-47da-a188-954bd6385183-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1222.755491] env[61839]: DEBUG oslo_concurrency.lockutils [None req-96ac087b-c510-41d8-ab59-88f07b420951 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Lock "af3978c4-3bd4-47da-a188-954bd6385183-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1222.758324] env[61839]: INFO nova.compute.manager [None req-96ac087b-c510-41d8-ab59-88f07b420951 tempest-ServerActionsTestOtherA-731553444 
tempest-ServerActionsTestOtherA-731553444-project-member] [instance: af3978c4-3bd4-47da-a188-954bd6385183] Terminating instance [ 1222.759495] env[61839]: DEBUG nova.compute.manager [None req-96ac087b-c510-41d8-ab59-88f07b420951 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: af3978c4-3bd4-47da-a188-954bd6385183] Start destroying the instance on the hypervisor. {{(pid=61839) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1222.759711] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-96ac087b-c510-41d8-ab59-88f07b420951 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: af3978c4-3bd4-47da-a188-954bd6385183] Powering off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1222.759952] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0f31e11a-e57b-4897-9812-8fa5ef7613c9 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.768048] env[61839]: DEBUG oslo_vmware.api [None req-96ac087b-c510-41d8-ab59-88f07b420951 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Waiting for the task: (returnval){ [ 1222.768048] env[61839]: value = "task-1315181" [ 1222.768048] env[61839]: _type = "Task" [ 1222.768048] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1222.776115] env[61839]: DEBUG oslo_vmware.api [None req-96ac087b-c510-41d8-ab59-88f07b420951 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1315181, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1222.896581] env[61839]: DEBUG oslo_concurrency.lockutils [None req-9df096dc-bb50-4537-a62f-333451333433 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Acquiring lock "b0908d71-8a80-475d-b2c4-ba68ad93a91d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1222.896880] env[61839]: DEBUG oslo_concurrency.lockutils [None req-9df096dc-bb50-4537-a62f-333451333433 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Lock "b0908d71-8a80-475d-b2c4-ba68ad93a91d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1223.277734] env[61839]: DEBUG oslo_vmware.api [None req-96ac087b-c510-41d8-ab59-88f07b420951 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1315181, 'name': PowerOffVM_Task, 'duration_secs': 0.175129} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1223.277996] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-96ac087b-c510-41d8-ab59-88f07b420951 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: af3978c4-3bd4-47da-a188-954bd6385183] Powered off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1223.278218] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-96ac087b-c510-41d8-ab59-88f07b420951 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: af3978c4-3bd4-47da-a188-954bd6385183] Volume detach. Driver type: vmdk {{(pid=61839) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1223.278427] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-96ac087b-c510-41d8-ab59-88f07b420951 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: af3978c4-3bd4-47da-a188-954bd6385183] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-281464', 'volume_id': '14ce5e6a-8a86-4817-b00b-46cc57cbdfa8', 'name': 'volume-14ce5e6a-8a86-4817-b00b-46cc57cbdfa8', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attaching', 'instance': 'af3978c4-3bd4-47da-a188-954bd6385183', 'attached_at': '2024-10-18T17:04:03.000000', 'detached_at': '', 'volume_id': '14ce5e6a-8a86-4817-b00b-46cc57cbdfa8', 'serial': '14ce5e6a-8a86-4817-b00b-46cc57cbdfa8'} {{(pid=61839) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1223.279210] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33d1beea-829c-4186-b5c5-a4d456982dd8 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.302637] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fb33914-9488-4315-b8d4-43eb5ce7f8d3 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.310293] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-919222d7-89f9-4a9b-a8e9-b0ad7e3dcf60 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.333539] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b00d0aa2-46c7-41c4-9abd-d291ecf54ee7 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.351621] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-96ac087b-c510-41d8-ab59-88f07b420951 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] The volume has not been displaced from its original location: [datastore2] volume-14ce5e6a-8a86-4817-b00b-46cc57cbdfa8/volume-14ce5e6a-8a86-4817-b00b-46cc57cbdfa8.vmdk. No consolidation needed. 
{{(pid=61839) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1223.357372] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-96ac087b-c510-41d8-ab59-88f07b420951 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: af3978c4-3bd4-47da-a188-954bd6385183] Reconfiguring VM instance instance-00000070 to detach disk 2000 {{(pid=61839) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1223.357854] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5ae290db-81fe-4a3b-9349-8c002ae2388d {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.377928] env[61839]: DEBUG oslo_vmware.api [None req-96ac087b-c510-41d8-ab59-88f07b420951 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Waiting for the task: (returnval){ [ 1223.377928] env[61839]: value = "task-1315182" [ 1223.377928] env[61839]: _type = "Task" [ 1223.377928] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1223.387023] env[61839]: DEBUG oslo_vmware.api [None req-96ac087b-c510-41d8-ab59-88f07b420951 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1315182, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1223.400030] env[61839]: DEBUG nova.compute.manager [None req-9df096dc-bb50-4537-a62f-333451333433 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] [instance: b0908d71-8a80-475d-b2c4-ba68ad93a91d] Starting instance... {{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1223.887902] env[61839]: DEBUG oslo_vmware.api [None req-96ac087b-c510-41d8-ab59-88f07b420951 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1315182, 'name': ReconfigVM_Task, 'duration_secs': 0.157276} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1223.888791] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-96ac087b-c510-41d8-ab59-88f07b420951 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: af3978c4-3bd4-47da-a188-954bd6385183] Reconfigured VM instance instance-00000070 to detach disk 2000 {{(pid=61839) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1223.893522] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-abaf3ac5-e221-4b3d-ac47-7faa5a8cf890 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.908873] env[61839]: DEBUG oslo_vmware.api [None req-96ac087b-c510-41d8-ab59-88f07b420951 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Waiting for the task: (returnval){ [ 1223.908873] env[61839]: value = "task-1315183" [ 1223.908873] env[61839]: _type = "Task" [ 1223.908873] env[61839]: } to complete. 
{{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1223.916723] env[61839]: DEBUG oslo_vmware.api [None req-96ac087b-c510-41d8-ab59-88f07b420951 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1315183, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1223.920426] env[61839]: DEBUG oslo_concurrency.lockutils [None req-9df096dc-bb50-4537-a62f-333451333433 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1223.920665] env[61839]: DEBUG oslo_concurrency.lockutils [None req-9df096dc-bb50-4537-a62f-333451333433 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1223.922272] env[61839]: INFO nova.compute.claims [None req-9df096dc-bb50-4537-a62f-333451333433 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] [instance: b0908d71-8a80-475d-b2c4-ba68ad93a91d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1224.419285] env[61839]: DEBUG oslo_vmware.api [None req-96ac087b-c510-41d8-ab59-88f07b420951 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1315183, 'name': ReconfigVM_Task, 'duration_secs': 0.393674} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1224.419595] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-96ac087b-c510-41d8-ab59-88f07b420951 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: af3978c4-3bd4-47da-a188-954bd6385183] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-281464', 'volume_id': '14ce5e6a-8a86-4817-b00b-46cc57cbdfa8', 'name': 'volume-14ce5e6a-8a86-4817-b00b-46cc57cbdfa8', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attaching', 'instance': 'af3978c4-3bd4-47da-a188-954bd6385183', 'attached_at': '2024-10-18T17:04:03.000000', 'detached_at': '', 'volume_id': '14ce5e6a-8a86-4817-b00b-46cc57cbdfa8', 'serial': '14ce5e6a-8a86-4817-b00b-46cc57cbdfa8'} {{(pid=61839) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1224.419883] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-96ac087b-c510-41d8-ab59-88f07b420951 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: af3978c4-3bd4-47da-a188-954bd6385183] Destroying instance {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1224.420682] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5739178a-e014-4e7a-a877-76f7e4eb1bd5 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.429434] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-96ac087b-c510-41d8-ab59-88f07b420951 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: af3978c4-3bd4-47da-a188-954bd6385183] Unregistering the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1224.429875] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9327aeaa-d604-45ea-8502-f7e63746c0de {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.495470] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-96ac087b-c510-41d8-ab59-88f07b420951 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: af3978c4-3bd4-47da-a188-954bd6385183] Unregistered the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1224.495811] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-96ac087b-c510-41d8-ab59-88f07b420951 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: af3978c4-3bd4-47da-a188-954bd6385183] Deleting contents of the VM from datastore datastore2 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1224.496031] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-96ac087b-c510-41d8-ab59-88f07b420951 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Deleting the datastore file [datastore2] af3978c4-3bd4-47da-a188-954bd6385183 {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1224.496299] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8baa4aa2-ca2f-4dbe-8229-51bde7c5eb7f {{(pid=61839) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.502816] env[61839]: DEBUG oslo_vmware.api [None req-96ac087b-c510-41d8-ab59-88f07b420951 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Waiting for the task: (returnval){ [ 1224.502816] env[61839]: value = "task-1315185" [ 1224.502816] env[61839]: _type = "Task" [ 1224.502816] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1224.510832] env[61839]: DEBUG oslo_vmware.api [None req-96ac087b-c510-41d8-ab59-88f07b420951 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1315185, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1224.988759] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93ed39ff-ce84-4a98-a1a8-3d7bdb62732c {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.997729] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b78369b-e7f0-4c47-9534-6abc2ba89c58 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.029133] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c21b9d4f-58d5-49eb-8202-a0da6c41b1ab {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.035870] env[61839]: DEBUG oslo_vmware.api [None req-96ac087b-c510-41d8-ab59-88f07b420951 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1315185, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.082581} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1225.037810] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-96ac087b-c510-41d8-ab59-88f07b420951 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Deleted the datastore file {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1225.038028] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-96ac087b-c510-41d8-ab59-88f07b420951 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: af3978c4-3bd4-47da-a188-954bd6385183] Deleted contents of the VM from datastore datastore2 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1225.038254] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-96ac087b-c510-41d8-ab59-88f07b420951 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: af3978c4-3bd4-47da-a188-954bd6385183] Instance destroyed {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1225.038469] env[61839]: INFO nova.compute.manager [None req-96ac087b-c510-41d8-ab59-88f07b420951 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: af3978c4-3bd4-47da-a188-954bd6385183] Took 2.28 seconds to destroy the instance on the hypervisor. 
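The destroy sequence in the records above (PowerOffVM_Task, ReconfigVM_Task to detach disk 2000, UnregisterVM, DeleteDatastoreFile_Task, each followed by "Waiting for the task ... to complete" and progress polls) is the standard oslo.vmware pattern: invoke_api() issues the SOAP call against vCenter and wait_for_task() polls the returned Task until it succeeds or raises. A condensed sketch, assuming session is an oslo_vmware.api.VMwareAPISession and that vm_ref, ds_path and dc_ref were looked up elsewhere; error handling and the volume-detach reconfigure step are omitted:

    # Sketch only, not Nova's vmops code.
    def destroy_vm(session, vm_ref, ds_path, dc_ref):
        # Power off, then poll the returned Task moref to completion.
        task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
        session.wait_for_task(task)
        # UnregisterVM is synchronous -- no Task to wait on.
        session.invoke_api(session.vim, 'UnregisterVM', vm_ref)
        # Delete the VM's directory, e.g. '[datastore2] <instance-uuid>'.
        file_manager = session.vim.service_content.fileManager
        task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                                  file_manager, name=ds_path, datacenter=dc_ref)
        session.wait_for_task(task)

wait_for_task() is also what produces the "progress is N%" and "completed successfully" records, polling at the session's task_poll_interval.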
[ 1225.038732] env[61839]: DEBUG oslo.service.loopingcall [None req-96ac087b-c510-41d8-ab59-88f07b420951 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1225.038981] env[61839]: DEBUG nova.compute.manager [-] [instance: af3978c4-3bd4-47da-a188-954bd6385183] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1225.039094] env[61839]: DEBUG nova.network.neutron [-] [instance: af3978c4-3bd4-47da-a188-954bd6385183] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1225.041426] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89ee7a41-f4f9-4467-8cf3-e898773c480f {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.054418] env[61839]: DEBUG nova.compute.provider_tree [None req-9df096dc-bb50-4537-a62f-333451333433 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Updating inventory in ProviderTree for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1225.438946] env[61839]: DEBUG nova.compute.manager [req-c92af570-97ef-4055-8d97-ef05b073a15b req-783bd196-9738-43da-9e27-64a2a566ec37 service nova] [instance: af3978c4-3bd4-47da-a188-954bd6385183] Received event network-vif-deleted-9e2d05ef-d36f-4c9a-980d-457ebeb1e2af {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1225.439010] env[61839]: INFO nova.compute.manager [req-c92af570-97ef-4055-8d97-ef05b073a15b req-783bd196-9738-43da-9e27-64a2a566ec37 service nova] [instance: af3978c4-3bd4-47da-a188-954bd6385183] Neutron deleted interface 9e2d05ef-d36f-4c9a-980d-457ebeb1e2af; detaching it from the instance and deleting it from the info cache [ 1225.439210] env[61839]: DEBUG nova.network.neutron [req-c92af570-97ef-4055-8d97-ef05b073a15b req-783bd196-9738-43da-9e27-64a2a566ec37 service nova] [instance: af3978c4-3bd4-47da-a188-954bd6385183] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1225.574798] env[61839]: ERROR nova.scheduler.client.report [None req-9df096dc-bb50-4537-a62f-333451333433 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] [req-f9f64196-8fe3-4c59-b398-b1cf5807f0fe] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}}] for 
resource provider with UUID cef329e6-1ccd-42a8-bbc4-109a06d1c908. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-f9f64196-8fe3-4c59-b398-b1cf5807f0fe"}]} [ 1225.591030] env[61839]: DEBUG nova.scheduler.client.report [None req-9df096dc-bb50-4537-a62f-333451333433 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Refreshing inventories for resource provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1225.603429] env[61839]: DEBUG nova.scheduler.client.report [None req-9df096dc-bb50-4537-a62f-333451333433 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Updating ProviderTree inventory for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1225.603651] env[61839]: DEBUG nova.compute.provider_tree [None req-9df096dc-bb50-4537-a62f-333451333433 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Updating inventory in ProviderTree for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1225.614137] env[61839]: DEBUG nova.scheduler.client.report [None req-9df096dc-bb50-4537-a62f-333451333433 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Refreshing aggregate associations for resource provider cef329e6-1ccd-42a8-bbc4-109a06d1c908, aggregates: None {{(pid=61839) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1225.634262] env[61839]: DEBUG nova.scheduler.client.report [None req-9df096dc-bb50-4537-a62f-333451333433 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Refreshing trait associations for resource provider cef329e6-1ccd-42a8-bbc4-109a06d1c908, traits: COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=61839) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1225.688652] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ba6acca-8a2e-47fa-8d53-ba3272ae9e3d {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.696194] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-7ab7b615-63d7-4700-a70d-dc1b1d1f161f {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.726680] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a9332d8-8a1d-4316-9f96-3fe7c58469f0 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.733505] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7696d3aa-8f90-4bf5-b9b1-184ecaba0b87 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.746401] env[61839]: DEBUG nova.compute.provider_tree [None req-9df096dc-bb50-4537-a62f-333451333433 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Updating inventory in ProviderTree for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1225.913374] env[61839]: DEBUG nova.network.neutron [-] [instance: af3978c4-3bd4-47da-a188-954bd6385183] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1225.942157] env[61839]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a888226a-3e31-471d-9160-086f8a6a95f1 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.952226] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b7ad255-afb9-4709-ae2d-97516c0a5643 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.975375] env[61839]: DEBUG nova.compute.manager [req-c92af570-97ef-4055-8d97-ef05b073a15b req-783bd196-9738-43da-9e27-64a2a566ec37 service nova] [instance: af3978c4-3bd4-47da-a188-954bd6385183] Detach interface failed, port_id=9e2d05ef-d36f-4c9a-980d-457ebeb1e2af, reason: Instance af3978c4-3bd4-47da-a188-954bd6385183 could not be found. 
{{(pid=61839) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1226.275212] env[61839]: DEBUG nova.scheduler.client.report [None req-9df096dc-bb50-4537-a62f-333451333433 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Updated inventory for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 with generation 148 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1226.275495] env[61839]: DEBUG nova.compute.provider_tree [None req-9df096dc-bb50-4537-a62f-333451333433 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Updating resource provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 generation from 148 to 149 during operation: update_inventory {{(pid=61839) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1226.275685] env[61839]: DEBUG nova.compute.provider_tree [None req-9df096dc-bb50-4537-a62f-333451333433 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Updating inventory in ProviderTree for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1226.417353] env[61839]: INFO nova.compute.manager [-] [instance: af3978c4-3bd4-47da-a188-954bd6385183] Took 1.38 seconds to deallocate network for instance. [ 1226.780742] env[61839]: DEBUG oslo_concurrency.lockutils [None req-9df096dc-bb50-4537-a62f-333451333433 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.860s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1226.781331] env[61839]: DEBUG nova.compute.manager [None req-9df096dc-bb50-4537-a62f-333451333433 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] [instance: b0908d71-8a80-475d-b2c4-ba68ad93a91d] Start building networks asynchronously for instance. {{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1226.960400] env[61839]: INFO nova.compute.manager [None req-96ac087b-c510-41d8-ab59-88f07b420951 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: af3978c4-3bd4-47da-a188-954bd6385183] Took 0.54 seconds to detach 1 volumes for instance. 
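The 409 in the records above is placement's optimistic-concurrency check: every inventory PUT carries the resource_provider_generation the writer last saw, and "placement.concurrent_update" means another writer bumped it first. The report client's response, also visible above, is to refresh inventories, aggregates and traits and retry, after which provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 moves from generation 148 to 149. A bare-bones sketch of that handshake against the placement REST API; the endpoint, token and retry-once policy here are illustrative, not Nova's report client:

    import requests

    PLACEMENT = 'http://placement.example.org/placement'  # hypothetical endpoint
    HEADERS = {'X-Auth-Token': 'REDACTED'}                # normally from Keystone

    def _generation(rp_uuid):
        url = '%s/resource_providers/%s' % (PLACEMENT, rp_uuid)
        return requests.get(url, headers=HEADERS).json()['generation']

    def set_inventories(rp_uuid, inventories):
        url = '%s/resource_providers/%s/inventories' % (PLACEMENT, rp_uuid)
        body = {'resource_provider_generation': _generation(rp_uuid),
                'inventories': inventories}
        resp = requests.put(url, json=body, headers=HEADERS)
        if resp.status_code == 409 and 'placement.concurrent_update' in resp.text:
            # Lost the race: re-read the generation and try once more.
            body['resource_provider_generation'] = _generation(rp_uuid)
            resp = requests.put(url, json=body, headers=HEADERS)
        resp.raise_for_status()
        return resp.json()

The generation token is what lets two writers (here, the instance_claim for b0908d71 and the concurrent cleanup of af3978c4) update inventory for the same provider without any server-side lock.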
[ 1226.963614] env[61839]: DEBUG nova.compute.manager [None req-96ac087b-c510-41d8-ab59-88f07b420951 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: af3978c4-3bd4-47da-a188-954bd6385183] Deleting volume: 14ce5e6a-8a86-4817-b00b-46cc57cbdfa8 {{(pid=61839) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3247}} [ 1227.285979] env[61839]: DEBUG nova.compute.utils [None req-9df096dc-bb50-4537-a62f-333451333433 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Using /dev/sd instead of None {{(pid=61839) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1227.287442] env[61839]: DEBUG nova.compute.manager [None req-9df096dc-bb50-4537-a62f-333451333433 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] [instance: b0908d71-8a80-475d-b2c4-ba68ad93a91d] Allocating IP information in the background. {{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1227.287644] env[61839]: DEBUG nova.network.neutron [None req-9df096dc-bb50-4537-a62f-333451333433 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] [instance: b0908d71-8a80-475d-b2c4-ba68ad93a91d] allocate_for_instance() {{(pid=61839) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1227.333546] env[61839]: DEBUG nova.policy [None req-9df096dc-bb50-4537-a62f-333451333433 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'eee6a611f68c4330978c45483d386341', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '14891ffcf4e240c7b870ee7dc63f28c7', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61839) authorize /opt/stack/nova/nova/policy.py:201}} [ 1227.501117] env[61839]: DEBUG oslo_concurrency.lockutils [None req-96ac087b-c510-41d8-ab59-88f07b420951 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1227.501397] env[61839]: DEBUG oslo_concurrency.lockutils [None req-96ac087b-c510-41d8-ab59-88f07b420951 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1227.501629] env[61839]: DEBUG nova.objects.instance [None req-96ac087b-c510-41d8-ab59-88f07b420951 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Lazy-loading 'resources' on Instance uuid af3978c4-3bd4-47da-a188-954bd6385183 {{(pid=61839) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1227.595471] env[61839]: DEBUG nova.network.neutron [None req-9df096dc-bb50-4537-a62f-333451333433 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] 
[instance: b0908d71-8a80-475d-b2c4-ba68ad93a91d] Successfully created port: 7be9f7db-5c07-4100-90ea-1b774db19788 {{(pid=61839) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1227.790497] env[61839]: DEBUG nova.compute.manager [None req-9df096dc-bb50-4537-a62f-333451333433 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] [instance: b0908d71-8a80-475d-b2c4-ba68ad93a91d] Start building block device mappings for instance. {{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1228.066662] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b680a30-0df8-4666-b87c-0a2f63f12114 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.074334] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05f7c2bb-3fb8-4b14-a1e4-a5f2bfe29c2e {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.103490] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f6176bd-99db-4b28-ab8b-5a64522253c8 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.110870] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a59d0d6-0a37-4660-ab34-1a0e2393cf7a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.124127] env[61839]: DEBUG nova.compute.provider_tree [None req-96ac087b-c510-41d8-ab59-88f07b420951 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1228.630615] env[61839]: DEBUG nova.scheduler.client.report [None req-96ac087b-c510-41d8-ab59-88f07b420951 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1228.801205] env[61839]: DEBUG nova.compute.manager [None req-9df096dc-bb50-4537-a62f-333451333433 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] [instance: b0908d71-8a80-475d-b2c4-ba68ad93a91d] Start spawning the instance on the hypervisor. 
{{(pid=61839) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1228.826849] env[61839]: DEBUG nova.virt.hardware [None req-9df096dc-bb50-4537-a62f-333451333433 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-18T16:51:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-18T16:50:57Z,direct_url=,disk_format='vmdk',id=e497cc62-282a-4a70-9770-22d80d8a1013,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a98e034276e44534a9feace637762da7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-18T16:50:58Z,virtual_size=,visibility=), allow threads: False {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1228.827160] env[61839]: DEBUG nova.virt.hardware [None req-9df096dc-bb50-4537-a62f-333451333433 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Flavor limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1228.827386] env[61839]: DEBUG nova.virt.hardware [None req-9df096dc-bb50-4537-a62f-333451333433 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Image limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1228.827611] env[61839]: DEBUG nova.virt.hardware [None req-9df096dc-bb50-4537-a62f-333451333433 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Flavor pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1228.827820] env[61839]: DEBUG nova.virt.hardware [None req-9df096dc-bb50-4537-a62f-333451333433 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Image pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1228.828027] env[61839]: DEBUG nova.virt.hardware [None req-9df096dc-bb50-4537-a62f-333451333433 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1228.828287] env[61839]: DEBUG nova.virt.hardware [None req-9df096dc-bb50-4537-a62f-333451333433 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1228.828518] env[61839]: DEBUG nova.virt.hardware [None req-9df096dc-bb50-4537-a62f-333451333433 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1228.828738] env[61839]: DEBUG nova.virt.hardware [None 
req-9df096dc-bb50-4537-a62f-333451333433 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Got 1 possible topologies {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1228.828945] env[61839]: DEBUG nova.virt.hardware [None req-9df096dc-bb50-4537-a62f-333451333433 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1228.829181] env[61839]: DEBUG nova.virt.hardware [None req-9df096dc-bb50-4537-a62f-333451333433 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1228.830052] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db1bd08c-f021-46f7-a819-c3d07b73dc95 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.837858] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b9a6ffd-c662-4e79-950b-897c55b2f19d {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.137919] env[61839]: DEBUG oslo_concurrency.lockutils [None req-96ac087b-c510-41d8-ab59-88f07b420951 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.636s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1229.155238] env[61839]: INFO nova.scheduler.client.report [None req-96ac087b-c510-41d8-ab59-88f07b420951 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Deleted allocations for instance af3978c4-3bd4-47da-a188-954bd6385183 [ 1229.227496] env[61839]: DEBUG nova.compute.manager [req-80ff44b9-c40b-49e9-b5be-03945a1484d7 req-27ebeb51-d245-457f-a65e-09b2e1d18442 service nova] [instance: b0908d71-8a80-475d-b2c4-ba68ad93a91d] Received event network-vif-plugged-7be9f7db-5c07-4100-90ea-1b774db19788 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1229.227789] env[61839]: DEBUG oslo_concurrency.lockutils [req-80ff44b9-c40b-49e9-b5be-03945a1484d7 req-27ebeb51-d245-457f-a65e-09b2e1d18442 service nova] Acquiring lock "b0908d71-8a80-475d-b2c4-ba68ad93a91d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1229.228102] env[61839]: DEBUG oslo_concurrency.lockutils [req-80ff44b9-c40b-49e9-b5be-03945a1484d7 req-27ebeb51-d245-457f-a65e-09b2e1d18442 service nova] Lock "b0908d71-8a80-475d-b2c4-ba68ad93a91d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1229.228369] env[61839]: DEBUG oslo_concurrency.lockutils [req-80ff44b9-c40b-49e9-b5be-03945a1484d7 req-27ebeb51-d245-457f-a65e-09b2e1d18442 service nova] Lock 
"b0908d71-8a80-475d-b2c4-ba68ad93a91d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1229.228658] env[61839]: DEBUG nova.compute.manager [req-80ff44b9-c40b-49e9-b5be-03945a1484d7 req-27ebeb51-d245-457f-a65e-09b2e1d18442 service nova] [instance: b0908d71-8a80-475d-b2c4-ba68ad93a91d] No waiting events found dispatching network-vif-plugged-7be9f7db-5c07-4100-90ea-1b774db19788 {{(pid=61839) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1229.229056] env[61839]: WARNING nova.compute.manager [req-80ff44b9-c40b-49e9-b5be-03945a1484d7 req-27ebeb51-d245-457f-a65e-09b2e1d18442 service nova] [instance: b0908d71-8a80-475d-b2c4-ba68ad93a91d] Received unexpected event network-vif-plugged-7be9f7db-5c07-4100-90ea-1b774db19788 for instance with vm_state building and task_state spawning. [ 1229.333242] env[61839]: DEBUG nova.network.neutron [None req-9df096dc-bb50-4537-a62f-333451333433 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] [instance: b0908d71-8a80-475d-b2c4-ba68ad93a91d] Successfully updated port: 7be9f7db-5c07-4100-90ea-1b774db19788 {{(pid=61839) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1229.663160] env[61839]: DEBUG oslo_concurrency.lockutils [None req-96ac087b-c510-41d8-ab59-88f07b420951 tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Lock "af3978c4-3bd4-47da-a188-954bd6385183" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.908s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1229.835943] env[61839]: DEBUG oslo_concurrency.lockutils [None req-9df096dc-bb50-4537-a62f-333451333433 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Acquiring lock "refresh_cache-b0908d71-8a80-475d-b2c4-ba68ad93a91d" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1229.836167] env[61839]: DEBUG oslo_concurrency.lockutils [None req-9df096dc-bb50-4537-a62f-333451333433 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Acquired lock "refresh_cache-b0908d71-8a80-475d-b2c4-ba68ad93a91d" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1229.836272] env[61839]: DEBUG nova.network.neutron [None req-9df096dc-bb50-4537-a62f-333451333433 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] [instance: b0908d71-8a80-475d-b2c4-ba68ad93a91d] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1230.366048] env[61839]: DEBUG nova.network.neutron [None req-9df096dc-bb50-4537-a62f-333451333433 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] [instance: b0908d71-8a80-475d-b2c4-ba68ad93a91d] Instance cache missing network info. 
{{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 1230.484222] env[61839]: DEBUG nova.network.neutron [None req-9df096dc-bb50-4537-a62f-333451333433 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] [instance: b0908d71-8a80-475d-b2c4-ba68ad93a91d] Updating instance_info_cache with network_info: [{"id": "7be9f7db-5c07-4100-90ea-1b774db19788", "address": "fa:16:3e:7f:80:33", "network": {"id": "10bd9432-d418-491e-9a5f-d1bd6d8de00d", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-220780264-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "14891ffcf4e240c7b870ee7dc63f28c7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "35ac9709-fd8b-4630-897a-68ed629d1b11", "external-id": "nsx-vlan-transportzone-284", "segmentation_id": 284, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7be9f7db-5c", "ovs_interfaceid": "7be9f7db-5c07-4100-90ea-1b774db19788", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1230.555504] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3c5bfb08-1e14-4068-a914-e4d0121bdc0d tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Acquiring lock "3e153d8a-e069-443c-9db4-7614a6475971" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1230.555801] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3c5bfb08-1e14-4068-a914-e4d0121bdc0d tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Lock "3e153d8a-e069-443c-9db4-7614a6475971" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1230.556037] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3c5bfb08-1e14-4068-a914-e4d0121bdc0d tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Acquiring lock "3e153d8a-e069-443c-9db4-7614a6475971-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1230.556231] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3c5bfb08-1e14-4068-a914-e4d0121bdc0d tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Lock "3e153d8a-e069-443c-9db4-7614a6475971-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1230.556405] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3c5bfb08-1e14-4068-a914-e4d0121bdc0d tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Lock "3e153d8a-e069-443c-9db4-7614a6475971-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1230.558467] env[61839]: INFO nova.compute.manager [None req-3c5bfb08-1e14-4068-a914-e4d0121bdc0d tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 3e153d8a-e069-443c-9db4-7614a6475971] Terminating instance
[ 1230.560244] env[61839]: DEBUG nova.compute.manager [None req-3c5bfb08-1e14-4068-a914-e4d0121bdc0d tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 3e153d8a-e069-443c-9db4-7614a6475971] Start destroying the instance on the hypervisor. {{(pid=61839) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}}
[ 1230.560448] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-3c5bfb08-1e14-4068-a914-e4d0121bdc0d tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 3e153d8a-e069-443c-9db4-7614a6475971] Destroying instance {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}}
[ 1230.561842] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33663b47-3f16-4d58-a97b-7b2ea282b980 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1230.570134] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-3c5bfb08-1e14-4068-a914-e4d0121bdc0d tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 3e153d8a-e069-443c-9db4-7614a6475971] Powering off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}}
[ 1230.570368] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b3d8690e-fc1d-4abc-a31f-b0df109556e5 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1230.576626] env[61839]: DEBUG oslo_vmware.api [None req-3c5bfb08-1e14-4068-a914-e4d0121bdc0d tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Waiting for the task: (returnval){
[ 1230.576626] env[61839]: value = "task-1315187"
[ 1230.576626] env[61839]: _type = "Task"
[ 1230.576626] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1230.583903] env[61839]: DEBUG oslo_vmware.api [None req-3c5bfb08-1e14-4068-a914-e4d0121bdc0d tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1315187, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1230.987213] env[61839]: DEBUG oslo_concurrency.lockutils [None req-9df096dc-bb50-4537-a62f-333451333433 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Releasing lock "refresh_cache-b0908d71-8a80-475d-b2c4-ba68ad93a91d" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1230.987571] env[61839]: DEBUG nova.compute.manager [None req-9df096dc-bb50-4537-a62f-333451333433 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] [instance: b0908d71-8a80-475d-b2c4-ba68ad93a91d] Instance network_info: |[{"id": "7be9f7db-5c07-4100-90ea-1b774db19788", "address": "fa:16:3e:7f:80:33", "network": {"id": "10bd9432-d418-491e-9a5f-d1bd6d8de00d", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-220780264-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "14891ffcf4e240c7b870ee7dc63f28c7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "35ac9709-fd8b-4630-897a-68ed629d1b11", "external-id": "nsx-vlan-transportzone-284", "segmentation_id": 284, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7be9f7db-5c", "ovs_interfaceid": "7be9f7db-5c07-4100-90ea-1b774db19788", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}}
[ 1230.988050] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-9df096dc-bb50-4537-a62f-333451333433 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] [instance: b0908d71-8a80-475d-b2c4-ba68ad93a91d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7f:80:33', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '35ac9709-fd8b-4630-897a-68ed629d1b11', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7be9f7db-5c07-4100-90ea-1b774db19788', 'vif_model': 'vmxnet3'}] {{(pid=61839) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}}
[ 1230.995602] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-9df096dc-bb50-4537-a62f-333451333433 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Creating folder: Project (14891ffcf4e240c7b870ee7dc63f28c7). Parent ref: group-v281288. {{(pid=61839) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}}
[ 1230.995884] env[61839]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2b60c3a9-07a3-43a7-95a9-88fd23138826 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1231.007321] env[61839]: INFO nova.virt.vmwareapi.vm_util [None req-9df096dc-bb50-4537-a62f-333451333433 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Created folder: Project (14891ffcf4e240c7b870ee7dc63f28c7) in parent group-v281288.
[ 1231.007504] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-9df096dc-bb50-4537-a62f-333451333433 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Creating folder: Instances. Parent ref: group-v281470. {{(pid=61839) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}}
[ 1231.007764] env[61839]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-039c46ff-5e64-4603-b4c4-bea2af4b74c2 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1231.015887] env[61839]: INFO nova.virt.vmwareapi.vm_util [None req-9df096dc-bb50-4537-a62f-333451333433 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Created folder: Instances in parent group-v281470.
[ 1231.016124] env[61839]: DEBUG oslo.service.loopingcall [None req-9df096dc-bb50-4537-a62f-333451333433 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 1231.016312] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b0908d71-8a80-475d-b2c4-ba68ad93a91d] Creating VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}}
[ 1231.016502] env[61839]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3016c11a-f034-468f-955e-0195bb64c57f {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1231.036126] env[61839]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){
[ 1231.036126] env[61839]: value = "task-1315190"
[ 1231.036126] env[61839]: _type = "Task"
[ 1231.036126] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1231.043213] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1315190, 'name': CreateVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1231.085325] env[61839]: DEBUG oslo_vmware.api [None req-3c5bfb08-1e14-4068-a914-e4d0121bdc0d tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1315187, 'name': PowerOffVM_Task, 'duration_secs': 0.1837} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1231.085533] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-3c5bfb08-1e14-4068-a914-e4d0121bdc0d tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 3e153d8a-e069-443c-9db4-7614a6475971] Powered off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}}
[ 1231.085705] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-3c5bfb08-1e14-4068-a914-e4d0121bdc0d tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 3e153d8a-e069-443c-9db4-7614a6475971] Unregistering the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}}
[ 1231.085940] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5e8936d6-b9dc-433e-94bd-7486f6667028 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1231.155577] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-3c5bfb08-1e14-4068-a914-e4d0121bdc0d tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 3e153d8a-e069-443c-9db4-7614a6475971] Unregistered the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}}
[ 1231.155833] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-3c5bfb08-1e14-4068-a914-e4d0121bdc0d tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 3e153d8a-e069-443c-9db4-7614a6475971] Deleting contents of the VM from datastore datastore2 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}}
[ 1231.156019] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-3c5bfb08-1e14-4068-a914-e4d0121bdc0d tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Deleting the datastore file [datastore2] 3e153d8a-e069-443c-9db4-7614a6475971 {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}}
[ 1231.156316] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-92f24b0a-7ad1-4420-877a-dd07cf03fd15 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1231.162660] env[61839]: DEBUG oslo_vmware.api [None req-3c5bfb08-1e14-4068-a914-e4d0121bdc0d tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Waiting for the task: (returnval){
[ 1231.162660] env[61839]: value = "task-1315192"
[ 1231.162660] env[61839]: _type = "Task"
[ 1231.162660] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1231.170224] env[61839]: DEBUG oslo_vmware.api [None req-3c5bfb08-1e14-4068-a914-e4d0121bdc0d tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1315192, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1231.309146] env[61839]: DEBUG nova.compute.manager [req-963b0528-ffd6-44be-a554-c6c930f995cb req-9c7c273a-590a-4ab5-885f-6bec5830e850 service nova] [instance: b0908d71-8a80-475d-b2c4-ba68ad93a91d] Received event network-changed-7be9f7db-5c07-4100-90ea-1b774db19788 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}}
[ 1231.309401] env[61839]: DEBUG nova.compute.manager [req-963b0528-ffd6-44be-a554-c6c930f995cb req-9c7c273a-590a-4ab5-885f-6bec5830e850 service nova] [instance: b0908d71-8a80-475d-b2c4-ba68ad93a91d] Refreshing instance network info cache due to event network-changed-7be9f7db-5c07-4100-90ea-1b774db19788. {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}}
[ 1231.309649] env[61839]: DEBUG oslo_concurrency.lockutils [req-963b0528-ffd6-44be-a554-c6c930f995cb req-9c7c273a-590a-4ab5-885f-6bec5830e850 service nova] Acquiring lock "refresh_cache-b0908d71-8a80-475d-b2c4-ba68ad93a91d" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1231.309745] env[61839]: DEBUG oslo_concurrency.lockutils [req-963b0528-ffd6-44be-a554-c6c930f995cb req-9c7c273a-590a-4ab5-885f-6bec5830e850 service nova] Acquired lock "refresh_cache-b0908d71-8a80-475d-b2c4-ba68ad93a91d" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1231.309948] env[61839]: DEBUG nova.network.neutron [req-963b0528-ffd6-44be-a554-c6c930f995cb req-9c7c273a-590a-4ab5-885f-6bec5830e850 service nova] [instance: b0908d71-8a80-475d-b2c4-ba68ad93a91d] Refreshing network info cache for port 7be9f7db-5c07-4100-90ea-1b774db19788 {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}}
[ 1231.545960] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1315190, 'name': CreateVM_Task} progress is 99%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1231.672819] env[61839]: DEBUG oslo_vmware.api [None req-3c5bfb08-1e14-4068-a914-e4d0121bdc0d tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1315192, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.146847} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1231.673083] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-3c5bfb08-1e14-4068-a914-e4d0121bdc0d tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Deleted the datastore file {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}}
[ 1231.673280] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-3c5bfb08-1e14-4068-a914-e4d0121bdc0d tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 3e153d8a-e069-443c-9db4-7614a6475971] Deleted contents of the VM from datastore datastore2 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}}
[ 1231.673465] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-3c5bfb08-1e14-4068-a914-e4d0121bdc0d tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 3e153d8a-e069-443c-9db4-7614a6475971] Instance destroyed {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}}
[ 1231.673648] env[61839]: INFO nova.compute.manager [None req-3c5bfb08-1e14-4068-a914-e4d0121bdc0d tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 3e153d8a-e069-443c-9db4-7614a6475971] Took 1.11 seconds to destroy the instance on the hypervisor.
[ 1231.673901] env[61839]: DEBUG oslo.service.loopingcall [None req-3c5bfb08-1e14-4068-a914-e4d0121bdc0d tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 1231.674122] env[61839]: DEBUG nova.compute.manager [-] [instance: 3e153d8a-e069-443c-9db4-7614a6475971] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}}
[ 1231.674213] env[61839]: DEBUG nova.network.neutron [-] [instance: 3e153d8a-e069-443c-9db4-7614a6475971] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}}
[ 1232.027073] env[61839]: DEBUG nova.network.neutron [req-963b0528-ffd6-44be-a554-c6c930f995cb req-9c7c273a-590a-4ab5-885f-6bec5830e850 service nova] [instance: b0908d71-8a80-475d-b2c4-ba68ad93a91d] Updated VIF entry in instance network info cache for port 7be9f7db-5c07-4100-90ea-1b774db19788. {{(pid=61839) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}}
[ 1232.027471] env[61839]: DEBUG nova.network.neutron [req-963b0528-ffd6-44be-a554-c6c930f995cb req-9c7c273a-590a-4ab5-885f-6bec5830e850 service nova] [instance: b0908d71-8a80-475d-b2c4-ba68ad93a91d] Updating instance_info_cache with network_info: [{"id": "7be9f7db-5c07-4100-90ea-1b774db19788", "address": "fa:16:3e:7f:80:33", "network": {"id": "10bd9432-d418-491e-9a5f-d1bd6d8de00d", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-220780264-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "14891ffcf4e240c7b870ee7dc63f28c7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "35ac9709-fd8b-4630-897a-68ed629d1b11", "external-id": "nsx-vlan-transportzone-284", "segmentation_id": 284, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7be9f7db-5c", "ovs_interfaceid": "7be9f7db-5c07-4100-90ea-1b774db19788", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1232.046938] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1315190, 'name': CreateVM_Task, 'duration_secs': 0.8111} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1232.047108] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b0908d71-8a80-475d-b2c4-ba68ad93a91d] Created VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}}
[ 1232.047777] env[61839]: DEBUG oslo_concurrency.lockutils [None req-9df096dc-bb50-4537-a62f-333451333433 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1232.047961] env[61839]: DEBUG oslo_concurrency.lockutils [None req-9df096dc-bb50-4537-a62f-333451333433 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1232.048329] env[61839]: DEBUG oslo_concurrency.lockutils [None req-9df096dc-bb50-4537-a62f-333451333433 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 1232.048582] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-26ab6091-0d8c-49a4-b021-0909b7631567 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1232.053007] env[61839]: DEBUG oslo_vmware.api [None req-9df096dc-bb50-4537-a62f-333451333433 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Waiting for the task: (returnval){
[ 1232.053007] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]520d79ff-f481-4f27-31f6-9ca27a404cd2"
[ 1232.053007] env[61839]: _type = "Task"
[ 1232.053007] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1232.061063] env[61839]: DEBUG oslo_vmware.api [None req-9df096dc-bb50-4537-a62f-333451333433 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]520d79ff-f481-4f27-31f6-9ca27a404cd2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1232.400940] env[61839]: DEBUG nova.network.neutron [-] [instance: 3e153d8a-e069-443c-9db4-7614a6475971] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1232.529944] env[61839]: DEBUG oslo_concurrency.lockutils [req-963b0528-ffd6-44be-a554-c6c930f995cb req-9c7c273a-590a-4ab5-885f-6bec5830e850 service nova] Releasing lock "refresh_cache-b0908d71-8a80-475d-b2c4-ba68ad93a91d" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1232.563290] env[61839]: DEBUG oslo_vmware.api [None req-9df096dc-bb50-4537-a62f-333451333433 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]520d79ff-f481-4f27-31f6-9ca27a404cd2, 'name': SearchDatastore_Task, 'duration_secs': 0.00973} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1232.563584] env[61839]: DEBUG oslo_concurrency.lockutils [None req-9df096dc-bb50-4537-a62f-333451333433 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1232.563814] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-9df096dc-bb50-4537-a62f-333451333433 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] [instance: b0908d71-8a80-475d-b2c4-ba68ad93a91d] Processing image e497cc62-282a-4a70-9770-22d80d8a1013 {{(pid=61839) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}}
[ 1232.564062] env[61839]: DEBUG oslo_concurrency.lockutils [None req-9df096dc-bb50-4537-a62f-333451333433 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1232.564221] env[61839]: DEBUG oslo_concurrency.lockutils [None req-9df096dc-bb50-4537-a62f-333451333433 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1232.564407] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-9df096dc-bb50-4537-a62f-333451333433 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 1232.564659] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bddf692f-b5d2-4aa6-b05c-ccaea125472a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1232.572724] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-9df096dc-bb50-4537-a62f-333451333433 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 1232.572902] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-9df096dc-bb50-4537-a62f-333451333433 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61839) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}}
[ 1232.573596] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3bd0b122-119b-4956-8134-e348928faffa {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1232.578183] env[61839]: DEBUG oslo_vmware.api [None req-9df096dc-bb50-4537-a62f-333451333433 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Waiting for the task: (returnval){
[ 1232.578183] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]528f00d4-f9d5-9cab-7a59-614246814908"
[ 1232.578183] env[61839]: _type = "Task"
[ 1232.578183] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1232.586282] env[61839]: DEBUG oslo_vmware.api [None req-9df096dc-bb50-4537-a62f-333451333433 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]528f00d4-f9d5-9cab-7a59-614246814908, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1232.904595] env[61839]: INFO nova.compute.manager [-] [instance: 3e153d8a-e069-443c-9db4-7614a6475971] Took 1.23 seconds to deallocate network for instance.
[ 1233.088677] env[61839]: DEBUG oslo_vmware.api [None req-9df096dc-bb50-4537-a62f-333451333433 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]528f00d4-f9d5-9cab-7a59-614246814908, 'name': SearchDatastore_Task, 'duration_secs': 0.008031} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1233.089407] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-05bd80d9-153d-4af5-899f-aa3b15c95cfd {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1233.094456] env[61839]: DEBUG oslo_vmware.api [None req-9df096dc-bb50-4537-a62f-333451333433 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Waiting for the task: (returnval){
[ 1233.094456] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]528574c5-8aff-69ba-3705-7ca98fecf44b"
[ 1233.094456] env[61839]: _type = "Task"
[ 1233.094456] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1233.101878] env[61839]: DEBUG oslo_vmware.api [None req-9df096dc-bb50-4537-a62f-333451333433 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]528574c5-8aff-69ba-3705-7ca98fecf44b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1233.338951] env[61839]: DEBUG nova.compute.manager [req-d228690d-9614-44d6-ae07-342a2e121fc0 req-1469a1a3-81ec-4d3d-95b4-29328932f3a2 service nova] [instance: 3e153d8a-e069-443c-9db4-7614a6475971] Received event network-vif-deleted-3e08c400-de17-4651-a33f-716a238d9cff {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}}
[ 1233.411962] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3c5bfb08-1e14-4068-a914-e4d0121bdc0d tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1233.411962] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3c5bfb08-1e14-4068-a914-e4d0121bdc0d tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1233.411962] env[61839]: DEBUG nova.objects.instance [None req-3c5bfb08-1e14-4068-a914-e4d0121bdc0d tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Lazy-loading 'resources' on Instance uuid 3e153d8a-e069-443c-9db4-7614a6475971 {{(pid=61839) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}}
[ 1233.605838] env[61839]: DEBUG oslo_vmware.api [None req-9df096dc-bb50-4537-a62f-333451333433 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]528574c5-8aff-69ba-3705-7ca98fecf44b, 'name': SearchDatastore_Task, 'duration_secs': 0.009518} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1233.605968] env[61839]: DEBUG oslo_concurrency.lockutils [None req-9df096dc-bb50-4537-a62f-333451333433 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1233.606137] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-9df096dc-bb50-4537-a62f-333451333433 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk to [datastore1] b0908d71-8a80-475d-b2c4-ba68ad93a91d/b0908d71-8a80-475d-b2c4-ba68ad93a91d.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}}
[ 1233.606394] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-cb652a64-278f-4a91-9c62-758be0c5ef44 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1233.614315] env[61839]: DEBUG oslo_vmware.api [None req-9df096dc-bb50-4537-a62f-333451333433 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Waiting for the task: (returnval){
[ 1233.614315] env[61839]: value = "task-1315193"
[ 1233.614315] env[61839]: _type = "Task"
[ 1233.614315] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1233.621614] env[61839]: DEBUG oslo_vmware.api [None req-9df096dc-bb50-4537-a62f-333451333433 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Task: {'id': task-1315193, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1233.979871] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4dd85edb-e778-44ae-8707-ca00e6565f86 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1233.988981] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba2bb40e-1395-4040-b030-d0b6a1345da3 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1234.024573] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3410d255-1e35-4a85-babd-d45b20975ebd {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1234.033959] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d32de83-e56f-4fef-b992-520d5bc95df0 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1234.049249] env[61839]: DEBUG nova.compute.provider_tree [None req-3c5bfb08-1e14-4068-a914-e4d0121bdc0d tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1234.124166] env[61839]: DEBUG oslo_vmware.api [None req-9df096dc-bb50-4537-a62f-333451333433 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Task: {'id': task-1315193, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.435373} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1234.124480] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-9df096dc-bb50-4537-a62f-333451333433 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk to [datastore1] b0908d71-8a80-475d-b2c4-ba68ad93a91d/b0908d71-8a80-475d-b2c4-ba68ad93a91d.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}}
[ 1234.124703] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-9df096dc-bb50-4537-a62f-333451333433 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] [instance: b0908d71-8a80-475d-b2c4-ba68ad93a91d] Extending root virtual disk to 1048576 {{(pid=61839) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}}
[ 1234.124915] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2164f3dc-edf1-47db-8c2a-5c3f08ecf4dd {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1234.131037] env[61839]: DEBUG oslo_vmware.api [None req-9df096dc-bb50-4537-a62f-333451333433 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Waiting for the task: (returnval){
[ 1234.131037] env[61839]: value = "task-1315194"
[ 1234.131037] env[61839]: _type = "Task"
[ 1234.131037] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1234.138619] env[61839]: DEBUG oslo_vmware.api [None req-9df096dc-bb50-4537-a62f-333451333433 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Task: {'id': task-1315194, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1234.552604] env[61839]: DEBUG nova.scheduler.client.report [None req-3c5bfb08-1e14-4068-a914-e4d0121bdc0d tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 1234.640844] env[61839]: DEBUG oslo_vmware.api [None req-9df096dc-bb50-4537-a62f-333451333433 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Task: {'id': task-1315194, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065716} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1234.641148] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-9df096dc-bb50-4537-a62f-333451333433 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] [instance: b0908d71-8a80-475d-b2c4-ba68ad93a91d] Extended root virtual disk {{(pid=61839) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}}
[ 1234.641907] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69789191-0e84-4ac1-9faa-420880a82fc3 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1234.662852] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-9df096dc-bb50-4537-a62f-333451333433 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] [instance: b0908d71-8a80-475d-b2c4-ba68ad93a91d] Reconfiguring VM instance instance-00000071 to attach disk [datastore1] b0908d71-8a80-475d-b2c4-ba68ad93a91d/b0908d71-8a80-475d-b2c4-ba68ad93a91d.vmdk or device None with type sparse {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}}
[ 1234.663098] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-99e419f5-061a-4bf5-bcd1-a3da36be4953 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1234.681675] env[61839]: DEBUG oslo_vmware.api [None req-9df096dc-bb50-4537-a62f-333451333433 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Waiting for the task: (returnval){
[ 1234.681675] env[61839]: value = "task-1315195"
[ 1234.681675] env[61839]: _type = "Task"
[ 1234.681675] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1234.688794] env[61839]: DEBUG oslo_vmware.api [None req-9df096dc-bb50-4537-a62f-333451333433 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Task: {'id': task-1315195, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1235.058051] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3c5bfb08-1e14-4068-a914-e4d0121bdc0d tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.646s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1235.074655] env[61839]: INFO nova.scheduler.client.report [None req-3c5bfb08-1e14-4068-a914-e4d0121bdc0d tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Deleted allocations for instance 3e153d8a-e069-443c-9db4-7614a6475971
[ 1235.191641] env[61839]: DEBUG oslo_vmware.api [None req-9df096dc-bb50-4537-a62f-333451333433 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Task: {'id': task-1315195, 'name': ReconfigVM_Task, 'duration_secs': 0.265739} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1235.192032] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-9df096dc-bb50-4537-a62f-333451333433 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] [instance: b0908d71-8a80-475d-b2c4-ba68ad93a91d] Reconfigured VM instance instance-00000071 to attach disk [datastore1] b0908d71-8a80-475d-b2c4-ba68ad93a91d/b0908d71-8a80-475d-b2c4-ba68ad93a91d.vmdk or device None with type sparse {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}}
[ 1235.192564] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6d45afa4-7606-451f-8166-9bbd531d7e3b {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1235.199544] env[61839]: DEBUG oslo_vmware.api [None req-9df096dc-bb50-4537-a62f-333451333433 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Waiting for the task: (returnval){
[ 1235.199544] env[61839]: value = "task-1315196"
[ 1235.199544] env[61839]: _type = "Task"
[ 1235.199544] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1235.206748] env[61839]: DEBUG oslo_vmware.api [None req-9df096dc-bb50-4537-a62f-333451333433 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Task: {'id': task-1315196, 'name': Rename_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1235.584051] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3c5bfb08-1e14-4068-a914-e4d0121bdc0d tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Lock "3e153d8a-e069-443c-9db4-7614a6475971" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 5.028s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1235.708705] env[61839]: DEBUG oslo_vmware.api [None req-9df096dc-bb50-4537-a62f-333451333433 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Task: {'id': task-1315196, 'name': Rename_Task, 'duration_secs': 0.153142} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1235.708989] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-9df096dc-bb50-4537-a62f-333451333433 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] [instance: b0908d71-8a80-475d-b2c4-ba68ad93a91d] Powering on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}}
[ 1235.709253] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b075f8b1-3d2d-4842-aa3a-64aac9a2911e {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1235.715054] env[61839]: DEBUG oslo_vmware.api [None req-9df096dc-bb50-4537-a62f-333451333433 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Waiting for the task: (returnval){
[ 1235.715054] env[61839]: value = "task-1315198"
[ 1235.715054] env[61839]: _type = "Task"
[ 1235.715054] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1235.722536] env[61839]: DEBUG oslo_vmware.api [None req-9df096dc-bb50-4537-a62f-333451333433 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Task: {'id': task-1315198, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1236.226960] env[61839]: DEBUG oslo_vmware.api [None req-9df096dc-bb50-4537-a62f-333451333433 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Task: {'id': task-1315198, 'name': PowerOnVM_Task, 'duration_secs': 0.417357} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1236.227309] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-9df096dc-bb50-4537-a62f-333451333433 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] [instance: b0908d71-8a80-475d-b2c4-ba68ad93a91d] Powered on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}}
[ 1236.227468] env[61839]: INFO nova.compute.manager [None req-9df096dc-bb50-4537-a62f-333451333433 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] [instance: b0908d71-8a80-475d-b2c4-ba68ad93a91d] Took 7.43 seconds to spawn the instance on the hypervisor.
[ 1236.227669] env[61839]: DEBUG nova.compute.manager [None req-9df096dc-bb50-4537-a62f-333451333433 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] [instance: b0908d71-8a80-475d-b2c4-ba68ad93a91d] Checking state {{(pid=61839) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1236.228457] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-854d5495-52ad-4691-9ec5-a63cb12e59b8 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.744706] env[61839]: INFO nova.compute.manager [None req-9df096dc-bb50-4537-a62f-333451333433 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] [instance: b0908d71-8a80-475d-b2c4-ba68ad93a91d] Took 12.84 seconds to build instance. [ 1237.247090] env[61839]: DEBUG oslo_concurrency.lockutils [None req-9df096dc-bb50-4537-a62f-333451333433 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Lock "b0908d71-8a80-475d-b2c4-ba68ad93a91d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.350s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1237.286502] env[61839]: DEBUG nova.compute.manager [req-772b6159-044d-458e-88ef-192653bee49e req-f4f0470e-86e7-4f5f-8e33-77340476157e service nova] [instance: b0908d71-8a80-475d-b2c4-ba68ad93a91d] Received event network-changed-7be9f7db-5c07-4100-90ea-1b774db19788 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1237.286712] env[61839]: DEBUG nova.compute.manager [req-772b6159-044d-458e-88ef-192653bee49e req-f4f0470e-86e7-4f5f-8e33-77340476157e service nova] [instance: b0908d71-8a80-475d-b2c4-ba68ad93a91d] Refreshing instance network info cache due to event network-changed-7be9f7db-5c07-4100-90ea-1b774db19788. {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1237.286941] env[61839]: DEBUG oslo_concurrency.lockutils [req-772b6159-044d-458e-88ef-192653bee49e req-f4f0470e-86e7-4f5f-8e33-77340476157e service nova] Acquiring lock "refresh_cache-b0908d71-8a80-475d-b2c4-ba68ad93a91d" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1237.287108] env[61839]: DEBUG oslo_concurrency.lockutils [req-772b6159-044d-458e-88ef-192653bee49e req-f4f0470e-86e7-4f5f-8e33-77340476157e service nova] Acquired lock "refresh_cache-b0908d71-8a80-475d-b2c4-ba68ad93a91d" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1237.287282] env[61839]: DEBUG nova.network.neutron [req-772b6159-044d-458e-88ef-192653bee49e req-f4f0470e-86e7-4f5f-8e33-77340476157e service nova] [instance: b0908d71-8a80-475d-b2c4-ba68ad93a91d] Refreshing network info cache for port 7be9f7db-5c07-4100-90ea-1b774db19788 {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1237.992772] env[61839]: DEBUG nova.network.neutron [req-772b6159-044d-458e-88ef-192653bee49e req-f4f0470e-86e7-4f5f-8e33-77340476157e service nova] [instance: b0908d71-8a80-475d-b2c4-ba68ad93a91d] Updated VIF entry in instance network info cache for port 7be9f7db-5c07-4100-90ea-1b774db19788. 
{{(pid=61839) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1237.993169] env[61839]: DEBUG nova.network.neutron [req-772b6159-044d-458e-88ef-192653bee49e req-f4f0470e-86e7-4f5f-8e33-77340476157e service nova] [instance: b0908d71-8a80-475d-b2c4-ba68ad93a91d] Updating instance_info_cache with network_info: [{"id": "7be9f7db-5c07-4100-90ea-1b774db19788", "address": "fa:16:3e:7f:80:33", "network": {"id": "10bd9432-d418-491e-9a5f-d1bd6d8de00d", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-220780264-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.131", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "14891ffcf4e240c7b870ee7dc63f28c7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "35ac9709-fd8b-4630-897a-68ed629d1b11", "external-id": "nsx-vlan-transportzone-284", "segmentation_id": 284, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7be9f7db-5c", "ovs_interfaceid": "7be9f7db-5c07-4100-90ea-1b774db19788", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1238.496434] env[61839]: DEBUG oslo_concurrency.lockutils [req-772b6159-044d-458e-88ef-192653bee49e req-f4f0470e-86e7-4f5f-8e33-77340476157e service nova] Releasing lock "refresh_cache-b0908d71-8a80-475d-b2c4-ba68ad93a91d" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1239.233962] env[61839]: DEBUG oslo_concurrency.lockutils [None req-02793324-3e95-4359-b2a1-85c3452641bc tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Acquiring lock "7f7b3f51-3e96-49f1-a84a-81ae649e6938" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1239.234299] env[61839]: DEBUG oslo_concurrency.lockutils [None req-02793324-3e95-4359-b2a1-85c3452641bc tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Lock "7f7b3f51-3e96-49f1-a84a-81ae649e6938" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1239.234522] env[61839]: DEBUG oslo_concurrency.lockutils [None req-02793324-3e95-4359-b2a1-85c3452641bc tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Acquiring lock "7f7b3f51-3e96-49f1-a84a-81ae649e6938-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1239.234709] env[61839]: DEBUG oslo_concurrency.lockutils [None req-02793324-3e95-4359-b2a1-85c3452641bc tempest-ServerActionsTestOtherA-731553444 
tempest-ServerActionsTestOtherA-731553444-project-member] Lock "7f7b3f51-3e96-49f1-a84a-81ae649e6938-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1239.234886] env[61839]: DEBUG oslo_concurrency.lockutils [None req-02793324-3e95-4359-b2a1-85c3452641bc tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Lock "7f7b3f51-3e96-49f1-a84a-81ae649e6938-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1239.237172] env[61839]: INFO nova.compute.manager [None req-02793324-3e95-4359-b2a1-85c3452641bc tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 7f7b3f51-3e96-49f1-a84a-81ae649e6938] Terminating instance [ 1239.238925] env[61839]: DEBUG nova.compute.manager [None req-02793324-3e95-4359-b2a1-85c3452641bc tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 7f7b3f51-3e96-49f1-a84a-81ae649e6938] Start destroying the instance on the hypervisor. {{(pid=61839) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1239.239146] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-02793324-3e95-4359-b2a1-85c3452641bc tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 7f7b3f51-3e96-49f1-a84a-81ae649e6938] Destroying instance {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1239.239982] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc52b85f-80e3-4464-8c54-6cede6ffa566 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.248386] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-02793324-3e95-4359-b2a1-85c3452641bc tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 7f7b3f51-3e96-49f1-a84a-81ae649e6938] Powering off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1239.248659] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7e81b345-6d01-49a0-bb9b-ec3bfe0c4ffc {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.256165] env[61839]: DEBUG oslo_vmware.api [None req-02793324-3e95-4359-b2a1-85c3452641bc tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Waiting for the task: (returnval){ [ 1239.256165] env[61839]: value = "task-1315199" [ 1239.256165] env[61839]: _type = "Task" [ 1239.256165] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1239.264555] env[61839]: DEBUG oslo_vmware.api [None req-02793324-3e95-4359-b2a1-85c3452641bc tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1315199, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1239.767102] env[61839]: DEBUG oslo_vmware.api [None req-02793324-3e95-4359-b2a1-85c3452641bc tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1315199, 'name': PowerOffVM_Task, 'duration_secs': 0.242363} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1239.767501] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-02793324-3e95-4359-b2a1-85c3452641bc tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 7f7b3f51-3e96-49f1-a84a-81ae649e6938] Powered off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1239.767645] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-02793324-3e95-4359-b2a1-85c3452641bc tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 7f7b3f51-3e96-49f1-a84a-81ae649e6938] Unregistering the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1239.767850] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f6b39e6c-b8ca-4617-b971-b08174caa512 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.855020] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-02793324-3e95-4359-b2a1-85c3452641bc tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 7f7b3f51-3e96-49f1-a84a-81ae649e6938] Unregistered the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1239.855275] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-02793324-3e95-4359-b2a1-85c3452641bc tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 7f7b3f51-3e96-49f1-a84a-81ae649e6938] Deleting contents of the VM from datastore datastore1 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1239.855465] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-02793324-3e95-4359-b2a1-85c3452641bc tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Deleting the datastore file [datastore1] 7f7b3f51-3e96-49f1-a84a-81ae649e6938 {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1239.855825] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a4ab1a37-d49e-47cf-b12b-968882311ccd {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.861843] env[61839]: DEBUG oslo_vmware.api [None req-02793324-3e95-4359-b2a1-85c3452641bc tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Waiting for the task: (returnval){ [ 1239.861843] env[61839]: value = "task-1315201" [ 1239.861843] env[61839]: _type = "Task" [ 1239.861843] env[61839]: } to complete. 
{{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1239.869340] env[61839]: DEBUG oslo_vmware.api [None req-02793324-3e95-4359-b2a1-85c3452641bc tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1315201, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1240.372071] env[61839]: DEBUG oslo_vmware.api [None req-02793324-3e95-4359-b2a1-85c3452641bc tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1315201, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1240.873021] env[61839]: DEBUG oslo_vmware.api [None req-02793324-3e95-4359-b2a1-85c3452641bc tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Task: {'id': task-1315201, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.792454} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1240.873457] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-02793324-3e95-4359-b2a1-85c3452641bc tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Deleted the datastore file {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1240.873637] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-02793324-3e95-4359-b2a1-85c3452641bc tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 7f7b3f51-3e96-49f1-a84a-81ae649e6938] Deleted contents of the VM from datastore datastore1 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1240.873824] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-02793324-3e95-4359-b2a1-85c3452641bc tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 7f7b3f51-3e96-49f1-a84a-81ae649e6938] Instance destroyed {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1240.874019] env[61839]: INFO nova.compute.manager [None req-02793324-3e95-4359-b2a1-85c3452641bc tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] [instance: 7f7b3f51-3e96-49f1-a84a-81ae649e6938] Took 1.63 seconds to destroy the instance on the hypervisor. [ 1240.874272] env[61839]: DEBUG oslo.service.loopingcall [None req-02793324-3e95-4359-b2a1-85c3452641bc tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1240.874467] env[61839]: DEBUG nova.compute.manager [-] [instance: 7f7b3f51-3e96-49f1-a84a-81ae649e6938] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1240.874562] env[61839]: DEBUG nova.network.neutron [-] [instance: 7f7b3f51-3e96-49f1-a84a-81ae649e6938] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1241.125193] env[61839]: DEBUG nova.compute.manager [req-b0221314-ea39-4f32-89c0-2dd1e627e23b req-a981e364-06b1-42ac-aa86-7fa3d0c07c1d service nova] [instance: 7f7b3f51-3e96-49f1-a84a-81ae649e6938] Received event network-vif-deleted-6b6f5c89-f3fe-4e29-82b8-e9e8f2658bfd {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1241.125302] env[61839]: INFO nova.compute.manager [req-b0221314-ea39-4f32-89c0-2dd1e627e23b req-a981e364-06b1-42ac-aa86-7fa3d0c07c1d service nova] [instance: 7f7b3f51-3e96-49f1-a84a-81ae649e6938] Neutron deleted interface 6b6f5c89-f3fe-4e29-82b8-e9e8f2658bfd; detaching it from the instance and deleting it from the info cache [ 1241.125480] env[61839]: DEBUG nova.network.neutron [req-b0221314-ea39-4f32-89c0-2dd1e627e23b req-a981e364-06b1-42ac-aa86-7fa3d0c07c1d service nova] [instance: 7f7b3f51-3e96-49f1-a84a-81ae649e6938] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1241.605576] env[61839]: DEBUG nova.network.neutron [-] [instance: 7f7b3f51-3e96-49f1-a84a-81ae649e6938] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1241.629606] env[61839]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8a1f3115-a5e0-4baa-ab55-1bf1e42f5037 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.638882] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-732c5e19-7cb4-44c9-8890-35ed4d0cc7d3 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.661360] env[61839]: DEBUG nova.compute.manager [req-b0221314-ea39-4f32-89c0-2dd1e627e23b req-a981e364-06b1-42ac-aa86-7fa3d0c07c1d service nova] [instance: 7f7b3f51-3e96-49f1-a84a-81ae649e6938] Detach interface failed, port_id=6b6f5c89-f3fe-4e29-82b8-e9e8f2658bfd, reason: Instance 7f7b3f51-3e96-49f1-a84a-81ae649e6938 could not be found. {{(pid=61839) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1242.108692] env[61839]: INFO nova.compute.manager [-] [instance: 7f7b3f51-3e96-49f1-a84a-81ae649e6938] Took 1.23 seconds to deallocate network for instance. 
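The destroy sequence above follows the usual vSphere pattern: each mutating call (PowerOffVM_Task, UnregisterVM, DeleteDatastoreFile_Task) returns a task managed-object reference immediately, and oslo.vmware's wait_for_task polls that task until the server reports success or error, which is what produces the repeated "progress is 0%" debug lines. A minimal sketch of that polling loop, assuming a get_task_info callable that returns a vim.TaskInfo-like object with state and progress fields (illustrative only, not the actual oslo_vmware.api code):

import time

def wait_for_task(get_task_info, poll_interval=0.5, timeout=300.0):
    """Poll a vSphere task until it finishes, in the style of oslo_vmware.api.

    get_task_info is assumed to return an object with .state in
    ('queued', 'running', 'success', 'error') and .progress (0-100),
    shaped like vim.TaskInfo.
    """
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = get_task_info()
        if info.state == 'success':
            return info
        if info.state == 'error':
            raise RuntimeError('task failed in state %s' % info.state)
        # Each pass here corresponds to one "progress is N%" debug line.
        time.sleep(poll_interval)
    raise TimeoutError('task did not complete within %.0fs' % timeout)

In the log, task-1315199 (PowerOffVM_Task) completed in roughly 0.24s and task-1315201 (DeleteDatastoreFile_Task) in roughly 0.79s, each detected on a subsequent poll iteration.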
[ 1242.616254] env[61839]: DEBUG oslo_concurrency.lockutils [None req-02793324-3e95-4359-b2a1-85c3452641bc tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1242.616547] env[61839]: DEBUG oslo_concurrency.lockutils [None req-02793324-3e95-4359-b2a1-85c3452641bc tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1242.616777] env[61839]: DEBUG nova.objects.instance [None req-02793324-3e95-4359-b2a1-85c3452641bc tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Lazy-loading 'resources' on Instance uuid 7f7b3f51-3e96-49f1-a84a-81ae649e6938 {{(pid=61839) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1243.160784] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e55de14e-0d66-475f-b07a-a535a7a09b0f {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.169583] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02b3755b-6c44-4a3a-bffd-8b4100c71e62 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.200771] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be7fc75b-bdb2-4f17-ab1c-e3eadd910fcb {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.208873] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-780357cd-d85c-47ac-a832-17876aa777bc {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.222536] env[61839]: DEBUG nova.compute.provider_tree [None req-02793324-3e95-4359-b2a1-85c3452641bc tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Updating inventory in ProviderTree for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1243.755673] env[61839]: DEBUG nova.scheduler.client.report [None req-02793324-3e95-4359-b2a1-85c3452641bc tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Updated inventory for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 with generation 149 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 
1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1243.756051] env[61839]: DEBUG nova.compute.provider_tree [None req-02793324-3e95-4359-b2a1-85c3452641bc tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Updating resource provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 generation from 149 to 150 during operation: update_inventory {{(pid=61839) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1243.756336] env[61839]: DEBUG nova.compute.provider_tree [None req-02793324-3e95-4359-b2a1-85c3452641bc tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Updating inventory in ProviderTree for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1244.263050] env[61839]: DEBUG oslo_concurrency.lockutils [None req-02793324-3e95-4359-b2a1-85c3452641bc tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.645s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1244.279536] env[61839]: INFO nova.scheduler.client.report [None req-02793324-3e95-4359-b2a1-85c3452641bc tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Deleted allocations for instance 7f7b3f51-3e96-49f1-a84a-81ae649e6938 [ 1244.786897] env[61839]: DEBUG oslo_concurrency.lockutils [None req-02793324-3e95-4359-b2a1-85c3452641bc tempest-ServerActionsTestOtherA-731553444 tempest-ServerActionsTestOtherA-731553444-project-member] Lock "7f7b3f51-3e96-49f1-a84a-81ae649e6938" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.552s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1250.489975] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1251.489941] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1253.490052] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1253.992880] env[61839]: DEBUG 
oslo_concurrency.lockutils [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1253.993172] env[61839]: DEBUG oslo_concurrency.lockutils [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1253.993347] env[61839]: DEBUG oslo_concurrency.lockutils [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1253.993508] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61839) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1253.994416] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b73c68f1-3fec-48c4-9268-128b787a25f5 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.003167] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12873d20-e974-4fb6-8ac3-0bb610f9e075 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.016911] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e2b03ae-99e3-4988-adae-7e14ecfe0fc7 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.023200] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42a0d045-2f52-4f53-b685-97969758186b {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.052107] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181069MB free_disk=134GB free_vcpus=48 pci_devices=None {{(pid=61839) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1254.052283] env[61839]: DEBUG oslo_concurrency.lockutils [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1254.052436] env[61839]: DEBUG oslo_concurrency.lockutils [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1255.076499] env[61839]: DEBUG nova.compute.resource_tracker [None 
req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance b0908d71-8a80-475d-b2c4-ba68ad93a91d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1255.076778] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Total usable vcpus: 48, total allocated vcpus: 1 {{(pid=61839) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1255.077028] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=704MB phys_disk=200GB used_disk=1GB total_vcpus=48 used_vcpus=1 pci_stats=[] {{(pid=61839) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1255.102551] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10d14c9b-1b57-4649-8fcb-d00128ae4250 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.109920] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4393e3f-105b-42ed-b52a-7191bbf8d855 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.138471] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bc0f6c5-f649-4f15-8a43-33292f0c52c3 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.145280] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4e75632-9885-469f-b16f-733e9686352f {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.158818] env[61839]: DEBUG nova.compute.provider_tree [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1255.661949] env[61839]: DEBUG nova.scheduler.client.report [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 134, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1256.166982] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61839) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1256.167389] env[61839]: DEBUG oslo_concurrency.lockutils [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.115s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1257.167916] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1257.168248] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Starting heal instance info cache {{(pid=61839) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 1257.168286] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Rebuilding the list of instances to heal {{(pid=61839) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1257.697545] env[61839]: DEBUG oslo_concurrency.lockutils [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Acquiring lock "refresh_cache-b0908d71-8a80-475d-b2c4-ba68ad93a91d" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1257.697707] env[61839]: DEBUG oslo_concurrency.lockutils [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Acquired lock "refresh_cache-b0908d71-8a80-475d-b2c4-ba68ad93a91d" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1257.697856] env[61839]: DEBUG nova.network.neutron [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] [instance: b0908d71-8a80-475d-b2c4-ba68ad93a91d] Forcefully refreshing network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 1257.698045] env[61839]: DEBUG nova.objects.instance [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Lazy-loading 'info_cache' on Instance uuid b0908d71-8a80-475d-b2c4-ba68ad93a91d {{(pid=61839) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1259.417147] env[61839]: DEBUG nova.network.neutron [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] [instance: b0908d71-8a80-475d-b2c4-ba68ad93a91d] Updating instance_info_cache with network_info: [{"id": "7be9f7db-5c07-4100-90ea-1b774db19788", "address": "fa:16:3e:7f:80:33", "network": {"id": "10bd9432-d418-491e-9a5f-d1bd6d8de00d", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-220780264-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.131", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "14891ffcf4e240c7b870ee7dc63f28c7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "35ac9709-fd8b-4630-897a-68ed629d1b11", "external-id": "nsx-vlan-transportzone-284", "segmentation_id": 284, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7be9f7db-5c", "ovs_interfaceid": "7be9f7db-5c07-4100-90ea-1b774db19788", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, 
"delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1259.919905] env[61839]: DEBUG oslo_concurrency.lockutils [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Releasing lock "refresh_cache-b0908d71-8a80-475d-b2c4-ba68ad93a91d" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1259.920161] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] [instance: b0908d71-8a80-475d-b2c4-ba68ad93a91d] Updated the network info_cache for instance {{(pid=61839) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9999}} [ 1259.920400] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1259.920583] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1259.920745] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1261.488907] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1261.489359] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1261.489359] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61839) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 1276.386373] env[61839]: DEBUG oslo_concurrency.lockutils [None req-4fcdcd9b-026c-4370-b361-0d49341c5f02 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Acquiring lock "b0908d71-8a80-475d-b2c4-ba68ad93a91d" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1276.386869] env[61839]: DEBUG oslo_concurrency.lockutils [None req-4fcdcd9b-026c-4370-b361-0d49341c5f02 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Lock "b0908d71-8a80-475d-b2c4-ba68ad93a91d" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1276.890317] env[61839]: DEBUG nova.compute.utils [None req-4fcdcd9b-026c-4370-b361-0d49341c5f02 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Using /dev/sd instead of None {{(pid=61839) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1277.393692] env[61839]: DEBUG oslo_concurrency.lockutils [None req-4fcdcd9b-026c-4370-b361-0d49341c5f02 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Lock "b0908d71-8a80-475d-b2c4-ba68ad93a91d" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.007s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1278.455274] env[61839]: DEBUG oslo_concurrency.lockutils [None req-4fcdcd9b-026c-4370-b361-0d49341c5f02 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Acquiring lock "b0908d71-8a80-475d-b2c4-ba68ad93a91d" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1278.455648] env[61839]: DEBUG oslo_concurrency.lockutils [None req-4fcdcd9b-026c-4370-b361-0d49341c5f02 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Lock "b0908d71-8a80-475d-b2c4-ba68ad93a91d" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1278.455831] env[61839]: INFO nova.compute.manager [None req-4fcdcd9b-026c-4370-b361-0d49341c5f02 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] [instance: b0908d71-8a80-475d-b2c4-ba68ad93a91d] Attaching volume 26700da1-b262-49fc-a2d4-bad51042cd67 to /dev/sdb [ 1278.485869] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45a72cb2-50c0-4305-9ea9-9dbf3c42ca8b {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.493236] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f13cf4de-9c1d-40d8-82fc-505cbccd1999 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.508190] env[61839]: DEBUG nova.virt.block_device [None 
req-4fcdcd9b-026c-4370-b361-0d49341c5f02 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] [instance: b0908d71-8a80-475d-b2c4-ba68ad93a91d] Updating existing volume attachment record: 34b54d54-e3d2-4b87-8946-e693f8a06099 {{(pid=61839) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1283.049907] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-4fcdcd9b-026c-4370-b361-0d49341c5f02 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] [instance: b0908d71-8a80-475d-b2c4-ba68ad93a91d] Volume attach. Driver type: vmdk {{(pid=61839) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1283.050186] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-4fcdcd9b-026c-4370-b361-0d49341c5f02 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] [instance: b0908d71-8a80-475d-b2c4-ba68ad93a91d] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-281474', 'volume_id': '26700da1-b262-49fc-a2d4-bad51042cd67', 'name': 'volume-26700da1-b262-49fc-a2d4-bad51042cd67', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'b0908d71-8a80-475d-b2c4-ba68ad93a91d', 'attached_at': '', 'detached_at': '', 'volume_id': '26700da1-b262-49fc-a2d4-bad51042cd67', 'serial': '26700da1-b262-49fc-a2d4-bad51042cd67'} {{(pid=61839) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1283.051084] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65aabaab-35a4-4ba3-91f6-38d9e58f508d {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.067114] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a865b718-dc70-4e85-b242-0506b945a444 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.090166] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-4fcdcd9b-026c-4370-b361-0d49341c5f02 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] [instance: b0908d71-8a80-475d-b2c4-ba68ad93a91d] Reconfiguring VM instance instance-00000071 to attach disk [datastore2] volume-26700da1-b262-49fc-a2d4-bad51042cd67/volume-26700da1-b262-49fc-a2d4-bad51042cd67.vmdk or device None with type thin {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1283.090383] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4d40a9f7-c8d5-4d7b-84fa-bc19d1f0c25b {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.107934] env[61839]: DEBUG oslo_vmware.api [None req-4fcdcd9b-026c-4370-b361-0d49341c5f02 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Waiting for the task: (returnval){ [ 1283.107934] env[61839]: value = "task-1315206" [ 1283.107934] env[61839]: _type = "Task" [ 1283.107934] env[61839]: } to complete. 
{{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1283.115111] env[61839]: DEBUG oslo_vmware.api [None req-4fcdcd9b-026c-4370-b361-0d49341c5f02 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Task: {'id': task-1315206, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1283.617334] env[61839]: DEBUG oslo_vmware.api [None req-4fcdcd9b-026c-4370-b361-0d49341c5f02 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Task: {'id': task-1315206, 'name': ReconfigVM_Task, 'duration_secs': 0.389284} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1283.617928] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-4fcdcd9b-026c-4370-b361-0d49341c5f02 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] [instance: b0908d71-8a80-475d-b2c4-ba68ad93a91d] Reconfigured VM instance instance-00000071 to attach disk [datastore2] volume-26700da1-b262-49fc-a2d4-bad51042cd67/volume-26700da1-b262-49fc-a2d4-bad51042cd67.vmdk or device None with type thin {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1283.622693] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5fb88097-3dea-4119-9cfc-79c3bec3dcd4 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.636713] env[61839]: DEBUG oslo_vmware.api [None req-4fcdcd9b-026c-4370-b361-0d49341c5f02 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Waiting for the task: (returnval){ [ 1283.636713] env[61839]: value = "task-1315207" [ 1283.636713] env[61839]: _type = "Task" [ 1283.636713] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1283.643845] env[61839]: DEBUG oslo_vmware.api [None req-4fcdcd9b-026c-4370-b361-0d49341c5f02 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Task: {'id': task-1315207, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1284.148054] env[61839]: DEBUG oslo_vmware.api [None req-4fcdcd9b-026c-4370-b361-0d49341c5f02 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Task: {'id': task-1315207, 'name': ReconfigVM_Task, 'duration_secs': 0.133779} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1284.148054] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-4fcdcd9b-026c-4370-b361-0d49341c5f02 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] [instance: b0908d71-8a80-475d-b2c4-ba68ad93a91d] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-281474', 'volume_id': '26700da1-b262-49fc-a2d4-bad51042cd67', 'name': 'volume-26700da1-b262-49fc-a2d4-bad51042cd67', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'b0908d71-8a80-475d-b2c4-ba68ad93a91d', 'attached_at': '', 'detached_at': '', 'volume_id': '26700da1-b262-49fc-a2d4-bad51042cd67', 'serial': '26700da1-b262-49fc-a2d4-bad51042cd67'} {{(pid=61839) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1285.183054] env[61839]: DEBUG nova.objects.instance [None req-4fcdcd9b-026c-4370-b361-0d49341c5f02 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Lazy-loading 'flavor' on Instance uuid b0908d71-8a80-475d-b2c4-ba68ad93a91d {{(pid=61839) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1285.688933] env[61839]: DEBUG oslo_concurrency.lockutils [None req-4fcdcd9b-026c-4370-b361-0d49341c5f02 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Lock "b0908d71-8a80-475d-b2c4-ba68ad93a91d" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.233s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1285.775442] env[61839]: DEBUG oslo_concurrency.lockutils [None req-f6d1e1d8-9353-44a0-8dd7-fdad6b62c45f tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Acquiring lock "b0908d71-8a80-475d-b2c4-ba68ad93a91d" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1285.775692] env[61839]: DEBUG oslo_concurrency.lockutils [None req-f6d1e1d8-9353-44a0-8dd7-fdad6b62c45f tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Lock "b0908d71-8a80-475d-b2c4-ba68ad93a91d" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1285.775876] env[61839]: DEBUG nova.compute.manager [None req-f6d1e1d8-9353-44a0-8dd7-fdad6b62c45f tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] [instance: b0908d71-8a80-475d-b2c4-ba68ad93a91d] Checking state {{(pid=61839) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1285.776821] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-757245d4-58cc-4891-ac32-eaa72af524e8 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.783753] env[61839]: DEBUG nova.compute.manager [None req-f6d1e1d8-9353-44a0-8dd7-fdad6b62c45f tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] [instance: b0908d71-8a80-475d-b2c4-ba68ad93a91d] Stopping instance; current vm_state: active, current 
task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=61839) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3368}} [ 1285.784318] env[61839]: DEBUG nova.objects.instance [None req-f6d1e1d8-9353-44a0-8dd7-fdad6b62c45f tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Lazy-loading 'flavor' on Instance uuid b0908d71-8a80-475d-b2c4-ba68ad93a91d {{(pid=61839) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1286.289403] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-f6d1e1d8-9353-44a0-8dd7-fdad6b62c45f tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] [instance: b0908d71-8a80-475d-b2c4-ba68ad93a91d] Powering off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1286.289846] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f58fa906-109a-4e81-914b-26ea45d5481a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.298058] env[61839]: DEBUG oslo_vmware.api [None req-f6d1e1d8-9353-44a0-8dd7-fdad6b62c45f tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Waiting for the task: (returnval){ [ 1286.298058] env[61839]: value = "task-1315208" [ 1286.298058] env[61839]: _type = "Task" [ 1286.298058] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1286.305698] env[61839]: DEBUG oslo_vmware.api [None req-f6d1e1d8-9353-44a0-8dd7-fdad6b62c45f tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Task: {'id': task-1315208, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1286.807822] env[61839]: DEBUG oslo_vmware.api [None req-f6d1e1d8-9353-44a0-8dd7-fdad6b62c45f tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Task: {'id': task-1315208, 'name': PowerOffVM_Task, 'duration_secs': 0.211191} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1286.808105] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-f6d1e1d8-9353-44a0-8dd7-fdad6b62c45f tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] [instance: b0908d71-8a80-475d-b2c4-ba68ad93a91d] Powered off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1286.808337] env[61839]: DEBUG nova.compute.manager [None req-f6d1e1d8-9353-44a0-8dd7-fdad6b62c45f tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] [instance: b0908d71-8a80-475d-b2c4-ba68ad93a91d] Checking state {{(pid=61839) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1286.809087] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-105accf6-7ccb-4206-bff9-2c92f5210e94 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1287.320049] env[61839]: DEBUG oslo_concurrency.lockutils [None req-f6d1e1d8-9353-44a0-8dd7-fdad6b62c45f tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Lock "b0908d71-8a80-475d-b2c4-ba68ad93a91d" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.544s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1288.143667] env[61839]: DEBUG nova.objects.instance [None req-3f393c58-e50f-4da7-aad6-04545ae844f2 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Lazy-loading 'flavor' on Instance uuid b0908d71-8a80-475d-b2c4-ba68ad93a91d {{(pid=61839) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1288.648716] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3f393c58-e50f-4da7-aad6-04545ae844f2 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Acquiring lock "refresh_cache-b0908d71-8a80-475d-b2c4-ba68ad93a91d" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1288.649130] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3f393c58-e50f-4da7-aad6-04545ae844f2 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Acquired lock "refresh_cache-b0908d71-8a80-475d-b2c4-ba68ad93a91d" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1288.649130] env[61839]: DEBUG nova.network.neutron [None req-3f393c58-e50f-4da7-aad6-04545ae844f2 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] [instance: b0908d71-8a80-475d-b2c4-ba68ad93a91d] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1288.649269] env[61839]: DEBUG nova.objects.instance [None req-3f393c58-e50f-4da7-aad6-04545ae844f2 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Lazy-loading 'info_cache' on Instance uuid b0908d71-8a80-475d-b2c4-ba68ad93a91d {{(pid=61839) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1289.153236] env[61839]: DEBUG nova.objects.base [None req-3f393c58-e50f-4da7-aad6-04545ae844f2 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Object 
Instance lazy-loaded attributes: flavor,info_cache {{(pid=61839) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 1289.877820] env[61839]: DEBUG nova.network.neutron [None req-3f393c58-e50f-4da7-aad6-04545ae844f2 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] [instance: b0908d71-8a80-475d-b2c4-ba68ad93a91d] Updating instance_info_cache with network_info: [{"id": "7be9f7db-5c07-4100-90ea-1b774db19788", "address": "fa:16:3e:7f:80:33", "network": {"id": "10bd9432-d418-491e-9a5f-d1bd6d8de00d", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-220780264-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.131", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "14891ffcf4e240c7b870ee7dc63f28c7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "35ac9709-fd8b-4630-897a-68ed629d1b11", "external-id": "nsx-vlan-transportzone-284", "segmentation_id": 284, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7be9f7db-5c", "ovs_interfaceid": "7be9f7db-5c07-4100-90ea-1b774db19788", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1290.380969] env[61839]: DEBUG oslo_concurrency.lockutils [None req-3f393c58-e50f-4da7-aad6-04545ae844f2 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Releasing lock "refresh_cache-b0908d71-8a80-475d-b2c4-ba68ad93a91d" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1290.884705] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-3f393c58-e50f-4da7-aad6-04545ae844f2 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] [instance: b0908d71-8a80-475d-b2c4-ba68ad93a91d] Powering on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1290.885056] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-856d3d9d-b924-454c-83b2-01489b8b9e25 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.892174] env[61839]: DEBUG oslo_vmware.api [None req-3f393c58-e50f-4da7-aad6-04545ae844f2 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Waiting for the task: (returnval){ [ 1290.892174] env[61839]: value = "task-1315209" [ 1290.892174] env[61839]: _type = "Task" [ 1290.892174] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1290.899723] env[61839]: DEBUG oslo_vmware.api [None req-3f393c58-e50f-4da7-aad6-04545ae844f2 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Task: {'id': task-1315209, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1291.402256] env[61839]: DEBUG oslo_vmware.api [None req-3f393c58-e50f-4da7-aad6-04545ae844f2 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Task: {'id': task-1315209, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1291.902631] env[61839]: DEBUG oslo_vmware.api [None req-3f393c58-e50f-4da7-aad6-04545ae844f2 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Task: {'id': task-1315209, 'name': PowerOnVM_Task, 'duration_secs': 0.79233} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1291.903068] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-3f393c58-e50f-4da7-aad6-04545ae844f2 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] [instance: b0908d71-8a80-475d-b2c4-ba68ad93a91d] Powered on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1291.903135] env[61839]: DEBUG nova.compute.manager [None req-3f393c58-e50f-4da7-aad6-04545ae844f2 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] [instance: b0908d71-8a80-475d-b2c4-ba68ad93a91d] Checking state {{(pid=61839) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1291.903920] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48b3276a-58ed-47a2-bc28-c341d1dc841d {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.490677] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1312.491096] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1313.490058] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1313.992966] env[61839]: DEBUG oslo_concurrency.lockutils [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1313.993346] env[61839]: DEBUG oslo_concurrency.lockutils [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1313.993480] env[61839]: DEBUG oslo_concurrency.lockutils [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Lock "compute_resources" 
"released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1313.993584] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61839) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1313.994535] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0eeffde-fc19-4e43-93a7-ac125709e614 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1314.002835] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0098ce9b-9a51-4611-9eb5-a20c4d5f8c64 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1314.017381] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-679c7055-028c-4369-bbd7-5662841cf6a4 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1314.023530] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18ffc46a-5b44-400f-a34c-e86bf883f6c9 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1314.050817] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181328MB free_disk=135GB free_vcpus=48 pci_devices=None {{(pid=61839) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1314.050982] env[61839]: DEBUG oslo_concurrency.lockutils [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1314.051158] env[61839]: DEBUG oslo_concurrency.lockutils [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1315.075813] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance b0908d71-8a80-475d-b2c4-ba68ad93a91d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1315.076105] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Total usable vcpus: 48, total allocated vcpus: 1 {{(pid=61839) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1315.076182] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=704MB phys_disk=200GB used_disk=1GB total_vcpus=48 used_vcpus=1 pci_stats=[] {{(pid=61839) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1315.102834] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfc4b3a2-9cd0-4fbf-9ad9-9adbd2f05757 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.110641] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6677128-0ec8-4c4e-a05e-09b12c38eb2c {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.140097] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a068d12-532d-449b-8189-51f063146b04 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.146845] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78ebceed-282a-413b-b7cb-8ed21f10dd55 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.159533] env[61839]: DEBUG nova.compute.provider_tree [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Updating inventory in ProviderTree for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1315.690503] env[61839]: DEBUG nova.scheduler.client.report [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Updated inventory for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 with generation 150 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1315.690775] env[61839]: DEBUG nova.compute.provider_tree [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Updating resource provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 generation from 150 to 151 during operation: update_inventory {{(pid=61839) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} 
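The inventory payloads the resource tracker reports here are assembled from the hypervisor view (196590 MB RAM with 512 MB reserved, 48 vCPUs at a 4.0 allocation ratio, 400 GB of datastore capacity) and written to Placement with optimistic concurrency: the update carries the provider generation the caller last saw (150), and a successful write bumps it (to 151), so a concurrent writer with a stale generation gets a conflict and must re-read. A hedged sketch of both halves using plain dicts rather than Nova's report client (function names here are illustrative):

def build_inventory(vcpus, ram_mb, disk_gb, max_vcpu, max_ram_mb, max_disk_gb,
                    cpu_ratio=4.0, ram_ratio=1.0, disk_ratio=1.0,
                    reserved_ram_mb=512):
    # Mirrors the VCPU/MEMORY_MB/DISK_GB payload shown in the log above.
    return {
        'VCPU': {'total': vcpus, 'reserved': 0, 'min_unit': 1,
                 'max_unit': max_vcpu, 'step_size': 1,
                 'allocation_ratio': cpu_ratio},
        'MEMORY_MB': {'total': ram_mb, 'reserved': reserved_ram_mb,
                      'min_unit': 1, 'max_unit': max_ram_mb,
                      'step_size': 1, 'allocation_ratio': ram_ratio},
        'DISK_GB': {'total': disk_gb, 'reserved': 0, 'min_unit': 1,
                    'max_unit': max_disk_gb, 'step_size': 1,
                    'allocation_ratio': disk_ratio},
    }

def set_inventory(provider, expected_generation, inventory):
    # Placement-style compare-and-swap: reject stale writers, then bump
    # the generation, matching the 150 -> 151 transition in the log.
    if provider['generation'] != expected_generation:
        raise RuntimeError('generation conflict, re-read required')
    provider['inventory'] = inventory
    provider['generation'] += 1

provider = {'generation': 150, 'inventory': {}}
set_inventory(provider, 150, build_inventory(48, 196590, 400, 16, 65530, 135))
assert provider['generation'] == 151

Note that the DISK_GB max_unit moved from 134 to 135 between the two audits as free datastore space changed, which is why this pass updates the inventory instead of logging "Inventory has not changed".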
[ 1315.690948] env[61839]: DEBUG nova.compute.provider_tree [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Updating inventory in ProviderTree for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}}
[ 1315.692334] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61839) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}}
[ 1315.692518] env[61839]: DEBUG oslo_concurrency.lockutils [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.641s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1316.692546] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1316.693075] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Starting heal instance info cache {{(pid=61839) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}}
[ 1316.693075] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Rebuilding the list of instances to heal {{(pid=61839) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 1317.222158] env[61839]: DEBUG oslo_concurrency.lockutils [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Acquiring lock "refresh_cache-b0908d71-8a80-475d-b2c4-ba68ad93a91d" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1317.222316] env[61839]: DEBUG oslo_concurrency.lockutils [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Acquired lock "refresh_cache-b0908d71-8a80-475d-b2c4-ba68ad93a91d" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1317.222464] env[61839]: DEBUG nova.network.neutron [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] [instance: b0908d71-8a80-475d-b2c4-ba68ad93a91d] Forcefully refreshing network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}}
[ 1317.222618] env[61839]: DEBUG nova.objects.instance [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Lazy-loading 'info_cache' on Instance uuid b0908d71-8a80-475d-b2c4-ba68ad93a91d {{(pid=61839) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}}
[ 1318.940997] env[61839]: DEBUG nova.network.neutron [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] [instance: b0908d71-8a80-475d-b2c4-ba68ad93a91d] Updating instance_info_cache with network_info: [{"id": "7be9f7db-5c07-4100-90ea-1b774db19788", "address": "fa:16:3e:7f:80:33", "network": {"id": "10bd9432-d418-491e-9a5f-d1bd6d8de00d", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-220780264-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.131", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "14891ffcf4e240c7b870ee7dc63f28c7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "35ac9709-fd8b-4630-897a-68ed629d1b11", "external-id": "nsx-vlan-transportzone-284", "segmentation_id": 284, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7be9f7db-5c", "ovs_interfaceid": "7be9f7db-5c07-4100-90ea-1b774db19788", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1319.443921] env[61839]: DEBUG oslo_concurrency.lockutils [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Releasing lock "refresh_cache-b0908d71-8a80-475d-b2c4-ba68ad93a91d" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1319.444268] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] [instance: b0908d71-8a80-475d-b2c4-ba68ad93a91d] Updated the network info_cache for instance {{(pid=61839) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9999}}
[ 1319.444491] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1319.444663] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1320.489806] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1321.485593] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1322.489916] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1322.489916] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61839) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}}
[ 1323.486073] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
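The run_periodic_tasks records above (and the later bursts of _poll_* tasks) come from oslo.service's periodic-task machinery: manager methods are declared as periodic tasks, and a service loop invokes each one on its spacing. A simplified sketch of that declaration pattern, assuming oslo.service is installed; the class and task bodies below are stand-ins, not Nova's ComputeManager:

    from oslo_service import periodic_task

    class ManagerSketch(periodic_task.PeriodicTasks):
        """Stand-in for the manager whose tasks appear in the log."""

        @periodic_task.periodic_task(spacing=60)
        def _poll_volume_usage(self, context):
            pass  # the real task aggregates volume I/O stats

        @periodic_task.periodic_task(spacing=60)
        def _reclaim_queued_deletes(self, context):
            # A task may consult config and bail out early, producing
            # records like "CONF.reclaim_instance_interval <= 0, skipping".
            pass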
[ 1328.606497] env[61839]: DEBUG oslo_concurrency.lockutils [None req-43a4331b-fed6-46c5-8166-fb8f9210fbf5 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Acquiring lock "b0908d71-8a80-475d-b2c4-ba68ad93a91d" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1328.606929] env[61839]: DEBUG oslo_concurrency.lockutils [None req-43a4331b-fed6-46c5-8166-fb8f9210fbf5 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Lock "b0908d71-8a80-475d-b2c4-ba68ad93a91d" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1329.110464] env[61839]: INFO nova.compute.manager [None req-43a4331b-fed6-46c5-8166-fb8f9210fbf5 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] [instance: b0908d71-8a80-475d-b2c4-ba68ad93a91d] Detaching volume 26700da1-b262-49fc-a2d4-bad51042cd67
[ 1329.140894] env[61839]: INFO nova.virt.block_device [None req-43a4331b-fed6-46c5-8166-fb8f9210fbf5 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] [instance: b0908d71-8a80-475d-b2c4-ba68ad93a91d] Attempting to driver detach volume 26700da1-b262-49fc-a2d4-bad51042cd67 from mountpoint /dev/sdb
[ 1329.141163] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-43a4331b-fed6-46c5-8166-fb8f9210fbf5 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] [instance: b0908d71-8a80-475d-b2c4-ba68ad93a91d] Volume detach. Driver type: vmdk {{(pid=61839) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}}
[ 1329.141357] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-43a4331b-fed6-46c5-8166-fb8f9210fbf5 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] [instance: b0908d71-8a80-475d-b2c4-ba68ad93a91d] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-281474', 'volume_id': '26700da1-b262-49fc-a2d4-bad51042cd67', 'name': 'volume-26700da1-b262-49fc-a2d4-bad51042cd67', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'b0908d71-8a80-475d-b2c4-ba68ad93a91d', 'attached_at': '', 'detached_at': '', 'volume_id': '26700da1-b262-49fc-a2d4-bad51042cd67', 'serial': '26700da1-b262-49fc-a2d4-bad51042cd67'} {{(pid=61839) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}}
[ 1329.142242] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04745618-c00a-4a4a-9993-851a276bf057 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1329.163561] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ccdb53a-7cad-40d1-9703-76e70f5ce199 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1329.170465] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dba46669-d17f-4d23-8097-47ed7e5d1d18 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1329.190184] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f62cec0f-9336-4aec-93f5-9dcdf813faad {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1329.204087] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-43a4331b-fed6-46c5-8166-fb8f9210fbf5 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] The volume has not been displaced from its original location: [datastore2] volume-26700da1-b262-49fc-a2d4-bad51042cd67/volume-26700da1-b262-49fc-a2d4-bad51042cd67.vmdk. No consolidation needed. {{(pid=61839) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}}
[ 1329.209277] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-43a4331b-fed6-46c5-8166-fb8f9210fbf5 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] [instance: b0908d71-8a80-475d-b2c4-ba68ad93a91d] Reconfiguring VM instance instance-00000071 to detach disk 2001 {{(pid=61839) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}}
[ 1329.209532] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-635c0ed9-5614-4600-b0f3-1978d0eb654a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1329.228173] env[61839]: DEBUG oslo_vmware.api [None req-43a4331b-fed6-46c5-8166-fb8f9210fbf5 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Waiting for the task: (returnval){
[ 1329.228173] env[61839]: value = "task-1315210"
[ 1329.228173] env[61839]: _type = "Task"
[ 1329.228173] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1329.235493] env[61839]: DEBUG oslo_vmware.api [None req-43a4331b-fed6-46c5-8166-fb8f9210fbf5 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Task: {'id': task-1315210, 'name': ReconfigVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1329.737534] env[61839]: DEBUG oslo_vmware.api [None req-43a4331b-fed6-46c5-8166-fb8f9210fbf5 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Task: {'id': task-1315210, 'name': ReconfigVM_Task, 'duration_secs': 0.217559} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1329.737924] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-43a4331b-fed6-46c5-8166-fb8f9210fbf5 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] [instance: b0908d71-8a80-475d-b2c4-ba68ad93a91d] Reconfigured VM instance instance-00000071 to detach disk 2001 {{(pid=61839) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}}
[ 1329.742412] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0d44cfc6-cfff-49ec-80ef-072969bc90f3 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1329.757530] env[61839]: DEBUG oslo_vmware.api [None req-43a4331b-fed6-46c5-8166-fb8f9210fbf5 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Waiting for the task: (returnval){
[ 1329.757530] env[61839]: value = "task-1315211"
[ 1329.757530] env[61839]: _type = "Task"
[ 1329.757530] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1329.764958] env[61839]: DEBUG oslo_vmware.api [None req-43a4331b-fed6-46c5-8166-fb8f9210fbf5 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Task: {'id': task-1315211, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1330.268421] env[61839]: DEBUG oslo_vmware.api [None req-43a4331b-fed6-46c5-8166-fb8f9210fbf5 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Task: {'id': task-1315211, 'name': ReconfigVM_Task, 'duration_secs': 0.130065} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1330.268710] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-43a4331b-fed6-46c5-8166-fb8f9210fbf5 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] [instance: b0908d71-8a80-475d-b2c4-ba68ad93a91d] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-281474', 'volume_id': '26700da1-b262-49fc-a2d4-bad51042cd67', 'name': 'volume-26700da1-b262-49fc-a2d4-bad51042cd67', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'b0908d71-8a80-475d-b2c4-ba68ad93a91d', 'attached_at': '', 'detached_at': '', 'volume_id': '26700da1-b262-49fc-a2d4-bad51042cd67', 'serial': '26700da1-b262-49fc-a2d4-bad51042cd67'} {{(pid=61839) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}}
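Each vCenter call above (the ReconfigVM_Task pair) returns a task reference; oslo.vmware then polls it, logging progress ("progress is 0%.", "progress is 5%.") until the task reaches a terminal state (api.py:397/434/444). A sketch of that polling shape in plain Python; get_task_info and the state strings are hypothetical stand-ins, not the real oslo.vmware API:

    import time

    class TaskFailed(Exception):
        pass

    def wait_for_task(session, task_ref, interval=0.5):
        """Poll a task reference until it reaches a terminal state."""
        while True:
            info = session.get_task_info(task_ref)  # hypothetical helper
            if info.state == 'success':
                return info.result
            if info.state == 'error':
                raise TaskFailed(info.error)
            # 'queued' / 'running': log progress and try again, as the
            # "progress is N%." records above do.
            time.sleep(interval)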
[ 1330.808423] env[61839]: DEBUG nova.objects.instance [None req-43a4331b-fed6-46c5-8166-fb8f9210fbf5 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Lazy-loading 'flavor' on Instance uuid b0908d71-8a80-475d-b2c4-ba68ad93a91d {{(pid=61839) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}}
[ 1331.777637] env[61839]: DEBUG oslo_concurrency.lockutils [None req-f7be2aee-a194-4d9a-8e59-8e9aecf76401 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Acquiring lock "b0908d71-8a80-475d-b2c4-ba68ad93a91d" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1331.815804] env[61839]: DEBUG oslo_concurrency.lockutils [None req-43a4331b-fed6-46c5-8166-fb8f9210fbf5 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Lock "b0908d71-8a80-475d-b2c4-ba68ad93a91d" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.209s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1331.817426] env[61839]: DEBUG oslo_concurrency.lockutils [None req-f7be2aee-a194-4d9a-8e59-8e9aecf76401 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Lock "b0908d71-8a80-475d-b2c4-ba68ad93a91d" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.040s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1331.817628] env[61839]: DEBUG nova.compute.manager [None req-f7be2aee-a194-4d9a-8e59-8e9aecf76401 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] [instance: b0908d71-8a80-475d-b2c4-ba68ad93a91d] Checking state {{(pid=61839) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}}
[ 1331.818565] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95558e4a-37e8-4e33-aeea-b218ccc0e7a8 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1331.825824] env[61839]: DEBUG nova.compute.manager [None req-f7be2aee-a194-4d9a-8e59-8e9aecf76401 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] [instance: b0908d71-8a80-475d-b2c4-ba68ad93a91d] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=61839) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3368}}
[ 1331.826393] env[61839]: DEBUG nova.objects.instance [None req-f7be2aee-a194-4d9a-8e59-8e9aecf76401 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Lazy-loading 'flavor' on Instance uuid b0908d71-8a80-475d-b2c4-ba68ad93a91d {{(pid=61839) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}}
[ 1332.331638] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-f7be2aee-a194-4d9a-8e59-8e9aecf76401 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] [instance: b0908d71-8a80-475d-b2c4-ba68ad93a91d] Powering off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}}
[ 1332.331908] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ddb85976-3897-41da-8744-3d5c3314ed16 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1332.339320] env[61839]: DEBUG oslo_vmware.api [None req-f7be2aee-a194-4d9a-8e59-8e9aecf76401 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Waiting for the task: (returnval){
[ 1332.339320] env[61839]: value = "task-1315212"
[ 1332.339320] env[61839]: _type = "Task"
[ 1332.339320] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1332.346986] env[61839]: DEBUG oslo_vmware.api [None req-f7be2aee-a194-4d9a-8e59-8e9aecf76401 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Task: {'id': task-1315212, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1332.849066] env[61839]: DEBUG oslo_vmware.api [None req-f7be2aee-a194-4d9a-8e59-8e9aecf76401 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Task: {'id': task-1315212, 'name': PowerOffVM_Task, 'duration_secs': 0.196131} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1332.849431] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-f7be2aee-a194-4d9a-8e59-8e9aecf76401 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] [instance: b0908d71-8a80-475d-b2c4-ba68ad93a91d] Powered off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}}
[ 1332.849536] env[61839]: DEBUG nova.compute.manager [None req-f7be2aee-a194-4d9a-8e59-8e9aecf76401 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] [instance: b0908d71-8a80-475d-b2c4-ba68ad93a91d] Checking state {{(pid=61839) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}}
[ 1332.850250] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acf2f1ab-bac6-4efd-9c56-a8f990addd87 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1333.362050] env[61839]: DEBUG oslo_concurrency.lockutils [None req-f7be2aee-a194-4d9a-8e59-8e9aecf76401 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Lock "b0908d71-8a80-475d-b2c4-ba68ad93a91d" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.544s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1334.143760] env[61839]: DEBUG nova.objects.instance [None req-8f18c3f7-7a34-441e-bd18-dd280c416b08 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Lazy-loading 'flavor' on Instance uuid b0908d71-8a80-475d-b2c4-ba68ad93a91d {{(pid=61839) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}}
[ 1334.649096] env[61839]: DEBUG oslo_concurrency.lockutils [None req-8f18c3f7-7a34-441e-bd18-dd280c416b08 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Acquiring lock "refresh_cache-b0908d71-8a80-475d-b2c4-ba68ad93a91d" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1334.649333] env[61839]: DEBUG oslo_concurrency.lockutils [None req-8f18c3f7-7a34-441e-bd18-dd280c416b08 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Acquired lock "refresh_cache-b0908d71-8a80-475d-b2c4-ba68ad93a91d" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1334.649477] env[61839]: DEBUG nova.network.neutron [None req-8f18c3f7-7a34-441e-bd18-dd280c416b08 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] [instance: b0908d71-8a80-475d-b2c4-ba68ad93a91d] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}}
[ 1334.649661] env[61839]: DEBUG nova.objects.instance [None req-8f18c3f7-7a34-441e-bd18-dd280c416b08 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Lazy-loading 'info_cache' on Instance uuid b0908d71-8a80-475d-b2c4-ba68ad93a91d {{(pid=61839) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}}
[ 1335.154099] env[61839]: DEBUG nova.objects.base [None req-8f18c3f7-7a34-441e-bd18-dd280c416b08 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Object Instance lazy-loaded attributes: flavor,info_cache {{(pid=61839) wrapper /opt/stack/nova/nova/objects/base.py:126}}
[ 1335.869875] env[61839]: DEBUG nova.network.neutron [None req-8f18c3f7-7a34-441e-bd18-dd280c416b08 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] [instance: b0908d71-8a80-475d-b2c4-ba68ad93a91d] Updating instance_info_cache with network_info: [{"id": "7be9f7db-5c07-4100-90ea-1b774db19788", "address": "fa:16:3e:7f:80:33", "network": {"id": "10bd9432-d418-491e-9a5f-d1bd6d8de00d", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-220780264-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.131", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "14891ffcf4e240c7b870ee7dc63f28c7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "35ac9709-fd8b-4630-897a-68ed629d1b11", "external-id": "nsx-vlan-transportzone-284", "segmentation_id": 284, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7be9f7db-5c", "ovs_interfaceid": "7be9f7db-5c07-4100-90ea-1b774db19788", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1336.372720] env[61839]: DEBUG oslo_concurrency.lockutils [None req-8f18c3f7-7a34-441e-bd18-dd280c416b08 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Releasing lock "refresh_cache-b0908d71-8a80-475d-b2c4-ba68ad93a91d" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1336.876800] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f18c3f7-7a34-441e-bd18-dd280c416b08 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] [instance: b0908d71-8a80-475d-b2c4-ba68ad93a91d] Powering on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}}
[ 1336.877144] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-791a57b9-2db7-4b85-8a44-18ce59fb3e66 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1336.885210] env[61839]: DEBUG oslo_vmware.api [None req-8f18c3f7-7a34-441e-bd18-dd280c416b08 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Waiting for the task: (returnval){
[ 1336.885210] env[61839]: value = "task-1315213"
[ 1336.885210] env[61839]: _type = "Task"
[ 1336.885210] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1336.893080] env[61839]: DEBUG oslo_vmware.api [None req-8f18c3f7-7a34-441e-bd18-dd280c416b08 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Task: {'id': task-1315213, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1337.395778] env[61839]: DEBUG oslo_vmware.api [None req-8f18c3f7-7a34-441e-bd18-dd280c416b08 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Task: {'id': task-1315213, 'name': PowerOnVM_Task, 'duration_secs': 0.414799} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1337.396169] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f18c3f7-7a34-441e-bd18-dd280c416b08 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] [instance: b0908d71-8a80-475d-b2c4-ba68ad93a91d] Powered on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}}
[ 1337.396261] env[61839]: DEBUG nova.compute.manager [None req-8f18c3f7-7a34-441e-bd18-dd280c416b08 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] [instance: b0908d71-8a80-475d-b2c4-ba68ad93a91d] Checking state {{(pid=61839) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}}
[ 1337.397033] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1c15db3-1eeb-4db9-a0e3-f2306d8d8421 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1368.724635] env[61839]: DEBUG oslo_concurrency.lockutils [None req-d883e041-90ec-4c10-9347-37e9176b6665 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Acquiring lock "b0908d71-8a80-475d-b2c4-ba68ad93a91d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1368.725030] env[61839]: DEBUG oslo_concurrency.lockutils [None req-d883e041-90ec-4c10-9347-37e9176b6665 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Lock "b0908d71-8a80-475d-b2c4-ba68ad93a91d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1368.725146] env[61839]: DEBUG oslo_concurrency.lockutils [None req-d883e041-90ec-4c10-9347-37e9176b6665 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Acquiring lock "b0908d71-8a80-475d-b2c4-ba68ad93a91d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1368.725344] env[61839]: DEBUG oslo_concurrency.lockutils [None req-d883e041-90ec-4c10-9347-37e9176b6665 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Lock "b0908d71-8a80-475d-b2c4-ba68ad93a91d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1368.725521] env[61839]: DEBUG oslo_concurrency.lockutils [None req-d883e041-90ec-4c10-9347-37e9176b6665 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Lock "b0908d71-8a80-475d-b2c4-ba68ad93a91d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1368.729063] env[61839]: INFO nova.compute.manager [None req-d883e041-90ec-4c10-9347-37e9176b6665 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] [instance: b0908d71-8a80-475d-b2c4-ba68ad93a91d] Terminating instance
[ 1368.730945] env[61839]: DEBUG nova.compute.manager [None req-d883e041-90ec-4c10-9347-37e9176b6665 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] [instance: b0908d71-8a80-475d-b2c4-ba68ad93a91d] Start destroying the instance on the hypervisor. {{(pid=61839) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}}
[ 1368.731153] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-d883e041-90ec-4c10-9347-37e9176b6665 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] [instance: b0908d71-8a80-475d-b2c4-ba68ad93a91d] Destroying instance {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}}
[ 1368.731965] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61c62997-1a84-411b-8cd3-812accaaa459 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1368.740695] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-d883e041-90ec-4c10-9347-37e9176b6665 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] [instance: b0908d71-8a80-475d-b2c4-ba68ad93a91d] Powering off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}}
[ 1368.740909] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-541cff84-042d-457c-8a88-47f0d5d112fd {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1368.750558] env[61839]: DEBUG oslo_vmware.api [None req-d883e041-90ec-4c10-9347-37e9176b6665 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Waiting for the task: (returnval){
[ 1368.750558] env[61839]: value = "task-1315214"
[ 1368.750558] env[61839]: _type = "Task"
[ 1368.750558] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1368.757872] env[61839]: DEBUG oslo_vmware.api [None req-d883e041-90ec-4c10-9347-37e9176b6665 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Task: {'id': task-1315214, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1369.260491] env[61839]: DEBUG oslo_vmware.api [None req-d883e041-90ec-4c10-9347-37e9176b6665 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Task: {'id': task-1315214, 'name': PowerOffVM_Task, 'duration_secs': 0.168553} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1369.260766] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-d883e041-90ec-4c10-9347-37e9176b6665 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] [instance: b0908d71-8a80-475d-b2c4-ba68ad93a91d] Powered off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}}
[ 1369.260943] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-d883e041-90ec-4c10-9347-37e9176b6665 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] [instance: b0908d71-8a80-475d-b2c4-ba68ad93a91d] Unregistering the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}}
[ 1369.261207] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7a9e7199-571d-4bae-a719-c52069ee68dc {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1369.322118] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-d883e041-90ec-4c10-9347-37e9176b6665 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] [instance: b0908d71-8a80-475d-b2c4-ba68ad93a91d] Unregistered the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}}
[ 1369.322352] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-d883e041-90ec-4c10-9347-37e9176b6665 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] [instance: b0908d71-8a80-475d-b2c4-ba68ad93a91d] Deleting contents of the VM from datastore datastore1 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}}
[ 1369.322538] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-d883e041-90ec-4c10-9347-37e9176b6665 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Deleting the datastore file [datastore1] b0908d71-8a80-475d-b2c4-ba68ad93a91d {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}}
[ 1369.322796] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1d5f37e0-e1d7-4b07-8ad4-e735e8d05b8f {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1369.329217] env[61839]: DEBUG oslo_vmware.api [None req-d883e041-90ec-4c10-9347-37e9176b6665 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Waiting for the task: (returnval){
[ 1369.329217] env[61839]: value = "task-1315216"
[ 1369.329217] env[61839]: _type = "Task"
[ 1369.329217] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1369.336810] env[61839]: DEBUG oslo_vmware.api [None req-d883e041-90ec-4c10-9347-37e9176b6665 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Task: {'id': task-1315216, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1369.839401] env[61839]: DEBUG oslo_vmware.api [None req-d883e041-90ec-4c10-9347-37e9176b6665 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Task: {'id': task-1315216, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.164673} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1369.839785] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-d883e041-90ec-4c10-9347-37e9176b6665 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Deleted the datastore file {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}}
[ 1369.839977] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-d883e041-90ec-4c10-9347-37e9176b6665 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] [instance: b0908d71-8a80-475d-b2c4-ba68ad93a91d] Deleted contents of the VM from datastore datastore1 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}}
[ 1369.840198] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-d883e041-90ec-4c10-9347-37e9176b6665 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] [instance: b0908d71-8a80-475d-b2c4-ba68ad93a91d] Instance destroyed {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}}
[ 1369.840390] env[61839]: INFO nova.compute.manager [None req-d883e041-90ec-4c10-9347-37e9176b6665 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] [instance: b0908d71-8a80-475d-b2c4-ba68ad93a91d] Took 1.11 seconds to destroy the instance on the hypervisor.
[ 1369.840641] env[61839]: DEBUG oslo.service.loopingcall [None req-d883e041-90ec-4c10-9347-37e9176b6665 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 1369.840837] env[61839]: DEBUG nova.compute.manager [-] [instance: b0908d71-8a80-475d-b2c4-ba68ad93a91d] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}}
[ 1369.840933] env[61839]: DEBUG nova.network.neutron [-] [instance: b0908d71-8a80-475d-b2c4-ba68ad93a91d] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}}
[ 1370.272222] env[61839]: DEBUG nova.compute.manager [req-8aa7bd7c-33a6-44f2-9e76-0bde1fa1aa8f req-1dd7b09b-0e5b-41d5-8039-972381da2a51 service nova] [instance: b0908d71-8a80-475d-b2c4-ba68ad93a91d] Received event network-vif-deleted-7be9f7db-5c07-4100-90ea-1b774db19788 {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}}
[ 1370.272351] env[61839]: INFO nova.compute.manager [req-8aa7bd7c-33a6-44f2-9e76-0bde1fa1aa8f req-1dd7b09b-0e5b-41d5-8039-972381da2a51 service nova] [instance: b0908d71-8a80-475d-b2c4-ba68ad93a91d] Neutron deleted interface 7be9f7db-5c07-4100-90ea-1b774db19788; detaching it from the instance and deleting it from the info cache
[ 1370.272508] env[61839]: DEBUG nova.network.neutron [req-8aa7bd7c-33a6-44f2-9e76-0bde1fa1aa8f req-1dd7b09b-0e5b-41d5-8039-972381da2a51 service nova] [instance: b0908d71-8a80-475d-b2c4-ba68ad93a91d] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1370.748462] env[61839]: DEBUG nova.network.neutron [-] [instance: b0908d71-8a80-475d-b2c4-ba68ad93a91d] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1370.775434] env[61839]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-59123a09-18b8-4a52-b496-2cd50dabfd9e {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1370.787390] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b47a3c5-5fc6-486f-bddd-47cfee16ae7c {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1370.808902] env[61839]: DEBUG nova.compute.manager [req-8aa7bd7c-33a6-44f2-9e76-0bde1fa1aa8f req-1dd7b09b-0e5b-41d5-8039-972381da2a51 service nova] [instance: b0908d71-8a80-475d-b2c4-ba68ad93a91d] Detach interface failed, port_id=7be9f7db-5c07-4100-90ea-1b774db19788, reason: Instance b0908d71-8a80-475d-b2c4-ba68ad93a91d could not be found. {{(pid=61839) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}}
[ 1371.251594] env[61839]: INFO nova.compute.manager [-] [instance: b0908d71-8a80-475d-b2c4-ba68ad93a91d] Took 1.41 seconds to deallocate network for instance.
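The loopingcall record above shows the network teardown wrapped in a retry helper: the caller blocks until _deallocate_network_with_retries returns, and the inner call is re-invoked on failure. The shape of that pattern as a generic plain-Python sketch (the real helper lives in oslo.service's loopingcall module; deallocate_network below is a hypothetical stand-in):

    import time

    def call_with_retries(func, max_attempts=3, delay=1.0):
        """Retry func until it succeeds or attempts run out."""
        for attempt in range(1, max_attempts + 1):
            try:
                return func()
            except Exception:
                if attempt == max_attempts:
                    raise
                time.sleep(delay)  # back off before the next attempt

    # Hypothetical usage mirroring the record above:
    # call_with_retries(lambda: deallocate_network(context, instance))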
[ 1371.489646] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1371.758187] env[61839]: DEBUG oslo_concurrency.lockutils [None req-d883e041-90ec-4c10-9347-37e9176b6665 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1371.758477] env[61839]: DEBUG oslo_concurrency.lockutils [None req-d883e041-90ec-4c10-9347-37e9176b6665 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1371.758702] env[61839]: DEBUG nova.objects.instance [None req-d883e041-90ec-4c10-9347-37e9176b6665 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Lazy-loading 'resources' on Instance uuid b0908d71-8a80-475d-b2c4-ba68ad93a91d {{(pid=61839) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}}
[ 1372.292059] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4931458-1d56-4154-9a80-6b704794f633 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1372.299428] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a368eb5-1a14-4560-84fe-b0d844b856e5 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1372.329019] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cd06d52-0206-4117-b30e-4400c49a0568 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1372.335654] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5f0179f-49d7-4c44-9361-1595e2ff69f7 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1372.348603] env[61839]: DEBUG nova.compute.provider_tree [None req-d883e041-90ec-4c10-9347-37e9176b6665 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1372.489817] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1372.489979] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Cleaning up deleted instances {{(pid=61839) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11228}}
[ 1372.851797] env[61839]: DEBUG nova.scheduler.client.report [None req-d883e041-90ec-4c10-9347-37e9176b6665 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 1372.995201] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] There are 11 instances to clean {{(pid=61839) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11237}}
[ 1372.995392] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] [instance: af3978c4-3bd4-47da-a188-954bd6385183] Instance has had 0 of 5 cleanup attempts {{(pid=61839) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}}
[ 1373.356197] env[61839]: DEBUG oslo_concurrency.lockutils [None req-d883e041-90ec-4c10-9347-37e9176b6665 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.598s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1373.376561] env[61839]: INFO nova.scheduler.client.report [None req-d883e041-90ec-4c10-9347-37e9176b6665 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Deleted allocations for instance b0908d71-8a80-475d-b2c4-ba68ad93a91d
[ 1373.498151] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] [instance: fa0cfb44-5ebf-4472-af93-8f8c518714fd] Instance has had 0 of 5 cleanup attempts {{(pid=61839) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}}
[ 1373.886042] env[61839]: DEBUG oslo_concurrency.lockutils [None req-d883e041-90ec-4c10-9347-37e9176b6665 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Lock "b0908d71-8a80-475d-b2c4-ba68ad93a91d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.161s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1374.001060] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] [instance: 3e153d8a-e069-443c-9db4-7614a6475971] Instance has had 0 of 5 cleanup attempts {{(pid=61839) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}}
[ 1374.504097] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] [instance: 62af556c-c9b1-4de6-bb07-532ba67fa367] Instance has had 0 of 5 cleanup attempts {{(pid=61839) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}}
[ 1375.008400] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] [instance: 506ea987-ea38-444b-81b7-f5343de14e4f] Instance has had 0 of 5 cleanup attempts {{(pid=61839) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}}
[ 1375.510941] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] [instance: 6b12ef55-b566-4a74-a794-b4e4c41debe1] Instance has had 0 of 5 cleanup attempts {{(pid=61839) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}}
[ 1375.632340] env[61839]: DEBUG oslo_concurrency.lockutils [None req-28cb066c-7b58-4b35-80fd-cf1f5c2c2da6 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Acquiring lock "2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1375.632340] env[61839]: DEBUG oslo_concurrency.lockutils [None req-28cb066c-7b58-4b35-80fd-cf1f5c2c2da6 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Lock "2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1376.014971] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] [instance: dbd34858-9806-4d3f-b829-948651056da2] Instance has had 0 of 5 cleanup attempts {{(pid=61839) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}}
[ 1376.133991] env[61839]: DEBUG nova.compute.manager [None req-28cb066c-7b58-4b35-80fd-cf1f5c2c2da6 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] [instance: 2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe] Starting instance... {{(pid=61839) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}}
[ 1376.518207] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] [instance: 25c574c4-e39b-4009-a562-a4a5bf74a40c] Instance has had 0 of 5 cleanup attempts {{(pid=61839) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}}
[ 1376.651420] env[61839]: DEBUG oslo_concurrency.lockutils [None req-28cb066c-7b58-4b35-80fd-cf1f5c2c2da6 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1376.651673] env[61839]: DEBUG oslo_concurrency.lockutils [None req-28cb066c-7b58-4b35-80fd-cf1f5c2c2da6 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1376.653159] env[61839]: INFO nova.compute.claims [None req-28cb066c-7b58-4b35-80fd-cf1f5c2c2da6 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] [instance: 2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 1377.022048] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] [instance: a1defab7-8433-411d-b7e2-c31f6a34b8e0] Instance has had 0 of 5 cleanup attempts {{(pid=61839) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}}
[ 1377.524413] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] [instance: 625a8fc1-23fc-4035-855f-3d3a963cdcea] Instance has had 0 of 5 cleanup attempts {{(pid=61839) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}}
[ 1377.685525] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27ef1486-018e-431c-9961-cf901cdedc58 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1377.693298] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32e167ca-9931-4e6a-88c1-d7dcde4cfc1e {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1377.722279] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31d0e0b6-fef2-47bb-b484-7a3726f3e360 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1377.729151] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a94446a4-34dc-42aa-97e8-55041d59ad27 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1377.741651] env[61839]: DEBUG nova.compute.provider_tree [None req-28cb066c-7b58-4b35-80fd-cf1f5c2c2da6 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1378.027609] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] [instance: 7f7b3f51-3e96-49f1-a84a-81ae649e6938] Instance has had 0 of 5 cleanup attempts {{(pid=61839) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}}
[ 1378.244779] env[61839]: DEBUG nova.scheduler.client.report [None req-28cb066c-7b58-4b35-80fd-cf1f5c2c2da6 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 1378.531343] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1378.749604] env[61839]: DEBUG oslo_concurrency.lockutils [None req-28cb066c-7b58-4b35-80fd-cf1f5c2c2da6 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.098s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
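The claim records above show why the "compute_resources" lock keeps appearing with acquire/release pairs and hold times: every mutation of tracked resources (the instance_claim for the new instance, the update_usage for the one just deleted) is serialized under that single named lock, so a claim and a usage update cannot interleave. A simplified sketch of the pattern with oslo.concurrency; the tracker class below is a stand-in, not Nova's ResourceTracker:

    from oslo_concurrency import lockutils

    class TrackerSketch:
        """Stand-in for the resource tracker seen in the log."""

        def __init__(self, vcpus_total):
            self.vcpus_total = vcpus_total
            self.vcpus_used = 0

        def instance_claim(self, flavor_vcpus):
            # Test-and-set under the same named lock the log records show.
            with lockutils.lock('compute_resources'):
                if self.vcpus_used + flavor_vcpus > self.vcpus_total:
                    raise RuntimeError('insufficient vCPUs for claim')
                self.vcpus_used += flavor_vcpus

        def update_usage(self, delta_vcpus):
            with lockutils.lock('compute_resources'):
                self.vcpus_used += delta_vcpus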
nova.compute.manager [None req-28cb066c-7b58-4b35-80fd-cf1f5c2c2da6 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] [instance: 2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe] Start building networks asynchronously for instance. {{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1379.255391] env[61839]: DEBUG nova.compute.utils [None req-28cb066c-7b58-4b35-80fd-cf1f5c2c2da6 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Using /dev/sd instead of None {{(pid=61839) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1379.257349] env[61839]: DEBUG nova.compute.manager [None req-28cb066c-7b58-4b35-80fd-cf1f5c2c2da6 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] [instance: 2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe] Allocating IP information in the background. {{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1379.257558] env[61839]: DEBUG nova.network.neutron [None req-28cb066c-7b58-4b35-80fd-cf1f5c2c2da6 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] [instance: 2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe] allocate_for_instance() {{(pid=61839) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1379.315197] env[61839]: DEBUG nova.policy [None req-28cb066c-7b58-4b35-80fd-cf1f5c2c2da6 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'eee6a611f68c4330978c45483d386341', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '14891ffcf4e240c7b870ee7dc63f28c7', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61839) authorize /opt/stack/nova/nova/policy.py:201}} [ 1379.596984] env[61839]: DEBUG nova.network.neutron [None req-28cb066c-7b58-4b35-80fd-cf1f5c2c2da6 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] [instance: 2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe] Successfully created port: 3624d944-a5f5-4931-ab01-2ac46a579a1d {{(pid=61839) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1379.761779] env[61839]: DEBUG nova.compute.manager [None req-28cb066c-7b58-4b35-80fd-cf1f5c2c2da6 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] [instance: 2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe] Start building block device mappings for instance. 
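
The "Acquiring lock ... / acquired ... waited / released ... held" triples around instance_claim in this stretch come from oslo.concurrency: the resource tracker serializes claims behind an in-process "compute_resources" lock, and the decorator logs the wait and hold times on either side of the critical section. A minimal sketch of that pattern, with a hypothetical stand-in for the claim bookkeeping:

    from oslo_concurrency import lockutils

    def claim_cpu_ram_disk_for(instance):
        pass  # hypothetical stand-in for the real claim bookkeeping

    @lockutils.synchronized('compute_resources')
    def instance_claim(instance):
        # Runs single-threaded per lock name; lockutils emits the
        # "acquired ... waited Ns" and "released ... held Ns" lines.
        claim_cpu_ram_disk_for(instance)
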
{{(pid=61839) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1380.033066] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1380.033262] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Starting heal instance info cache {{(pid=61839) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 1380.033382] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Rebuilding the list of instances to heal {{(pid=61839) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1380.536147] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] [instance: 2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe] Skipping network cache update for instance because it is Building. {{(pid=61839) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 1380.536335] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Didn't find any instances for network info cache update. {{(pid=61839) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10014}} [ 1380.536544] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1380.536704] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1380.536862] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1380.537024] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1380.771892] env[61839]: DEBUG nova.compute.manager [None req-28cb066c-7b58-4b35-80fd-cf1f5c2c2da6 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] [instance: 2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe] Start spawning the instance on the hypervisor. 
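
The burst of "Running periodic task ComputeManager._*" records above is the oslo.service periodic-task loop firing: each task is just a decorated method on the compute manager. A rough sketch of how such tasks are declared (class and bodies abbreviated; the spacing value is illustrative):

    from oslo_service import periodic_task

    class Manager(periodic_task.PeriodicTasks):
        @periodic_task.periodic_task
        def _poll_volume_usage(self, context):
            pass  # runs on every tick of the periodic loop

        @periodic_task.periodic_task(spacing=60)
        def _heal_instance_info_cache(self, context):
            pass  # refreshes one instance's network info cache per pass
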
{{(pid=61839) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1380.798389] env[61839]: DEBUG nova.virt.hardware [None req-28cb066c-7b58-4b35-80fd-cf1f5c2c2da6 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-18T16:51:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-18T16:50:57Z,direct_url=<?>,disk_format='vmdk',id=e497cc62-282a-4a70-9770-22d80d8a1013,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a98e034276e44534a9feace637762da7',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-10-18T16:50:58Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1380.798645] env[61839]: DEBUG nova.virt.hardware [None req-28cb066c-7b58-4b35-80fd-cf1f5c2c2da6 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Flavor limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1380.798807] env[61839]: DEBUG nova.virt.hardware [None req-28cb066c-7b58-4b35-80fd-cf1f5c2c2da6 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Image limits 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1380.798992] env[61839]: DEBUG nova.virt.hardware [None req-28cb066c-7b58-4b35-80fd-cf1f5c2c2da6 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Flavor pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1380.799157] env[61839]: DEBUG nova.virt.hardware [None req-28cb066c-7b58-4b35-80fd-cf1f5c2c2da6 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Image pref 0:0:0 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1380.799336] env[61839]: DEBUG nova.virt.hardware [None req-28cb066c-7b58-4b35-80fd-cf1f5c2c2da6 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61839) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1380.799554] env[61839]: DEBUG nova.virt.hardware [None req-28cb066c-7b58-4b35-80fd-cf1f5c2c2da6 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1380.799719] env[61839]: DEBUG nova.virt.hardware [None req-28cb066c-7b58-4b35-80fd-cf1f5c2c2da6 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1380.799889] env[61839]: DEBUG nova.virt.hardware [None
req-28cb066c-7b58-4b35-80fd-cf1f5c2c2da6 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Got 1 possible topologies {{(pid=61839) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1380.800069] env[61839]: DEBUG nova.virt.hardware [None req-28cb066c-7b58-4b35-80fd-cf1f5c2c2da6 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1380.800254] env[61839]: DEBUG nova.virt.hardware [None req-28cb066c-7b58-4b35-80fd-cf1f5c2c2da6 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61839) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1380.801117] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14c1134f-e719-4ff7-beab-f882c27a8b22 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.808915] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbe8b81a-c0b5-49b3-b2f2-b40f96260a88 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.953378] env[61839]: DEBUG nova.compute.manager [req-825cad2f-d4e5-492d-bd74-4b1ed26eebe4 req-6905de36-b383-4dca-9df9-6a1fb9d12ca8 service nova] [instance: 2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe] Received event network-vif-plugged-3624d944-a5f5-4931-ab01-2ac46a579a1d {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1380.953611] env[61839]: DEBUG oslo_concurrency.lockutils [req-825cad2f-d4e5-492d-bd74-4b1ed26eebe4 req-6905de36-b383-4dca-9df9-6a1fb9d12ca8 service nova] Acquiring lock "2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1380.953831] env[61839]: DEBUG oslo_concurrency.lockutils [req-825cad2f-d4e5-492d-bd74-4b1ed26eebe4 req-6905de36-b383-4dca-9df9-6a1fb9d12ca8 service nova] Lock "2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1380.954023] env[61839]: DEBUG oslo_concurrency.lockutils [req-825cad2f-d4e5-492d-bd74-4b1ed26eebe4 req-6905de36-b383-4dca-9df9-6a1fb9d12ca8 service nova] Lock "2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1380.954520] env[61839]: DEBUG nova.compute.manager [req-825cad2f-d4e5-492d-bd74-4b1ed26eebe4 req-6905de36-b383-4dca-9df9-6a1fb9d12ca8 service nova] [instance: 2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe] No waiting events found dispatching network-vif-plugged-3624d944-a5f5-4931-ab01-2ac46a579a1d {{(pid=61839) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1380.954716] env[61839]: WARNING nova.compute.manager [req-825cad2f-d4e5-492d-bd74-4b1ed26eebe4
req-6905de36-b383-4dca-9df9-6a1fb9d12ca8 service nova] [instance: 2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe] Received unexpected event network-vif-plugged-3624d944-a5f5-4931-ab01-2ac46a579a1d for instance with vm_state building and task_state spawning. [ 1381.036618] env[61839]: DEBUG nova.network.neutron [None req-28cb066c-7b58-4b35-80fd-cf1f5c2c2da6 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] [instance: 2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe] Successfully updated port: 3624d944-a5f5-4931-ab01-2ac46a579a1d {{(pid=61839) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1381.039743] env[61839]: DEBUG oslo_concurrency.lockutils [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1381.039956] env[61839]: DEBUG oslo_concurrency.lockutils [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1381.040128] env[61839]: DEBUG oslo_concurrency.lockutils [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1381.040332] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61839) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1381.041211] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68da6d40-7bb7-4906-8fb9-855ab76d01cd {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.049727] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84786496-b149-4181-8b39-74bcdc94493b {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.063585] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccde8f8f-9f51-4b66-801e-9de0a48cdc46 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.069742] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74da11a0-a676-4c87-bbdd-e41bee9088c5 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.100221] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181324MB free_disk=135GB free_vcpus=48 pci_devices=None {{(pid=61839) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1381.100365] env[61839]: DEBUG oslo_concurrency.lockutils [None 
req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1381.100550] env[61839]: DEBUG oslo_concurrency.lockutils [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1381.539369] env[61839]: DEBUG oslo_concurrency.lockutils [None req-28cb066c-7b58-4b35-80fd-cf1f5c2c2da6 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Acquiring lock "refresh_cache-2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1381.539526] env[61839]: DEBUG oslo_concurrency.lockutils [None req-28cb066c-7b58-4b35-80fd-cf1f5c2c2da6 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Acquired lock "refresh_cache-2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1381.539682] env[61839]: DEBUG nova.network.neutron [None req-28cb066c-7b58-4b35-80fd-cf1f5c2c2da6 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] [instance: 2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe] Building network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1382.069325] env[61839]: DEBUG nova.network.neutron [None req-28cb066c-7b58-4b35-80fd-cf1f5c2c2da6 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] [instance: 2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe] Instance cache missing network info. {{(pid=61839) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1382.187028] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance 2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1382.187268] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Total usable vcpus: 48, total allocated vcpus: 1 {{(pid=61839) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1382.187428] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=704MB phys_disk=200GB used_disk=1GB total_vcpus=48 used_vcpus=1 pci_stats=[] {{(pid=61839) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1382.190113] env[61839]: DEBUG nova.network.neutron [None req-28cb066c-7b58-4b35-80fd-cf1f5c2c2da6 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] [instance: 2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe] Updating instance_info_cache with network_info: [{"id": "3624d944-a5f5-4931-ab01-2ac46a579a1d", "address": "fa:16:3e:06:20:57", "network": {"id": "10bd9432-d418-491e-9a5f-d1bd6d8de00d", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-220780264-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "14891ffcf4e240c7b870ee7dc63f28c7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "35ac9709-fd8b-4630-897a-68ed629d1b11", "external-id": "nsx-vlan-transportzone-284", "segmentation_id": 284, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3624d944-a5", "ovs_interfaceid": "3624d944-a5f5-4931-ab01-2ac46a579a1d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1382.213945] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a13b6836-30c0-4149-8add-21326dfaea3d {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.221355] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88ae497f-c281-44d1-9574-e0cf55fe2b10 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.249724] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8986ef5-0aab-4cf9-8218-62fb65127c8a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.256681] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bb73f35-71a3-4d30-a2f5-6d3594382727 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.269271] env[61839]: DEBUG nova.compute.provider_tree [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] 
Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1382.693895] env[61839]: DEBUG oslo_concurrency.lockutils [None req-28cb066c-7b58-4b35-80fd-cf1f5c2c2da6 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Releasing lock "refresh_cache-2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1382.694251] env[61839]: DEBUG nova.compute.manager [None req-28cb066c-7b58-4b35-80fd-cf1f5c2c2da6 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] [instance: 2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe] Instance network_info: |[{"id": "3624d944-a5f5-4931-ab01-2ac46a579a1d", "address": "fa:16:3e:06:20:57", "network": {"id": "10bd9432-d418-491e-9a5f-d1bd6d8de00d", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-220780264-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "14891ffcf4e240c7b870ee7dc63f28c7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "35ac9709-fd8b-4630-897a-68ed629d1b11", "external-id": "nsx-vlan-transportzone-284", "segmentation_id": 284, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3624d944-a5", "ovs_interfaceid": "3624d944-a5f5-4931-ab01-2ac46a579a1d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61839) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1382.694704] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-28cb066c-7b58-4b35-80fd-cf1f5c2c2da6 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] [instance: 2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:06:20:57', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '35ac9709-fd8b-4630-897a-68ed629d1b11', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3624d944-a5f5-4931-ab01-2ac46a579a1d', 'vif_model': 'vmxnet3'}] {{(pid=61839) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1382.703843] env[61839]: DEBUG oslo.service.loopingcall [None req-28cb066c-7b58-4b35-80fd-cf1f5c2c2da6 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
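
The "Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return" record is oslo.service's loopingcall machinery wrapping the VM-creation call. The underlying idiom is poll-until-done; a minimal self-contained sketch, with a toy counter standing in for "the vCenter task finished":

    from oslo_service import loopingcall

    state = {'ticks': 0}

    def _poll():
        state['ticks'] += 1
        if state['ticks'] >= 3:  # toy stand-in for the real completion check
            raise loopingcall.LoopingCallDone(retvalue='done')

    timer = loopingcall.FixedIntervalLoopingCall(_poll)
    result = timer.start(interval=0.5).wait()  # returns 'done'
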
{{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1382.704082] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe] Creating VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1382.704313] env[61839]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b60d101a-1d99-447e-a477-6d4144d565ad {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.723918] env[61839]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1382.723918] env[61839]: value = "task-1315217" [ 1382.723918] env[61839]: _type = "Task" [ 1382.723918] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1382.731538] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1315217, 'name': CreateVM_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1382.772555] env[61839]: DEBUG nova.scheduler.client.report [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1382.981897] env[61839]: DEBUG nova.compute.manager [req-7f1afc67-b232-4dd3-b8b4-898d93f3428c req-1590db47-7f98-4054-96a1-cdb0214a18ae service nova] [instance: 2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe] Received event network-changed-3624d944-a5f5-4931-ab01-2ac46a579a1d {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1382.982139] env[61839]: DEBUG nova.compute.manager [req-7f1afc67-b232-4dd3-b8b4-898d93f3428c req-1590db47-7f98-4054-96a1-cdb0214a18ae service nova] [instance: 2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe] Refreshing instance network info cache due to event network-changed-3624d944-a5f5-4931-ab01-2ac46a579a1d. 
{{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1382.982385] env[61839]: DEBUG oslo_concurrency.lockutils [req-7f1afc67-b232-4dd3-b8b4-898d93f3428c req-1590db47-7f98-4054-96a1-cdb0214a18ae service nova] Acquiring lock "refresh_cache-2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1382.982560] env[61839]: DEBUG oslo_concurrency.lockutils [req-7f1afc67-b232-4dd3-b8b4-898d93f3428c req-1590db47-7f98-4054-96a1-cdb0214a18ae service nova] Acquired lock "refresh_cache-2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1382.982730] env[61839]: DEBUG nova.network.neutron [req-7f1afc67-b232-4dd3-b8b4-898d93f3428c req-1590db47-7f98-4054-96a1-cdb0214a18ae service nova] [instance: 2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe] Refreshing network info cache for port 3624d944-a5f5-4931-ab01-2ac46a579a1d {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1383.233851] env[61839]: DEBUG oslo_vmware.api [-] Task: {'id': task-1315217, 'name': CreateVM_Task, 'duration_secs': 0.319972} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1383.234224] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe] Created VM on the ESX host {{(pid=61839) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1383.234725] env[61839]: DEBUG oslo_concurrency.lockutils [None req-28cb066c-7b58-4b35-80fd-cf1f5c2c2da6 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1383.234904] env[61839]: DEBUG oslo_concurrency.lockutils [None req-28cb066c-7b58-4b35-80fd-cf1f5c2c2da6 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1383.235265] env[61839]: DEBUG oslo_concurrency.lockutils [None req-28cb066c-7b58-4b35-80fd-cf1f5c2c2da6 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1383.235529] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e51c9132-8f5c-4dd8-ac61-d057cd7291a3 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.239692] env[61839]: DEBUG oslo_vmware.api [None req-28cb066c-7b58-4b35-80fd-cf1f5c2c2da6 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Waiting for the task: (returnval){ [ 1383.239692] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]5293f72b-b952-2686-7aa9-9fb2d44fd59a" [ 1383.239692] env[61839]: _type = "Task" [ 1383.239692] env[61839]: } to complete. 
{{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1383.247440] env[61839]: DEBUG oslo_vmware.api [None req-28cb066c-7b58-4b35-80fd-cf1f5c2c2da6 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]5293f72b-b952-2686-7aa9-9fb2d44fd59a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1383.276731] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61839) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1383.276906] env[61839]: DEBUG oslo_concurrency.lockutils [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.176s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1383.663769] env[61839]: DEBUG nova.network.neutron [req-7f1afc67-b232-4dd3-b8b4-898d93f3428c req-1590db47-7f98-4054-96a1-cdb0214a18ae service nova] [instance: 2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe] Updated VIF entry in instance network info cache for port 3624d944-a5f5-4931-ab01-2ac46a579a1d. {{(pid=61839) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1383.664157] env[61839]: DEBUG nova.network.neutron [req-7f1afc67-b232-4dd3-b8b4-898d93f3428c req-1590db47-7f98-4054-96a1-cdb0214a18ae service nova] [instance: 2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe] Updating instance_info_cache with network_info: [{"id": "3624d944-a5f5-4931-ab01-2ac46a579a1d", "address": "fa:16:3e:06:20:57", "network": {"id": "10bd9432-d418-491e-9a5f-d1bd6d8de00d", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-220780264-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "14891ffcf4e240c7b870ee7dc63f28c7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "35ac9709-fd8b-4630-897a-68ed629d1b11", "external-id": "nsx-vlan-transportzone-284", "segmentation_id": 284, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3624d944-a5", "ovs_interfaceid": "3624d944-a5f5-4931-ab01-2ac46a579a1d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1383.749834] env[61839]: DEBUG oslo_vmware.api [None req-28cb066c-7b58-4b35-80fd-cf1f5c2c2da6 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]5293f72b-b952-2686-7aa9-9fb2d44fd59a, 'name': SearchDatastore_Task, 'duration_secs': 0.011531} completed successfully. 
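
Each Task: {'id': ..., 'name': ...} "progress is N%" / "completed successfully" pair above is oslo.vmware polling a vCenter task to completion. A sketch of that call pattern; the endpoint, credentials, datastore browser reference, and search spec are placeholders, not values from this run:

    from oslo_vmware import api

    def make_session():
        # Placeholder endpoint and credentials; in Nova these come from
        # the [vmware] section of nova.conf.
        return api.VMwareAPISession('vc.example.test', 'user', 'secret',
                                    api_retry_count=10,
                                    task_poll_interval=0.5)

    def search_datastore(session, ds_browser, spec):
        # Submit the task, then block while wait_for_task polls it,
        # logging progress until vCenter reports success.
        task = session.invoke_api(session.vim, 'SearchDatastore_Task',
                                  ds_browser, datastorePath='[datastore1]',
                                  searchSpec=spec)
        return session.wait_for_task(task)
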
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1383.750143] env[61839]: DEBUG oslo_concurrency.lockutils [None req-28cb066c-7b58-4b35-80fd-cf1f5c2c2da6 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1383.750451] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-28cb066c-7b58-4b35-80fd-cf1f5c2c2da6 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] [instance: 2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe] Processing image e497cc62-282a-4a70-9770-22d80d8a1013 {{(pid=61839) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1383.750711] env[61839]: DEBUG oslo_concurrency.lockutils [None req-28cb066c-7b58-4b35-80fd-cf1f5c2c2da6 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1383.750867] env[61839]: DEBUG oslo_concurrency.lockutils [None req-28cb066c-7b58-4b35-80fd-cf1f5c2c2da6 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1383.751064] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-28cb066c-7b58-4b35-80fd-cf1f5c2c2da6 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1383.751489] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-cda7c0ef-6fa5-424c-a462-92a5eab52250 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.759314] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-28cb066c-7b58-4b35-80fd-cf1f5c2c2da6 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61839) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1383.759491] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-28cb066c-7b58-4b35-80fd-cf1f5c2c2da6 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61839) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1383.760150] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bd132536-db15-4b33-8ffd-af5e4af7b241 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.764802] env[61839]: DEBUG oslo_vmware.api [None req-28cb066c-7b58-4b35-80fd-cf1f5c2c2da6 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Waiting for the task: (returnval){ [ 1383.764802] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52787c35-95f3-133b-3b5d-d8ada557ec6a" [ 1383.764802] env[61839]: _type = "Task" [ 1383.764802] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1383.771628] env[61839]: DEBUG oslo_vmware.api [None req-28cb066c-7b58-4b35-80fd-cf1f5c2c2da6 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52787c35-95f3-133b-3b5d-d8ada557ec6a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1384.167403] env[61839]: DEBUG oslo_concurrency.lockutils [req-7f1afc67-b232-4dd3-b8b4-898d93f3428c req-1590db47-7f98-4054-96a1-cdb0214a18ae service nova] Releasing lock "refresh_cache-2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1384.275043] env[61839]: DEBUG oslo_vmware.api [None req-28cb066c-7b58-4b35-80fd-cf1f5c2c2da6 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52787c35-95f3-133b-3b5d-d8ada557ec6a, 'name': SearchDatastore_Task, 'duration_secs': 0.0085} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1384.275827] env[61839]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-76efc176-5975-43df-acf0-5a54509f3aae {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.280540] env[61839]: DEBUG oslo_vmware.api [None req-28cb066c-7b58-4b35-80fd-cf1f5c2c2da6 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Waiting for the task: (returnval){ [ 1384.280540] env[61839]: value = "session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52f42c52-01f8-64e6-2e90-bb829504bcfc" [ 1384.280540] env[61839]: _type = "Task" [ 1384.280540] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1384.287397] env[61839]: DEBUG oslo_vmware.api [None req-28cb066c-7b58-4b35-80fd-cf1f5c2c2da6 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52f42c52-01f8-64e6-2e90-bb829504bcfc, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1384.489240] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1384.489517] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1384.489692] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1384.489837] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61839) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 1384.490017] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1384.490148] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Cleaning up deleted instances with incomplete migration {{(pid=61839) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11266}} [ 1384.790693] env[61839]: DEBUG oslo_vmware.api [None req-28cb066c-7b58-4b35-80fd-cf1f5c2c2da6 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Task: {'id': session[524205e0-9d33-34aa-ec24-5bc23a6d28c0]52f42c52-01f8-64e6-2e90-bb829504bcfc, 'name': SearchDatastore_Task, 'duration_secs': 0.010337} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1384.790964] env[61839]: DEBUG oslo_concurrency.lockutils [None req-28cb066c-7b58-4b35-80fd-cf1f5c2c2da6 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1384.791252] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-28cb066c-7b58-4b35-80fd-cf1f5c2c2da6 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk to [datastore1] 2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe/2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1384.791505] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e3eb7dc3-321b-4df6-ae09-67d67a0fe42d {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.798572] env[61839]: DEBUG oslo_vmware.api [None req-28cb066c-7b58-4b35-80fd-cf1f5c2c2da6 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Waiting for the task: (returnval){ [ 1384.798572] env[61839]: value = "task-1315218" [ 1384.798572] env[61839]: _type = "Task" [ 1384.798572] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1385.311443] env[61839]: DEBUG oslo_vmware.api [None req-28cb066c-7b58-4b35-80fd-cf1f5c2c2da6 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Task: {'id': task-1315218, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1385.810070] env[61839]: DEBUG oslo_vmware.api [None req-28cb066c-7b58-4b35-80fd-cf1f5c2c2da6 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Task: {'id': task-1315218, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.532409} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1385.810070] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-28cb066c-7b58-4b35-80fd-cf1f5c2c2da6 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e497cc62-282a-4a70-9770-22d80d8a1013/e497cc62-282a-4a70-9770-22d80d8a1013.vmdk to [datastore1] 2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe/2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe.vmdk {{(pid=61839) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1385.810409] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-28cb066c-7b58-4b35-80fd-cf1f5c2c2da6 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] [instance: 2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe] Extending root virtual disk to 1048576 {{(pid=61839) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1385.810576] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2bcf5680-b445-4739-90c7-bd332122622f {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.819026] env[61839]: DEBUG oslo_vmware.api [None req-28cb066c-7b58-4b35-80fd-cf1f5c2c2da6 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Waiting for the task: (returnval){ [ 1385.819026] env[61839]: value = "task-1315219" [ 1385.819026] env[61839]: _type = "Task" [ 1385.819026] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1385.826087] env[61839]: DEBUG oslo_vmware.api [None req-28cb066c-7b58-4b35-80fd-cf1f5c2c2da6 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Task: {'id': task-1315219, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1386.328471] env[61839]: DEBUG oslo_vmware.api [None req-28cb066c-7b58-4b35-80fd-cf1f5c2c2da6 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Task: {'id': task-1315219, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.220042} completed successfully. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1386.328871] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-28cb066c-7b58-4b35-80fd-cf1f5c2c2da6 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] [instance: 2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe] Extended root virtual disk {{(pid=61839) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1386.329539] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bd0103e-9e22-41af-95ba-e6881ab581b1 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.350617] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-28cb066c-7b58-4b35-80fd-cf1f5c2c2da6 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] [instance: 2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe] Reconfiguring VM instance instance-00000072 to attach disk [datastore1] 2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe/2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe.vmdk or device None with type sparse {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1386.350885] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-313ce33d-6b2d-4604-8c73-ae44d4282dcc {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.370357] env[61839]: DEBUG oslo_vmware.api [None req-28cb066c-7b58-4b35-80fd-cf1f5c2c2da6 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Waiting for the task: (returnval){ [ 1386.370357] env[61839]: value = "task-1315220" [ 1386.370357] env[61839]: _type = "Task" [ 1386.370357] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1386.378148] env[61839]: DEBUG oslo_vmware.api [None req-28cb066c-7b58-4b35-80fd-cf1f5c2c2da6 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Task: {'id': task-1315220, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1386.880349] env[61839]: DEBUG oslo_vmware.api [None req-28cb066c-7b58-4b35-80fd-cf1f5c2c2da6 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Task: {'id': task-1315220, 'name': ReconfigVM_Task, 'duration_secs': 0.378679} completed successfully. 
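
The sequence just traced is the image-backed root-disk pipeline: copy the cached base VMDK into the instance directory, extend it to the flavor's root size (the 1048576 above is in KB, i.e. 1 GiB for m1.nano's root_gb=1), then reconfigure the VM to attach the result. Condensed below into hypothetical helpers named after the vCenter tasks they would drive:

    def copy_virtual_disk(session, dc_ref, src, dst):
        pass  # would invoke CopyVirtualDisk_Task and wait on it

    def extend_virtual_disk(session, dc_ref, path, size_kb):
        pass  # would invoke ExtendVirtualDisk_Task and wait on it

    def attach_disk_to_vm(session, vm_ref, path, disk_type):
        pass  # would invoke ReconfigVM_Task with a new disk device spec

    GiB_IN_KB = 1024 * 1024

    def provision_root_disk(session, dc_ref, vm_ref, image_id, uuid, root_gb):
        cache = ('[datastore1] devstack-image-cache_base/%s/%s.vmdk'
                 % (image_id, image_id))
        root = '[datastore1] %s/%s.vmdk' % (uuid, uuid)
        copy_virtual_disk(session, dc_ref, cache, root)
        extend_virtual_disk(session, dc_ref, root, root_gb * GiB_IN_KB)
        attach_disk_to_vm(session, vm_ref, root, 'sparse')
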
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1386.880627] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-28cb066c-7b58-4b35-80fd-cf1f5c2c2da6 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] [instance: 2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe] Reconfigured VM instance instance-00000072 to attach disk [datastore1] 2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe/2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe.vmdk or device None with type sparse {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1386.881257] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-47c8960b-806c-4c1d-9dea-cac1a3cf6f3a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.887398] env[61839]: DEBUG oslo_vmware.api [None req-28cb066c-7b58-4b35-80fd-cf1f5c2c2da6 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Waiting for the task: (returnval){ [ 1386.887398] env[61839]: value = "task-1315221" [ 1386.887398] env[61839]: _type = "Task" [ 1386.887398] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1386.895220] env[61839]: DEBUG oslo_vmware.api [None req-28cb066c-7b58-4b35-80fd-cf1f5c2c2da6 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Task: {'id': task-1315221, 'name': Rename_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1387.397394] env[61839]: DEBUG oslo_vmware.api [None req-28cb066c-7b58-4b35-80fd-cf1f5c2c2da6 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Task: {'id': task-1315221, 'name': Rename_Task, 'duration_secs': 0.182565} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1387.397789] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-28cb066c-7b58-4b35-80fd-cf1f5c2c2da6 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] [instance: 2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe] Powering on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1387.397906] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8e4a0f2e-e1ee-4a7d-96e5-3652c8969318 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.403798] env[61839]: DEBUG oslo_vmware.api [None req-28cb066c-7b58-4b35-80fd-cf1f5c2c2da6 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Waiting for the task: (returnval){ [ 1387.403798] env[61839]: value = "task-1315222" [ 1387.403798] env[61839]: _type = "Task" [ 1387.403798] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1387.410920] env[61839]: DEBUG oslo_vmware.api [None req-28cb066c-7b58-4b35-80fd-cf1f5c2c2da6 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Task: {'id': task-1315222, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1387.915046] env[61839]: DEBUG oslo_vmware.api [None req-28cb066c-7b58-4b35-80fd-cf1f5c2c2da6 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Task: {'id': task-1315222, 'name': PowerOnVM_Task} progress is 100%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1388.414184] env[61839]: DEBUG oslo_vmware.api [None req-28cb066c-7b58-4b35-80fd-cf1f5c2c2da6 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Task: {'id': task-1315222, 'name': PowerOnVM_Task, 'duration_secs': 0.532428} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1388.414543] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-28cb066c-7b58-4b35-80fd-cf1f5c2c2da6 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] [instance: 2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe] Powered on the VM {{(pid=61839) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1388.414647] env[61839]: INFO nova.compute.manager [None req-28cb066c-7b58-4b35-80fd-cf1f5c2c2da6 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] [instance: 2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe] Took 7.64 seconds to spawn the instance on the hypervisor. [ 1388.414811] env[61839]: DEBUG nova.compute.manager [None req-28cb066c-7b58-4b35-80fd-cf1f5c2c2da6 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] [instance: 2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe] Checking state {{(pid=61839) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1388.415570] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65b91614-39df-442b-8977-3f38ab265a27 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1388.935066] env[61839]: INFO nova.compute.manager [None req-28cb066c-7b58-4b35-80fd-cf1f5c2c2da6 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] [instance: 2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe] Took 12.30 seconds to build instance. [ 1389.436816] env[61839]: DEBUG oslo_concurrency.lockutils [None req-28cb066c-7b58-4b35-80fd-cf1f5c2c2da6 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Lock "2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 13.804s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1389.674136] env[61839]: DEBUG nova.compute.manager [req-032165a3-a199-45e1-b0b1-dae3d58d2133 req-f152d6ce-131a-4413-a004-b283bc47001d service nova] [instance: 2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe] Received event network-changed-3624d944-a5f5-4931-ab01-2ac46a579a1d {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1389.674348] env[61839]: DEBUG nova.compute.manager [req-032165a3-a199-45e1-b0b1-dae3d58d2133 req-f152d6ce-131a-4413-a004-b283bc47001d service nova] [instance: 2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe] Refreshing instance network info cache due to event network-changed-3624d944-a5f5-4931-ab01-2ac46a579a1d.
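
The network-changed event that follows the build is the standard cache-refresh cycle: Neutron notifies Nova about the port, and the handler re-reads the port's state under the instance's "refresh_cache-<uuid>" lock before rewriting the info cache. A simplified sketch; the two helpers are illustrative, not Nova's exact internals:

    from oslo_concurrency import lockutils

    def build_network_info_model(context, instance, port_id):
        return []  # illustrative stand-in for the Neutron-backed rebuild

    def write_instance_cache(instance, nw_info):
        pass  # illustrative stand-in for the DB cache update

    def handle_network_changed(context, instance, port_id):
        # Same lock-name family as the log: "refresh_cache-<instance uuid>".
        with lockutils.lock('refresh_cache-%s' % instance.uuid):
            nw_info = build_network_info_model(context, instance, port_id)
            write_instance_cache(instance, nw_info)
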
{{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1389.674568] env[61839]: DEBUG oslo_concurrency.lockutils [req-032165a3-a199-45e1-b0b1-dae3d58d2133 req-f152d6ce-131a-4413-a004-b283bc47001d service nova] Acquiring lock "refresh_cache-2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1389.674719] env[61839]: DEBUG oslo_concurrency.lockutils [req-032165a3-a199-45e1-b0b1-dae3d58d2133 req-f152d6ce-131a-4413-a004-b283bc47001d service nova] Acquired lock "refresh_cache-2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1389.674882] env[61839]: DEBUG nova.network.neutron [req-032165a3-a199-45e1-b0b1-dae3d58d2133 req-f152d6ce-131a-4413-a004-b283bc47001d service nova] [instance: 2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe] Refreshing network info cache for port 3624d944-a5f5-4931-ab01-2ac46a579a1d {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1390.400601] env[61839]: DEBUG nova.network.neutron [req-032165a3-a199-45e1-b0b1-dae3d58d2133 req-f152d6ce-131a-4413-a004-b283bc47001d service nova] [instance: 2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe] Updated VIF entry in instance network info cache for port 3624d944-a5f5-4931-ab01-2ac46a579a1d. {{(pid=61839) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1390.401010] env[61839]: DEBUG nova.network.neutron [req-032165a3-a199-45e1-b0b1-dae3d58d2133 req-f152d6ce-131a-4413-a004-b283bc47001d service nova] [instance: 2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe] Updating instance_info_cache with network_info: [{"id": "3624d944-a5f5-4931-ab01-2ac46a579a1d", "address": "fa:16:3e:06:20:57", "network": {"id": "10bd9432-d418-491e-9a5f-d1bd6d8de00d", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-220780264-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.138", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "14891ffcf4e240c7b870ee7dc63f28c7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "35ac9709-fd8b-4630-897a-68ed629d1b11", "external-id": "nsx-vlan-transportzone-284", "segmentation_id": 284, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3624d944-a5", "ovs_interfaceid": "3624d944-a5f5-4931-ab01-2ac46a579a1d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1390.904418] env[61839]: DEBUG oslo_concurrency.lockutils [req-032165a3-a199-45e1-b0b1-dae3d58d2133 req-f152d6ce-131a-4413-a004-b283bc47001d service nova] Releasing lock "refresh_cache-2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1421.195171] env[61839]: DEBUG oslo_concurrency.lockutils [None req-e620c93a-faa3-47dd-8002-8cc0579ab4c2 
tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Acquiring lock "2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1421.195463] env[61839]: DEBUG oslo_concurrency.lockutils [None req-e620c93a-faa3-47dd-8002-8cc0579ab4c2 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Lock "2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1421.699158] env[61839]: DEBUG nova.compute.utils [None req-e620c93a-faa3-47dd-8002-8cc0579ab4c2 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Using /dev/sd instead of None {{(pid=61839) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1422.202743] env[61839]: DEBUG oslo_concurrency.lockutils [None req-e620c93a-faa3-47dd-8002-8cc0579ab4c2 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Lock "2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.007s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1423.264969] env[61839]: DEBUG oslo_concurrency.lockutils [None req-e620c93a-faa3-47dd-8002-8cc0579ab4c2 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Acquiring lock "2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1423.265374] env[61839]: DEBUG oslo_concurrency.lockutils [None req-e620c93a-faa3-47dd-8002-8cc0579ab4c2 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Lock "2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1423.265495] env[61839]: INFO nova.compute.manager [None req-e620c93a-faa3-47dd-8002-8cc0579ab4c2 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] [instance: 2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe] Attaching volume 51e0c9cf-69ef-4a0a-9e03-fa748a1c8095 to /dev/sdb [ 1423.318829] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9557608a-7627-436f-bf80-648b01d073eb {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1423.326069] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6b090a7-d971-411c-8889-9347687dd00b {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1423.338790] env[61839]: DEBUG nova.virt.block_device [None req-e620c93a-faa3-47dd-8002-8cc0579ab4c2 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] [instance: 2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe] 
Updating existing volume attachment record: 05a378c4-eba3-472b-8bbe-a9258da35f73 {{(pid=61839) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1427.881518] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-e620c93a-faa3-47dd-8002-8cc0579ab4c2 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] [instance: 2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe] Volume attach. Driver type: vmdk {{(pid=61839) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1427.881853] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-e620c93a-faa3-47dd-8002-8cc0579ab4c2 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] [instance: 2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-281476', 'volume_id': '51e0c9cf-69ef-4a0a-9e03-fa748a1c8095', 'name': 'volume-51e0c9cf-69ef-4a0a-9e03-fa748a1c8095', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe', 'attached_at': '', 'detached_at': '', 'volume_id': '51e0c9cf-69ef-4a0a-9e03-fa748a1c8095', 'serial': '51e0c9cf-69ef-4a0a-9e03-fa748a1c8095'} {{(pid=61839) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1427.882746] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-778dc1f5-d4a6-4de6-87d5-5866fc323534 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1427.899662] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1598659-bcb9-4176-8961-7e9b2445a4e6 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1427.923083] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-e620c93a-faa3-47dd-8002-8cc0579ab4c2 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] [instance: 2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe] Reconfiguring VM instance instance-00000072 to attach disk [datastore2] volume-51e0c9cf-69ef-4a0a-9e03-fa748a1c8095/volume-51e0c9cf-69ef-4a0a-9e03-fa748a1c8095.vmdk or device None with type thin {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1427.923366] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4d3bb106-f283-4a59-b8f0-8b5854b20947 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1427.941461] env[61839]: DEBUG oslo_vmware.api [None req-e620c93a-faa3-47dd-8002-8cc0579ab4c2 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Waiting for the task: (returnval){ [ 1427.941461] env[61839]: value = "task-1315225" [ 1427.941461] env[61839]: _type = "Task" [ 1427.941461] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1427.949492] env[61839]: DEBUG oslo_vmware.api [None req-e620c93a-faa3-47dd-8002-8cc0579ab4c2 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Task: {'id': task-1315225, 'name': ReconfigVM_Task} progress is 5%. 
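The _attach_volume_vmdk step works from the connection_info dict logged above: the 'data' payload carries the shadow-VM moref ('vm-281476'), the volume id, and the access mode, after which the driver issues ReconfigVM_Task to wire the vmdk into the instance. A small extractor for that shape (field names are copied from the log; the helper itself is illustrative, not Nova's):

    def parse_vmdk_connection_info(connection_info):
        """Pull out the fields the vmdk attach path needs.

        Expects the shape logged by _attach_volume_vmdk, e.g.
        {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-281476', ...}}.
        """
        if connection_info.get('driver_volume_type') != 'vmdk':
            raise ValueError('not a vmdk volume')
        data = connection_info['data']
        return {
            'backing_moref': data['volume'],  # shadow VM holding the disk
            'volume_id': data['volume_id'],
            'vmdk_name': data['name'],
            'read_only': data.get('access_mode') == 'ro',
        }

For the attach above this yields backing_moref='vm-281476' and read_only=False.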
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1428.451618] env[61839]: DEBUG oslo_vmware.api [None req-e620c93a-faa3-47dd-8002-8cc0579ab4c2 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Task: {'id': task-1315225, 'name': ReconfigVM_Task, 'duration_secs': 0.316046} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1428.451902] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-e620c93a-faa3-47dd-8002-8cc0579ab4c2 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] [instance: 2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe] Reconfigured VM instance instance-00000072 to attach disk [datastore2] volume-51e0c9cf-69ef-4a0a-9e03-fa748a1c8095/volume-51e0c9cf-69ef-4a0a-9e03-fa748a1c8095.vmdk or device None with type thin {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1428.456473] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b71dedde-de1b-4cd7-8cbb-69ea633d94ad {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1428.470488] env[61839]: DEBUG oslo_vmware.api [None req-e620c93a-faa3-47dd-8002-8cc0579ab4c2 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Waiting for the task: (returnval){ [ 1428.470488] env[61839]: value = "task-1315226" [ 1428.470488] env[61839]: _type = "Task" [ 1428.470488] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1428.477896] env[61839]: DEBUG oslo_vmware.api [None req-e620c93a-faa3-47dd-8002-8cc0579ab4c2 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Task: {'id': task-1315226, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1428.980629] env[61839]: DEBUG oslo_vmware.api [None req-e620c93a-faa3-47dd-8002-8cc0579ab4c2 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Task: {'id': task-1315226, 'name': ReconfigVM_Task, 'duration_secs': 0.127338} completed successfully. 
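The mountpoints handed out in this sequence (/dev/sdb for the first volume, /dev/sdc for the next) come from the get_next_device_name helper named in the log, which falls back to the /dev/sd prefix when no device was requested and picks the next free letter. A simplified take, ignoring the flavor and block-device-mapping hints the real helper also honours:

    import string

    def get_next_device_name(in_use, prefix='/dev/sd'):
        """Simplified device-name picker; 'a' is assumed taken by the root disk."""
        for letter in string.ascii_lowercase[1:]:
            candidate = prefix + letter
            if candidate not in in_use:
                return candidate
        raise ValueError('no free device names on %s' % prefix)

    assert get_next_device_name({'/dev/sda'}) == '/dev/sdb'
    assert get_next_device_name({'/dev/sda', '/dev/sdb'}) == '/dev/sdc'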
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1428.980995] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-e620c93a-faa3-47dd-8002-8cc0579ab4c2 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] [instance: 2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-281476', 'volume_id': '51e0c9cf-69ef-4a0a-9e03-fa748a1c8095', 'name': 'volume-51e0c9cf-69ef-4a0a-9e03-fa748a1c8095', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe', 'attached_at': '', 'detached_at': '', 'volume_id': '51e0c9cf-69ef-4a0a-9e03-fa748a1c8095', 'serial': '51e0c9cf-69ef-4a0a-9e03-fa748a1c8095'} {{(pid=61839) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1430.017021] env[61839]: DEBUG nova.objects.instance [None req-e620c93a-faa3-47dd-8002-8cc0579ab4c2 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Lazy-loading 'flavor' on Instance uuid 2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe {{(pid=61839) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1430.526055] env[61839]: DEBUG oslo_concurrency.lockutils [None req-e620c93a-faa3-47dd-8002-8cc0579ab4c2 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Lock "2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.260s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1431.313771] env[61839]: DEBUG oslo_concurrency.lockutils [None req-438c136c-d92a-41b1-bb75-c321b171384f tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Acquiring lock "2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1431.314054] env[61839]: DEBUG oslo_concurrency.lockutils [None req-438c136c-d92a-41b1-bb75-c321b171384f tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Lock "2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1431.817715] env[61839]: DEBUG nova.compute.utils [None req-438c136c-d92a-41b1-bb75-c321b171384f tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Using /dev/sd instead of None {{(pid=61839) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1431.992803] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1432.320596] env[61839]: DEBUG oslo_concurrency.lockutils [None req-438c136c-d92a-41b1-bb75-c321b171384f tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Lock "2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe" "released" by 
"nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1433.373659] env[61839]: DEBUG oslo_concurrency.lockutils [None req-438c136c-d92a-41b1-bb75-c321b171384f tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Acquiring lock "2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1433.374049] env[61839]: DEBUG oslo_concurrency.lockutils [None req-438c136c-d92a-41b1-bb75-c321b171384f tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Lock "2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1433.374194] env[61839]: INFO nova.compute.manager [None req-438c136c-d92a-41b1-bb75-c321b171384f tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] [instance: 2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe] Attaching volume 2e76b1d2-416b-4149-879d-93cff5814c36 to /dev/sdc [ 1433.403762] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-848c4fc3-5e13-4cd0-bc98-5cfca796c02f {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1433.410519] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce685ec7-cfce-4349-9daa-c44a1ca9e6c6 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1433.423891] env[61839]: DEBUG nova.virt.block_device [None req-438c136c-d92a-41b1-bb75-c321b171384f tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] [instance: 2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe] Updating existing volume attachment record: 805b99d2-4807-4246-8bd9-9f28b5cd70db {{(pid=61839) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1433.489217] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1436.489236] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1436.992428] env[61839]: DEBUG oslo_concurrency.lockutils [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1436.992680] env[61839]: DEBUG oslo_concurrency.lockutils [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61839) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1436.992976] env[61839]: DEBUG oslo_concurrency.lockutils [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1436.993044] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61839) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1436.993915] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-881d1fe0-95a6-4f54-8f69-5bb0ae0aeb98 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1437.002192] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4d04316-f958-4e6e-9228-77e34a3562ed {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1437.015763] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9eb9d99a-253a-4f5f-937b-050e0b218466 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1437.021980] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b6ec3c1-e9e4-4ec1-b132-21f355f04320 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1437.051361] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181520MB free_disk=135GB free_vcpus=48 pci_devices=None {{(pid=61839) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1437.051545] env[61839]: DEBUG oslo_concurrency.lockutils [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1437.051698] env[61839]: DEBUG oslo_concurrency.lockutils [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1437.965230] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-438c136c-d92a-41b1-bb75-c321b171384f tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] [instance: 2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe] Volume attach. 
Driver type: vmdk {{(pid=61839) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1437.965534] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-438c136c-d92a-41b1-bb75-c321b171384f tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] [instance: 2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-281477', 'volume_id': '2e76b1d2-416b-4149-879d-93cff5814c36', 'name': 'volume-2e76b1d2-416b-4149-879d-93cff5814c36', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe', 'attached_at': '', 'detached_at': '', 'volume_id': '2e76b1d2-416b-4149-879d-93cff5814c36', 'serial': '2e76b1d2-416b-4149-879d-93cff5814c36'} {{(pid=61839) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1437.966424] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ba24654-9b7a-4b08-8daf-0d456fa10029 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1437.983311] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d8983e9-43c9-4f19-9c4f-cada383edeac {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1438.009885] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-438c136c-d92a-41b1-bb75-c321b171384f tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] [instance: 2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe] Reconfiguring VM instance instance-00000072 to attach disk [datastore2] volume-2e76b1d2-416b-4149-879d-93cff5814c36/volume-2e76b1d2-416b-4149-879d-93cff5814c36.vmdk or device None with type thin {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1438.010161] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1691457c-1614-4994-b2ab-b26033dcf1c7 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1438.027786] env[61839]: DEBUG oslo_vmware.api [None req-438c136c-d92a-41b1-bb75-c321b171384f tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Waiting for the task: (returnval){ [ 1438.027786] env[61839]: value = "task-1315229" [ 1438.027786] env[61839]: _type = "Task" [ 1438.027786] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1438.035534] env[61839]: DEBUG oslo_vmware.api [None req-438c136c-d92a-41b1-bb75-c321b171384f tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Task: {'id': task-1315229, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1438.075393] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Instance 2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
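While the attach proceeds, the resource tracker audit cross-checks each actively managed instance against its placement allocations; here the single instance consumes {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}. Aggregating such per-instance dicts into a host usage view is a straight sum, as in this toy illustration (not the tracker's code):

    from collections import Counter

    def total_usage(allocations):
        """Sum per-instance placement allocations into one usage view.

        allocations: mapping of instance uuid -> {'resources': {...}}.
        """
        usage = Counter()
        for alloc in allocations.values():
            usage.update(alloc['resources'])
        return dict(usage)

    # With only the one instance from the log:
    allocs = {'2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe':
              {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}}
    assert total_usage(allocs) == {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}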
{{(pid=61839) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1438.075644] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Total usable vcpus: 48, total allocated vcpus: 1 {{(pid=61839) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1438.075846] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=704MB phys_disk=200GB used_disk=1GB total_vcpus=48 used_vcpus=1 pci_stats=[] {{(pid=61839) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1438.104226] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-496f6c74-182b-4064-b7b7-90a093ea959e {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1438.111881] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60cbbbc4-8331-4a50-81df-21dc0c660cce {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1438.142250] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43bf02d3-b42d-4013-97a8-3cdfabfd9a0f {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1438.148857] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80144aee-b8f9-4239-bf7a-2e53f0a1c9f0 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1438.161724] env[61839]: DEBUG nova.compute.provider_tree [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1438.536964] env[61839]: DEBUG oslo_vmware.api [None req-438c136c-d92a-41b1-bb75-c321b171384f tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Task: {'id': task-1315229, 'name': ReconfigVM_Task, 'duration_secs': 0.350941} completed successfully. 
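The "Final resource view" numbers are simple arithmetic over that audit: used_ram is the 512MB the host reserves for itself (the MEMORY_MB 'reserved' value in the inventory below, which matches Nova's reserved_host_memory_mb default) plus the one instance's 192MB allocation:

    # Reproducing the 'Final resource view' arithmetic from the log.
    reserved_host_memory_mb = 512   # host reservation (inventory 'reserved')
    instance_memory_mb = 192        # the single instance's allocation

    used_ram_mb = reserved_host_memory_mb + instance_memory_mb
    assert used_ram_mb == 704       # 'used_ram=704MB' in the log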
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1438.537291] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-438c136c-d92a-41b1-bb75-c321b171384f tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] [instance: 2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe] Reconfigured VM instance instance-00000072 to attach disk [datastore2] volume-2e76b1d2-416b-4149-879d-93cff5814c36/volume-2e76b1d2-416b-4149-879d-93cff5814c36.vmdk or device None with type thin {{(pid=61839) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1438.541894] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-603cf49b-326d-461f-9cb3-ab7d6884adbf {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1438.555912] env[61839]: DEBUG oslo_vmware.api [None req-438c136c-d92a-41b1-bb75-c321b171384f tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Waiting for the task: (returnval){ [ 1438.555912] env[61839]: value = "task-1315230" [ 1438.555912] env[61839]: _type = "Task" [ 1438.555912] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1438.563317] env[61839]: DEBUG oslo_vmware.api [None req-438c136c-d92a-41b1-bb75-c321b171384f tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Task: {'id': task-1315230, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1438.664614] env[61839]: DEBUG nova.scheduler.client.report [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1439.066084] env[61839]: DEBUG oslo_vmware.api [None req-438c136c-d92a-41b1-bb75-c321b171384f tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Task: {'id': task-1315230, 'name': ReconfigVM_Task, 'duration_secs': 0.122654} completed successfully. 
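The inventory record also shows how allocation ratios stretch physical capacity: placement's effective capacity per resource class is (total - reserved) * allocation_ratio, with max_unit capping any single allocation rather than the pool. Checking the logged values:

    def capacity(inv):
        # Placement's effective-capacity formula; max_unit (16 vCPUs,
        # 65530MB, 135GB here) limits one allocation, not the total.
        return int((inv['total'] - inv['reserved']) * inv['allocation_ratio'])

    vcpu = {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0}
    mem = {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0}
    disk = {'total': 400, 'reserved': 0, 'allocation_ratio': 1.0}
    assert capacity(vcpu) == 192     # 48 physical vCPUs oversubscribed 4x
    assert capacity(mem) == 196078
    assert capacity(disk) == 400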
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1439.066402] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-438c136c-d92a-41b1-bb75-c321b171384f tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] [instance: 2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-281477', 'volume_id': '2e76b1d2-416b-4149-879d-93cff5814c36', 'name': 'volume-2e76b1d2-416b-4149-879d-93cff5814c36', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe', 'attached_at': '', 'detached_at': '', 'volume_id': '2e76b1d2-416b-4149-879d-93cff5814c36', 'serial': '2e76b1d2-416b-4149-879d-93cff5814c36'} {{(pid=61839) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1439.169647] env[61839]: DEBUG nova.compute.resource_tracker [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61839) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1439.169846] env[61839]: DEBUG oslo_concurrency.lockutils [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.118s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1440.099028] env[61839]: DEBUG nova.objects.instance [None req-438c136c-d92a-41b1-bb75-c321b171384f tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Lazy-loading 'flavor' on Instance uuid 2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe {{(pid=61839) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1440.170147] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1440.170439] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Starting heal instance info cache {{(pid=61839) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 1440.170653] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Rebuilding the list of instances to heal {{(pid=61839) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1440.605025] env[61839]: DEBUG oslo_concurrency.lockutils [None req-438c136c-d92a-41b1-bb75-c321b171384f tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Lock "2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.231s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1440.738052] env[61839]: DEBUG oslo_concurrency.lockutils [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Acquiring lock "refresh_cache-2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1440.738323] env[61839]: DEBUG oslo_concurrency.lockutils [None 
req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Acquired lock "refresh_cache-2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1440.738493] env[61839]: DEBUG nova.network.neutron [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] [instance: 2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe] Forcefully refreshing network info cache for instance {{(pid=61839) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 1440.738654] env[61839]: DEBUG nova.objects.instance [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Lazy-loading 'info_cache' on Instance uuid 2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe {{(pid=61839) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1440.893074] env[61839]: DEBUG oslo_concurrency.lockutils [None req-e78b1ca5-58e4-45d4-a653-250ee43a3377 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Acquiring lock "2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1440.893321] env[61839]: DEBUG oslo_concurrency.lockutils [None req-e78b1ca5-58e4-45d4-a653-250ee43a3377 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Lock "2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1441.397140] env[61839]: INFO nova.compute.manager [None req-e78b1ca5-58e4-45d4-a653-250ee43a3377 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] [instance: 2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe] Detaching volume 51e0c9cf-69ef-4a0a-9e03-fa748a1c8095 [ 1441.428622] env[61839]: INFO nova.virt.block_device [None req-e78b1ca5-58e4-45d4-a653-250ee43a3377 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] [instance: 2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe] Attempting to driver detach volume 51e0c9cf-69ef-4a0a-9e03-fa748a1c8095 from mountpoint /dev/sdb [ 1441.428876] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-e78b1ca5-58e4-45d4-a653-250ee43a3377 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] [instance: 2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe] Volume detach. 
Driver type: vmdk {{(pid=61839) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1441.429084] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-e78b1ca5-58e4-45d4-a653-250ee43a3377 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] [instance: 2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-281476', 'volume_id': '51e0c9cf-69ef-4a0a-9e03-fa748a1c8095', 'name': 'volume-51e0c9cf-69ef-4a0a-9e03-fa748a1c8095', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe', 'attached_at': '', 'detached_at': '', 'volume_id': '51e0c9cf-69ef-4a0a-9e03-fa748a1c8095', 'serial': '51e0c9cf-69ef-4a0a-9e03-fa748a1c8095'} {{(pid=61839) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1441.429989] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9aaad570-fa6c-4d6d-ad6f-bd711f9fe258 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.453756] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae84bda1-0f1b-4989-9758-088c9376144c {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.460604] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbe951a9-ccd6-4876-8ce1-9741c1b4f574 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.482587] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef0825c0-a370-4309-80dc-ab34000ad529 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.496356] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-e78b1ca5-58e4-45d4-a653-250ee43a3377 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] The volume has not been displaced from its original location: [datastore2] volume-51e0c9cf-69ef-4a0a-9e03-fa748a1c8095/volume-51e0c9cf-69ef-4a0a-9e03-fa748a1c8095.vmdk. No consolidation needed. 
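"No consolidation needed" is the displacement check in _consolidate_vmdk_volume: the driver compares where vCenter says the disk backing now lives against the datastore path the volume record expects, and only relocates and consolidates on a mismatch (e.g. after a storage migration). Reduced to its essence; the real check inspects the device's backing object rather than bare strings:

    def needs_consolidation(current_path, original_path):
        """True when the backing vmdk moved from where the volume expects it.

        Paths are datastore-style strings like
        '[datastore2] volume-<id>/volume-<id>.vmdk'.
        """
        return current_path != original_path

    orig = ('[datastore2] volume-51e0c9cf-69ef-4a0a-9e03-fa748a1c8095/'
            'volume-51e0c9cf-69ef-4a0a-9e03-fa748a1c8095.vmdk')
    assert not needs_consolidation(orig, orig)  # 'No consolidation needed.'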
{{(pid=61839) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1441.501382] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-e78b1ca5-58e4-45d4-a653-250ee43a3377 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] [instance: 2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe] Reconfiguring VM instance instance-00000072 to detach disk 2001 {{(pid=61839) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1441.501633] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-75f28d1e-7b5f-4cb4-b349-8f9060cff524 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.519298] env[61839]: DEBUG oslo_vmware.api [None req-e78b1ca5-58e4-45d4-a653-250ee43a3377 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Waiting for the task: (returnval){ [ 1441.519298] env[61839]: value = "task-1315231" [ 1441.519298] env[61839]: _type = "Task" [ 1441.519298] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1441.527465] env[61839]: DEBUG oslo_vmware.api [None req-e78b1ca5-58e4-45d4-a653-250ee43a3377 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Task: {'id': task-1315231, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1442.028637] env[61839]: DEBUG oslo_vmware.api [None req-e78b1ca5-58e4-45d4-a653-250ee43a3377 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Task: {'id': task-1315231, 'name': ReconfigVM_Task, 'duration_secs': 0.203934} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1442.028880] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-e78b1ca5-58e4-45d4-a653-250ee43a3377 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] [instance: 2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe] Reconfigured VM instance instance-00000072 to detach disk 2001 {{(pid=61839) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1442.033935] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f0a9effe-ff81-4c2a-a32b-731d5996eab0 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.050485] env[61839]: DEBUG oslo_vmware.api [None req-e78b1ca5-58e4-45d4-a653-250ee43a3377 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Waiting for the task: (returnval){ [ 1442.050485] env[61839]: value = "task-1315232" [ 1442.050485] env[61839]: _type = "Task" [ 1442.050485] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1442.058729] env[61839]: DEBUG oslo_vmware.api [None req-e78b1ca5-58e4-45d4-a653-250ee43a3377 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Task: {'id': task-1315232, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1442.450429] env[61839]: DEBUG nova.network.neutron [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] [instance: 2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe] Updating instance_info_cache with network_info: [{"id": "3624d944-a5f5-4931-ab01-2ac46a579a1d", "address": "fa:16:3e:06:20:57", "network": {"id": "10bd9432-d418-491e-9a5f-d1bd6d8de00d", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-220780264-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.138", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "14891ffcf4e240c7b870ee7dc63f28c7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "35ac9709-fd8b-4630-897a-68ed629d1b11", "external-id": "nsx-vlan-transportzone-284", "segmentation_id": 284, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3624d944-a5", "ovs_interfaceid": "3624d944-a5f5-4931-ab01-2ac46a579a1d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1442.559843] env[61839]: DEBUG oslo_vmware.api [None req-e78b1ca5-58e4-45d4-a653-250ee43a3377 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Task: {'id': task-1315232, 'name': ReconfigVM_Task, 'duration_secs': 0.124198} completed successfully. 
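The refreshed instance_info_cache entry above is the same nested structure Nova hands to drivers as network_info: a list of VIFs, each carrying a network with subnets, fixed IPs, and floating IPs nested under the fixed address they map to. An illustrative helper that flattens it into address lists:

    def instance_addresses(network_info):
        """Flatten cached network_info into (fixed, floating) IP lists."""
        fixed, floating = [], []
        for vif in network_info:
            for subnet in vif['network']['subnets']:
                for ip in subnet['ips']:
                    fixed.append(ip['address'])
                    floating.extend(f['address']
                                    for f in ip.get('floating_ips', []))
        return fixed, floating

    # Applied to the cache entry above this yields
    # (['192.168.128.12'], ['10.180.180.138']).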
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1442.560177] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-e78b1ca5-58e4-45d4-a653-250ee43a3377 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] [instance: 2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-281476', 'volume_id': '51e0c9cf-69ef-4a0a-9e03-fa748a1c8095', 'name': 'volume-51e0c9cf-69ef-4a0a-9e03-fa748a1c8095', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe', 'attached_at': '', 'detached_at': '', 'volume_id': '51e0c9cf-69ef-4a0a-9e03-fa748a1c8095', 'serial': '51e0c9cf-69ef-4a0a-9e03-fa748a1c8095'} {{(pid=61839) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1442.953316] env[61839]: DEBUG oslo_concurrency.lockutils [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Releasing lock "refresh_cache-2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe" {{(pid=61839) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1442.953542] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] [instance: 2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe] Updated the network info_cache for instance {{(pid=61839) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9999}} [ 1442.953766] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1442.953930] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1442.954107] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1443.101487] env[61839]: DEBUG nova.objects.instance [None req-e78b1ca5-58e4-45d4-a653-250ee43a3377 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Lazy-loading 'flavor' on Instance uuid 2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe {{(pid=61839) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1444.110113] env[61839]: DEBUG oslo_concurrency.lockutils [None req-e78b1ca5-58e4-45d4-a653-250ee43a3377 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Lock "2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.217s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1444.127992] env[61839]: DEBUG oslo_concurrency.lockutils [None req-11392529-8ecd-4632-bb07-0e41e1cf6729 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Acquiring lock "2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" 
{{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1444.128230] env[61839]: DEBUG oslo_concurrency.lockutils [None req-11392529-8ecd-4632-bb07-0e41e1cf6729 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Lock "2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1444.631601] env[61839]: INFO nova.compute.manager [None req-11392529-8ecd-4632-bb07-0e41e1cf6729 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] [instance: 2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe] Detaching volume 2e76b1d2-416b-4149-879d-93cff5814c36 [ 1444.660670] env[61839]: INFO nova.virt.block_device [None req-11392529-8ecd-4632-bb07-0e41e1cf6729 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] [instance: 2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe] Attempting to driver detach volume 2e76b1d2-416b-4149-879d-93cff5814c36 from mountpoint /dev/sdc [ 1444.660915] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-11392529-8ecd-4632-bb07-0e41e1cf6729 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] [instance: 2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe] Volume detach. Driver type: vmdk {{(pid=61839) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1444.661135] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-11392529-8ecd-4632-bb07-0e41e1cf6729 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] [instance: 2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-281477', 'volume_id': '2e76b1d2-416b-4149-879d-93cff5814c36', 'name': 'volume-2e76b1d2-416b-4149-879d-93cff5814c36', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe', 'attached_at': '', 'detached_at': '', 'volume_id': '2e76b1d2-416b-4149-879d-93cff5814c36', 'serial': '2e76b1d2-416b-4149-879d-93cff5814c36'} {{(pid=61839) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1444.662013] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a345cecb-fe65-48c2-8829-e5e23a5556fd {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.683927] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3396d5ff-bab3-4d54-89eb-87d80015116a {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.690807] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4928b1f7-7855-4c25-b554-107d853643e8 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.710053] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c6abd01-ff58-4b82-a365-72c5c85cf641 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.724296] env[61839]: DEBUG 
nova.virt.vmwareapi.volumeops [None req-11392529-8ecd-4632-bb07-0e41e1cf6729 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] The volume has not been displaced from its original location: [datastore2] volume-2e76b1d2-416b-4149-879d-93cff5814c36/volume-2e76b1d2-416b-4149-879d-93cff5814c36.vmdk. No consolidation needed. {{(pid=61839) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1444.729497] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-11392529-8ecd-4632-bb07-0e41e1cf6729 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] [instance: 2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe] Reconfiguring VM instance instance-00000072 to detach disk 2002 {{(pid=61839) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1444.729792] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c4653b0b-9ae2-472f-9cc5-6a2d395fa94f {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.747259] env[61839]: DEBUG oslo_vmware.api [None req-11392529-8ecd-4632-bb07-0e41e1cf6729 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Waiting for the task: (returnval){ [ 1444.747259] env[61839]: value = "task-1315233" [ 1444.747259] env[61839]: _type = "Task" [ 1444.747259] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1444.754710] env[61839]: DEBUG oslo_vmware.api [None req-11392529-8ecd-4632-bb07-0e41e1cf6729 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Task: {'id': task-1315233, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1445.257169] env[61839]: DEBUG oslo_vmware.api [None req-11392529-8ecd-4632-bb07-0e41e1cf6729 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Task: {'id': task-1315233, 'name': ReconfigVM_Task} progress is 99%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1445.489070] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1445.489310] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1445.757852] env[61839]: DEBUG oslo_vmware.api [None req-11392529-8ecd-4632-bb07-0e41e1cf6729 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Task: {'id': task-1315233, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1445.994060] env[61839]: DEBUG oslo_service.periodic_task [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61839) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1445.994241] env[61839]: DEBUG nova.compute.manager [None req-834cb0ed-04cb-4990-a909-4837e2ade3a2 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61839) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 1446.258381] env[61839]: DEBUG oslo_vmware.api [None req-11392529-8ecd-4632-bb07-0e41e1cf6729 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Task: {'id': task-1315233, 'name': ReconfigVM_Task, 'duration_secs': 1.208783} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1446.258738] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-11392529-8ecd-4632-bb07-0e41e1cf6729 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] [instance: 2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe] Reconfigured VM instance instance-00000072 to detach disk 2002 {{(pid=61839) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1446.263121] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e6131532-cc57-4d04-a9d9-da738e6acec4 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1446.277167] env[61839]: DEBUG oslo_vmware.api [None req-11392529-8ecd-4632-bb07-0e41e1cf6729 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Waiting for the task: (returnval){ [ 1446.277167] env[61839]: value = "task-1315234" [ 1446.277167] env[61839]: _type = "Task" [ 1446.277167] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1446.284302] env[61839]: DEBUG oslo_vmware.api [None req-11392529-8ecd-4632-bb07-0e41e1cf6729 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Task: {'id': task-1315234, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1446.787436] env[61839]: DEBUG oslo_vmware.api [None req-11392529-8ecd-4632-bb07-0e41e1cf6729 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Task: {'id': task-1315234, 'name': ReconfigVM_Task, 'duration_secs': 0.123642} completed successfully. 
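_reclaim_queued_deletes is one of the oslo.service periodic tasks interleaved through this log, and it exits early exactly as logged because reclaim_instance_interval is non-positive (soft delete disabled; Nova's default is 0). A minimal sketch using the real decorator; the option registration mimics Nova's and the Housekeeping class is a stand-in:

    from oslo_config import cfg
    from oslo_service import periodic_task

    CONF = cfg.CONF
    # Assumed registration; in Nova this option already exists on CONF.
    CONF.register_opts([cfg.IntOpt('reclaim_instance_interval', default=0)])

    class Housekeeping(periodic_task.PeriodicTasks):
        def __init__(self):
            super().__init__(CONF)

        @periodic_task.periodic_task
        def _reclaim_queued_deletes(self, context):
            # Mirrors the guard logged above:
            # 'CONF.reclaim_instance_interval <= 0, skipping...'
            if CONF.reclaim_instance_interval <= 0:
                return
            # ... reclaim soft-deleted instances here ...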
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1446.787765] env[61839]: DEBUG nova.virt.vmwareapi.volumeops [None req-11392529-8ecd-4632-bb07-0e41e1cf6729 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] [instance: 2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-281477', 'volume_id': '2e76b1d2-416b-4149-879d-93cff5814c36', 'name': 'volume-2e76b1d2-416b-4149-879d-93cff5814c36', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe', 'attached_at': '', 'detached_at': '', 'volume_id': '2e76b1d2-416b-4149-879d-93cff5814c36', 'serial': '2e76b1d2-416b-4149-879d-93cff5814c36'} {{(pid=61839) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1447.326949] env[61839]: DEBUG nova.objects.instance [None req-11392529-8ecd-4632-bb07-0e41e1cf6729 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Lazy-loading 'flavor' on Instance uuid 2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe {{(pid=61839) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1448.334095] env[61839]: DEBUG oslo_concurrency.lockutils [None req-11392529-8ecd-4632-bb07-0e41e1cf6729 tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Lock "2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 4.205s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1449.514142] env[61839]: DEBUG oslo_concurrency.lockutils [None req-60b7b752-e4a5-4428-b2d0-72d2db2833cd tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Acquiring lock "2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1449.514519] env[61839]: DEBUG oslo_concurrency.lockutils [None req-60b7b752-e4a5-4428-b2d0-72d2db2833cd tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Lock "2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1449.514753] env[61839]: DEBUG oslo_concurrency.lockutils [None req-60b7b752-e4a5-4428-b2d0-72d2db2833cd tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Acquiring lock "2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1449.515019] env[61839]: DEBUG oslo_concurrency.lockutils [None req-60b7b752-e4a5-4428-b2d0-72d2db2833cd tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Lock "2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61839) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1449.515267] env[61839]: DEBUG oslo_concurrency.lockutils [None req-60b7b752-e4a5-4428-b2d0-72d2db2833cd tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Lock "2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1449.517650] env[61839]: INFO nova.compute.manager [None req-60b7b752-e4a5-4428-b2d0-72d2db2833cd tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] [instance: 2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe] Terminating instance [ 1449.519527] env[61839]: DEBUG nova.compute.manager [None req-60b7b752-e4a5-4428-b2d0-72d2db2833cd tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] [instance: 2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe] Start destroying the instance on the hypervisor. {{(pid=61839) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1449.519772] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-60b7b752-e4a5-4428-b2d0-72d2db2833cd tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] [instance: 2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe] Destroying instance {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1449.520949] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd7cd538-10d9-4cea-ac9c-50044c7c14d8 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.529478] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-60b7b752-e4a5-4428-b2d0-72d2db2833cd tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] [instance: 2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe] Powering off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1449.529701] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ce300878-01cf-4910-8d7a-d56a4635921c {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.536898] env[61839]: DEBUG oslo_vmware.api [None req-60b7b752-e4a5-4428-b2d0-72d2db2833cd tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Waiting for the task: (returnval){ [ 1449.536898] env[61839]: value = "task-1315235" [ 1449.536898] env[61839]: _type = "Task" [ 1449.536898] env[61839]: } to complete. {{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1449.544556] env[61839]: DEBUG oslo_vmware.api [None req-60b7b752-e4a5-4428-b2d0-72d2db2833cd tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Task: {'id': task-1315235, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1450.047682] env[61839]: DEBUG oslo_vmware.api [None req-60b7b752-e4a5-4428-b2d0-72d2db2833cd tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Task: {'id': task-1315235, 'name': PowerOffVM_Task, 'duration_secs': 0.166841} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1450.047966] env[61839]: DEBUG nova.virt.vmwareapi.vm_util [None req-60b7b752-e4a5-4428-b2d0-72d2db2833cd tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] [instance: 2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe] Powered off the VM {{(pid=61839) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1450.048163] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-60b7b752-e4a5-4428-b2d0-72d2db2833cd tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] [instance: 2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe] Unregistering the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1450.048422] env[61839]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e66aaf82-d894-49e9-9e34-4a2d5aa94242 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.107958] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-60b7b752-e4a5-4428-b2d0-72d2db2833cd tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] [instance: 2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe] Unregistered the VM {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1450.108242] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-60b7b752-e4a5-4428-b2d0-72d2db2833cd tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] [instance: 2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe] Deleting contents of the VM from datastore datastore1 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1450.108439] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-60b7b752-e4a5-4428-b2d0-72d2db2833cd tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Deleting the datastore file [datastore1] 2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1450.108710] env[61839]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b0912c38-d0c0-4d9f-8284-5705701a14b7 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.115219] env[61839]: DEBUG oslo_vmware.api [None req-60b7b752-e4a5-4428-b2d0-72d2db2833cd tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Waiting for the task: (returnval){ [ 1450.115219] env[61839]: value = "task-1315237" [ 1450.115219] env[61839]: _type = "Task" [ 1450.115219] env[61839]: } to complete. 
{{(pid=61839) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1450.122464] env[61839]: DEBUG oslo_vmware.api [None req-60b7b752-e4a5-4428-b2d0-72d2db2833cd tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Task: {'id': task-1315237, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1450.626772] env[61839]: DEBUG oslo_vmware.api [None req-60b7b752-e4a5-4428-b2d0-72d2db2833cd tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Task: {'id': task-1315237, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.129994} completed successfully. {{(pid=61839) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1450.627121] env[61839]: DEBUG nova.virt.vmwareapi.ds_util [None req-60b7b752-e4a5-4428-b2d0-72d2db2833cd tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Deleted the datastore file {{(pid=61839) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1450.627248] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-60b7b752-e4a5-4428-b2d0-72d2db2833cd tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] [instance: 2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe] Deleted contents of the VM from datastore datastore1 {{(pid=61839) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1450.627432] env[61839]: DEBUG nova.virt.vmwareapi.vmops [None req-60b7b752-e4a5-4428-b2d0-72d2db2833cd tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] [instance: 2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe] Instance destroyed {{(pid=61839) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1450.627611] env[61839]: INFO nova.compute.manager [None req-60b7b752-e4a5-4428-b2d0-72d2db2833cd tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] [instance: 2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1450.627875] env[61839]: DEBUG oslo.service.loopingcall [None req-60b7b752-e4a5-4428-b2d0-72d2db2833cd tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61839) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1450.628093] env[61839]: DEBUG nova.compute.manager [-] [instance: 2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe] Deallocating network for instance {{(pid=61839) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1450.628203] env[61839]: DEBUG nova.network.neutron [-] [instance: 2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe] deallocate_for_instance() {{(pid=61839) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1451.093233] env[61839]: DEBUG nova.compute.manager [req-8b1f142c-0b08-406e-b678-67512d590700 req-f9566e5e-8a76-4f96-937c-d53d98e66449 service nova] [instance: 2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe] Received event network-vif-deleted-3624d944-a5f5-4931-ab01-2ac46a579a1d {{(pid=61839) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1451.093441] env[61839]: INFO nova.compute.manager [req-8b1f142c-0b08-406e-b678-67512d590700 req-f9566e5e-8a76-4f96-937c-d53d98e66449 service nova] [instance: 2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe] Neutron deleted interface 3624d944-a5f5-4931-ab01-2ac46a579a1d; detaching it from the instance and deleting it from the info cache [ 1451.093624] env[61839]: DEBUG nova.network.neutron [req-8b1f142c-0b08-406e-b678-67512d590700 req-f9566e5e-8a76-4f96-937c-d53d98e66449 service nova] [instance: 2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1451.509896] env[61839]: DEBUG nova.network.neutron [-] [instance: 2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe] Updating instance_info_cache with network_info: [] {{(pid=61839) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1451.597808] env[61839]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-71efc3af-9775-4175-abc2-ebb0e2891e27 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.607412] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd2560d1-6f4d-483f-823c-33e895897e5f {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.630175] env[61839]: DEBUG nova.compute.manager [req-8b1f142c-0b08-406e-b678-67512d590700 req-f9566e5e-8a76-4f96-937c-d53d98e66449 service nova] [instance: 2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe] Detach interface failed, port_id=3624d944-a5f5-4931-ab01-2ac46a579a1d, reason: Instance 2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe could not be found. {{(pid=61839) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1452.013340] env[61839]: INFO nova.compute.manager [-] [instance: 2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe] Took 1.38 seconds to deallocate network for instance. 
[ 1452.520603] env[61839]: DEBUG oslo_concurrency.lockutils [None req-60b7b752-e4a5-4428-b2d0-72d2db2833cd tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1452.520929] env[61839]: DEBUG oslo_concurrency.lockutils [None req-60b7b752-e4a5-4428-b2d0-72d2db2833cd tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1452.521210] env[61839]: DEBUG nova.objects.instance [None req-60b7b752-e4a5-4428-b2d0-72d2db2833cd tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Lazy-loading 'resources' on Instance uuid 2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe {{(pid=61839) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1453.056132] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-837e490e-875b-4bbf-9064-532fa723031c {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1453.063539] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32a70b08-3da4-4f05-a68c-659c38009dbe {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1453.092109] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ea3a996-e8be-4011-a90a-6fcb444e82d0 {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1453.098895] env[61839]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-107bd7f8-ab29-4fba-9ab9-798376b30a0b {{(pid=61839) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1453.111448] env[61839]: DEBUG nova.compute.provider_tree [None req-60b7b752-e4a5-4428-b2d0-72d2db2833cd tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Inventory has not changed in ProviderTree for provider: cef329e6-1ccd-42a8-bbc4-109a06d1c908 {{(pid=61839) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1453.614328] env[61839]: DEBUG nova.scheduler.client.report [None req-60b7b752-e4a5-4428-b2d0-72d2db2833cd tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Inventory has not changed for provider cef329e6-1ccd-42a8-bbc4-109a06d1c908 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61839) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1454.119065] env[61839]: DEBUG oslo_concurrency.lockutils [None req-60b7b752-e4a5-4428-b2d0-72d2db2833cd tempest-AttachVolumeTestJSON-1962140276 
tempest-AttachVolumeTestJSON-1962140276-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.598s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1454.139086] env[61839]: INFO nova.scheduler.client.report [None req-60b7b752-e4a5-4428-b2d0-72d2db2833cd tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Deleted allocations for instance 2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe [ 1454.646667] env[61839]: DEBUG oslo_concurrency.lockutils [None req-60b7b752-e4a5-4428-b2d0-72d2db2833cd tempest-AttachVolumeTestJSON-1962140276 tempest-AttachVolumeTestJSON-1962140276-project-member] Lock "2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.132s {{(pid=61839) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
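
Editor's note: the detach/terminate sequence above repeatedly shows the oslo.vmware request/poll cycle -- request_handler issues a vSphere *_Task SOAP call, then wait_for_task/_poll_task polls the task until it reports "completed successfully". Below is a minimal Python sketch of that cycle, not taken from this deployment: the vCenter host, credentials, and retry settings are illustrative placeholders; only VMwareAPISession, invoke_api, and wait_for_task are the real oslo.vmware entry points, and FindAllByUuid / PowerOffVM_Task are the same vSphere methods invoked in the log.

    from oslo_vmware import api

    # All connection values here are placeholder assumptions.
    session = api.VMwareAPISession(
        'vc.example.test',          # vCenter host (placeholder)
        'stack', 'secret',          # username / password (placeholders)
        api_retry_count=10,         # retries on transient API faults
        task_poll_interval=0.5)     # seconds between task polls

    # Look up the VM by its instance UUID, as SearchIndex.FindAllByUuid
    # is invoked in the log above; returns a list of VM morefs.
    vm_refs = session.invoke_api(
        session.vim, 'FindAllByUuid',
        session.vim.service_content.searchIndex,
        uuid='2dd8f5bc-7cb9-47b7-b82c-9035e4f84cbe',
        vmSearch=True, instanceUuid=True)

    # Start PowerOffVM_Task and block until vSphere reports a terminal
    # state; wait_for_task() re-polls while progress advances and raises
    # if the task ends in an error state instead of success.
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_refs[0])
    session.wait_for_task(task)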